/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
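
/* Worked example (illustrative, not from the original sources): with the
   default MOVE_RATIO of 15 when not optimizing for size, a 16-byte
   word-aligned copy on a 64-bit target needs 2 word moves, so
   MOVE_BY_PIECES_P is true and the copy is expanded inline; a 256-byte
   copy would need 32 moves, so it falls back to a movstr pattern or a
   libcall instead.  */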
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
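
#if 0
/* Illustrative sketch, not from the original sources: how the queue is
   typically driven when expanding V++ for a V that lives in memory.  VAR
   and BODY are assumed to be valid rtxen built by the caller; the `...'
   placeholder stands for whatever MEM the caller already has.  */
{
  rtx var = ...;				/* MEM for V (assumed) */
  rtx body = gen_rtx_SET (VOIDmode, var,
			  gen_rtx_PLUS (GET_MODE (var), var, const1_rtx));
  rtx queued = enqueue_insn (var, body);	/* pre-increment value */
  rtx safe = protect_from_queue (queued, 0);	/* usable in an insn now */
  emit_queue ();				/* increment emitted here */
}
#endif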
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
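
/* Concrete instance of the warning above (illustrative, not from the
   original sources):

     rtx tmp = protect_from_queue (x, 0);
     emit_queue ();
     emit_insn (gen_move_insn (dst, tmp));

   If the queue is flushed between the first and third lines, TMP can
   name the wrong value; put the protected value into an insn before
   anything can call emit_queue.  */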
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
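
#if 0
/* Illustrative sketch, not from the original sources: widening a signed
   SImode pseudo into a DImode pseudo.  UNSIGNEDP selects the equivalent
   rtx code, so 0 here means SIGN_EXTEND and 1 would mean ZERO_EXTEND.  */
{
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, 0);
}
#endif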
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
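
#if 0
/* Illustrative sketch, not from the original sources: constants can be
   folded with no insns emitted.  Widening (const_int -1), interpreted
   as an unsigned QImode value, to SImode yields (const_int 255)
   directly via the gen_int_mode path above.  */
{
  rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);
}
#endif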
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
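
/* Worked example (illustrative): on a host with a 64-bit HOST_WIDE_INT,
   2 * sizeof (HOST_WIDE_INT) is 16 bytes, so a target whose
   MOVE_MAX_PIECES is 8 gets STORE_MAX_PIECES == 8, while one claiming
   32 would still be clamped to 16.  */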
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
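
/* Worked example (illustrative): on a target with MOVE_MAX 8, full
   alignment and mov patterns for QI..DImode, l = 11 is covered as
   8 + 2 + 1 (one DImode, one HImode and one QImode move), so
   move_by_pieces_ninsns returns 3.  */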
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  if (PUSH_ARGS)
    return true;
  else
    {
      /* Check to see whether memcpy takes all register arguments.  */
      static enum {
	takes_regs_uninit, takes_regs_no, takes_regs_yes
      } takes_regs = takes_regs_uninit;

      switch (takes_regs)
	{
	case takes_regs_uninit:
	  {
	    CUMULATIVE_ARGS args_so_far;
	    tree fn, arg;

	    fn = emit_block_move_libcall_fn (false);
	    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	      {
		enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
		rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
		if (!tmp || !REG_P (tmp))
		  goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
		if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
						NULL_TREE, 1))
		  goto fail_takes_regs;
#endif
		FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	      }
	  }
	  takes_regs = takes_regs_yes;
	  /* FALLTHRU */

	case takes_regs_yes:
	  return true;

	fail_takes_regs:
	  takes_regs = takes_regs_no;
	  /* FALLTHRU */
	case takes_regs_no:
	  return false;

	default:
	  abort ();
	}
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src = copy_to_mode_reg (Pmode, XEXP (src, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  dst_tree = make_tree (ptr_type_node, dst);
  src_tree = make_tree (ptr_type_node, src);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (dst))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
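
/* The CALL_EXPR built above corresponds to the C call

     memcpy (dst, src, size)

   when TARGET_MEM_FUNCTIONS, and to bcopy (src, dst, size) otherwise;
   note the swapped pointer arguments in the bcopy case.  */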
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_move_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
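
/* The loop emitted above is equivalent to this C sketch (illustrative):

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];

   with the comparison placed at the bottom and entered by the initial
   jump to cmp_label.  */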
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
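
/* Illustrative example, not from the original sources: a typical DST for
   a 16-byte struct returned in two 64-bit registers (hypothetical hard
   regs 3 and 4) looks like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   and emit_group_load (dst, src, 16) fills r3 from bytes 0-7 of SRC and
   r4 from bytes 8-15.  */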
2309 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2310 registers represented by a PARALLEL. SSIZE represents the total size of
2311 block DST, or -1 if not known. */
2314 emit_group_store (orig_dst, src, ssize)
2321 if (GET_CODE (src) != PARALLEL)
2324 /* Check for a NULL entry, used to indicate that the parameter goes
2325 both on the stack and in registers. */
2326 if (XEXP (XVECEXP (src, 0, 0), 0))
2331 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2333 /* Copy the (probable) hard regs into pseudos. */
2334 for (i = start; i < XVECLEN (src, 0); i++)
2336 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2337 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2338 emit_move_insn (tmps[i], reg);
2342 /* If we won't be storing directly into memory, protect the real destination
2343 from strange tricks we might play. */
2345 if (GET_CODE (dst) == PARALLEL)
2349 /* We can get a PARALLEL dst if there is a conditional expression in
2350 a return statement. In that case, the dst and src are the same,
2351 so no action is necessary. */
2352 if (rtx_equal_p (dst, src))
2355 /* It is unclear if we can ever reach here, but we may as well handle
2356 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2359 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2360 emit_group_store (temp, src, ssize);
2361 emit_group_load (dst, temp, ssize);
2364 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2366 dst = gen_reg_rtx (GET_MODE (orig_dst));
2367 /* Make life a bit easier for combine. */
2368 emit_move_insn (dst, const0_rtx);
2371 /* Process the pieces. */
2372 for (i = start; i < XVECLEN (src, 0); i++)
2374 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2375 enum machine_mode mode = GET_MODE (tmps[i]);
2376 unsigned int bytelen = GET_MODE_SIZE (mode);
2379 /* Handle trailing fragments that run over the size of the struct. */
2380 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2382 if (BYTES_BIG_ENDIAN)
2384 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2385 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2386 tmps[i], 0, OPTAB_WIDEN);
2388 bytelen = ssize - bytepos;
2391 if (GET_CODE (dst) == CONCAT)
2393 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2394 dest = XEXP (dst, 0);
2395 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2397 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2398 dest = XEXP (dst, 1);
2404 /* Optimize the access just a bit. */
2405 if (GET_CODE (dest) == MEM
2406 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2407 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2408 && bytelen == GET_MODE_SIZE (mode))
2409 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2411 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2412 mode, tmps[i], ssize);
2417 /* Copy from the pseudo into the (probable) hard reg. */
2418 if (GET_CODE (dst) == REG)
2419 emit_move_insn (orig_dst, dst);
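/* Annotation (not from the original source): worked example of the
   trailing-fragment handling in the loop above.  With SSIZE == 6 and a
   final piece at bytepos == 4 in a 4-byte mode, shift becomes
   (4 - (6 - 4)) * BITS_PER_UNIT == 16; on a big-endian target the two
   meaningful bytes sit at the high end of the register, so they are
   shifted right 16 bits before bytelen is trimmed to 6 - 4 == 2.  */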
2422 /* Generate code to copy a BLKmode object of TYPE out of a
2423 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2424 is null, a stack temporary is created. TGTBLK is returned.
2426 The primary purpose of this routine is to handle functions
2427 that return BLKmode structures in registers. Some machines
2428 (the PA for example) want to return all small structures
2429 in registers regardless of the structure's alignment. */
2432 copy_blkmode_from_reg (tgtblk, srcreg, type)
2437 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2438 rtx src = NULL, dst = NULL;
2439 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2440 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2444 tgtblk = assign_temp (build_qualified_type (type,
2446 | TYPE_QUAL_CONST)),
2448 preserve_temp_slots (tgtblk);
2451 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2452 into a new pseudo which is a full word.
2454 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2455 the wrong part of the register gets copied so we fake a type conversion in place. */
2457 if (GET_MODE (srcreg) != BLKmode
2458 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2460 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2461 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2463 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2466 /* Structures whose size is not a multiple of a word are aligned
2467 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2468 machine, this means we must skip the empty high order bytes when
2469 calculating the bit offset. */
2470 if (BYTES_BIG_ENDIAN
2471 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2472 && bytes % UNITS_PER_WORD)
2473 big_endian_correction
2474 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2476 /* Copy the structure BITSIZE bits at a time.
2478 We could probably emit more efficient code for machines which do not use
2479 strict alignment, but it doesn't seem worth the effort at the current time. */
2481 for (bitpos = 0, xbitpos = big_endian_correction;
2482 bitpos < bytes * BITS_PER_UNIT;
2483 bitpos += bitsize, xbitpos += bitsize)
2485 /* We need a new source operand each time xbitpos is on a
2486 word boundary and when xbitpos == big_endian_correction
2487 (the first time through). */
2488 if (xbitpos % BITS_PER_WORD == 0
2489 || xbitpos == big_endian_correction)
2490 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2493 /* We need a new destination operand each time bitpos is on
2495 if (bitpos % BITS_PER_WORD == 0)
2496 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2498 /* Use xbitpos for the source extraction (right justified) and
2499 bitpos for the destination store (left justified). */
2500 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2501 extract_bit_field (src, bitsize,
2502 xbitpos % BITS_PER_WORD, 1,
2503 NULL_RTX, word_mode, word_mode,
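/* Annotation (not from the original source): worked example of the
   big-endian correction above.  With BITS_PER_WORD == 32,
   UNITS_PER_WORD == 4 and a 6-byte structure, bytes % UNITS_PER_WORD
   == 2, so big_endian_correction == 32 - 2 * 8 == 16.  The copy loop
   therefore starts extracting 16 bits into the first source word,
   skipping the empty high-order bytes, while still storing at bit 0
   of TGTBLK.  */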
2511 /* Add a USE expression for REG to the (possibly empty) list pointed
2512 to by CALL_FUSAGE. REG must denote a hard register. */
2515 use_reg (call_fusage, reg)
2516 rtx *call_fusage, reg;
2518 if (GET_CODE (reg) != REG
2519 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2523 = gen_rtx_EXPR_LIST (VOIDmode,
2524 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2527 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2528 starting at REGNO. All of these registers must be hard registers. */
2531 use_regs (call_fusage, regno, nregs)
2538 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2541 for (i = 0; i < nregs; i++)
2542 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2545 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2546 PARALLEL REGS. This is for calls that pass values in multiple
2547 non-contiguous locations. The Irix 6 ABI has examples of this. */
2550 use_group_regs (call_fusage, regs)
2556 for (i = 0; i < XVECLEN (regs, 0); i++)
2558 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2560 /* A NULL entry means the parameter goes both on the stack and in
2561 registers. This can also be a MEM for targets that pass values
2562 partially on the stack and partially in registers. */
2563 if (reg != 0 && GET_CODE (reg) == REG)
2564 use_reg (call_fusage, reg);
2569 /* Determine whether the LEN bytes generated by CONSTFUN can be
2570 stored to memory using several move instructions. CONSTFUNDATA is
2571 a pointer which will be passed as an argument in every CONSTFUN call.
2572 ALIGN is maximum alignment we can assume. Return nonzero if a
2573 call to store_by_pieces should succeed. */
2576 can_store_by_pieces (len, constfun, constfundata, align)
2577 unsigned HOST_WIDE_INT len;
2578 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2582 unsigned HOST_WIDE_INT max_size, l;
2583 HOST_WIDE_INT offset = 0;
2584 enum machine_mode mode, tmode;
2585 enum insn_code icode;
2589 if (! MOVE_BY_PIECES_P (len, align))
2592 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2593 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2594 align = MOVE_MAX * BITS_PER_UNIT;
2596 /* We would first store what we can in the largest integer mode, then go to
2597 successively smaller modes. */
2600 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2605 max_size = STORE_MAX_PIECES + 1;
2606 while (max_size > 1)
2608 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2609 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2610 if (GET_MODE_SIZE (tmode) < max_size)
2613 if (mode == VOIDmode)
2616 icode = mov_optab->handlers[(int) mode].insn_code;
2617 if (icode != CODE_FOR_nothing
2618 && align >= GET_MODE_ALIGNMENT (mode))
2620 unsigned int size = GET_MODE_SIZE (mode);
2627 cst = (*constfun) (constfundata, offset, mode);
2628 if (!LEGITIMATE_CONSTANT_P (cst))
2638 max_size = GET_MODE_SIZE (mode);
2641 /* The code above should have handled everything. */
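/* Annotation (not from the original source): sketch of the mode
   descent above on a hypothetical target with STORE_MAX_PIECES == 8.
   For LEN == 11 the loop first selects DImode and accounts for one
   8-byte piece; SImode then covers nothing (3 bytes remain), HImode
   covers 2 bytes, and QImode the final byte -- provided CONSTFUN
   yields a legitimate constant at each offset and ALIGN permits each
   mode.  */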
2649 /* Generate several move instructions to store LEN bytes generated by
2650 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2651 pointer which will be passed as an argument in every CONSTFUN call.
2652 ALIGN is maximum alignment we can assume. */
2655 store_by_pieces (to, len, constfun, constfundata, align)
2657 unsigned HOST_WIDE_INT len;
2658 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2662 struct store_by_pieces data;
2664 if (! MOVE_BY_PIECES_P (len, align))
2666 to = protect_from_queue (to, 1);
2667 data.constfun = constfun;
2668 data.constfundata = constfundata;
2671 store_by_pieces_1 (&data, align);
2674 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2675 rtx with BLKmode). The caller must pass TO through protect_from_queue
2676 before calling. ALIGN is maximum alignment we can assume. */
2679 clear_by_pieces (to, len, align)
2681 unsigned HOST_WIDE_INT len;
2684 struct store_by_pieces data;
2686 data.constfun = clear_by_pieces_1;
2687 data.constfundata = NULL;
2690 store_by_pieces_1 (&data, align);
2693 /* Callback routine for clear_by_pieces.
2694 Return const0_rtx unconditionally. */
2697 clear_by_pieces_1 (data, offset, mode)
2698 PTR data ATTRIBUTE_UNUSED;
2699 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2700 enum machine_mode mode ATTRIBUTE_UNUSED;
2705 /* Subroutine of clear_by_pieces and store_by_pieces.
2706 Generate several move instructions to store LEN bytes of block TO. (A MEM
2707 rtx with BLKmode). The caller must pass TO through protect_from_queue
2708 before calling. ALIGN is maximum alignment we can assume. */
2711 store_by_pieces_1 (data, align)
2712 struct store_by_pieces *data;
2715 rtx to_addr = XEXP (data->to, 0);
2716 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2717 enum machine_mode mode = VOIDmode, tmode;
2718 enum insn_code icode;
2721 data->to_addr = to_addr;
2723 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2724 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2726 data->explicit_inc_to = 0;
2728 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2730 data->offset = data->len;
2732 /* If storing requires more than two move insns,
2733 copy addresses to registers (to make displacements shorter)
2734 and use post-increment if available. */
2735 if (!data->autinc_to
2736 && move_by_pieces_ninsns (data->len, align) > 2)
2738 /* Determine the main mode we'll be using. */
2739 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2740 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2741 if (GET_MODE_SIZE (tmode) < max_size)
2744 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2746 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2747 data->autinc_to = 1;
2748 data->explicit_inc_to = -1;
2751 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2752 && ! data->autinc_to)
2754 data->to_addr = copy_addr_to_reg (to_addr);
2755 data->autinc_to = 1;
2756 data->explicit_inc_to = 1;
2759 if (!data->autinc_to && CONSTANT_P (to_addr))
2760 data->to_addr = copy_addr_to_reg (to_addr);
2763 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2764 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2765 align = MOVE_MAX * BITS_PER_UNIT;
2767 /* First store what we can in the largest integer mode, then go to
2768 successively smaller modes. */
2770 while (max_size > 1)
2772 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2773 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2774 if (GET_MODE_SIZE (tmode) < max_size)
2777 if (mode == VOIDmode)
2780 icode = mov_optab->handlers[(int) mode].insn_code;
2781 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2782 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2784 max_size = GET_MODE_SIZE (mode);
2787 /* The code above should have handled everything. */
2792 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2793 with move instructions for mode MODE. GENFUN is the gen_... function
2794 to make a move insn for that mode. DATA has all the other info. */
2797 store_by_pieces_2 (genfun, mode, data)
2798 rtx (*genfun) PARAMS ((rtx, ...));
2799 enum machine_mode mode;
2800 struct store_by_pieces *data;
2802 unsigned int size = GET_MODE_SIZE (mode);
2805 while (data->len >= size)
2808 data->offset -= size;
2810 if (data->autinc_to)
2811 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2814 to1 = adjust_address (data->to, mode, data->offset);
2816 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2817 emit_insn (gen_add2_insn (data->to_addr,
2818 GEN_INT (-(HOST_WIDE_INT) size)));
2820 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2821 emit_insn ((*genfun) (to1, cst));
2823 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2824 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2826 if (! data->reverse)
2827 data->offset += size;
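/* Annotation (not from the original source): example of the offset
   walk above.  Storing 4 bytes with 2-byte moves in the forward
   direction touches offsets 0 then 2.  In the reverse (pre-decrement)
   direction data->offset starts at data->len and is decremented by
   SIZE before each store, touching offsets 2 then 0, while the
   explicit_inc_to adjustments keep to_addr in step.  */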
2833 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2834 its length in bytes. */
2837 clear_storage (object, size)
2842 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2843 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2845 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2846 just move a zero. Otherwise, do this a piece at a time. */
2847 if (GET_MODE (object) != BLKmode
2848 && GET_CODE (size) == CONST_INT
2849 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2850 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2853 object = protect_from_queue (object, 1);
2854 size = protect_from_queue (size, 0);
2856 if (GET_CODE (size) == CONST_INT
2857 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2858 clear_by_pieces (object, INTVAL (size), align);
2859 else if (clear_storage_via_clrstr (object, size, align))
2862 retval = clear_storage_via_libcall (object, size);
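/* Annotation (not from the original source): a typical call, clearing
   a 16-byte BLKmode MEM:

       clear_storage (object, GEN_INT (16));

   With a small constant size and suitable alignment this takes the
   clear_by_pieces path; a variable SIZE instead reaches the clrstr
   pattern or, failing that, the memset/bzero library call.  */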
2868 /* A subroutine of clear_storage. Expand a clrstr pattern;
2869 return true if successful. */
2872 clear_storage_via_clrstr (object, size, align)
2876 /* Try the most limited insn first, because there's no point
2877 including more than one in the machine description unless
2878 the more limited one has some advantage. */
2880 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2881 enum machine_mode mode;
2883 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2884 mode = GET_MODE_WIDER_MODE (mode))
2886 enum insn_code code = clrstr_optab[(int) mode];
2887 insn_operand_predicate_fn pred;
2889 if (code != CODE_FOR_nothing
2890 /* We don't need MODE to be narrower than
2891 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2892 the mode mask, as it is returned by the macro, it will
2893 definitely be less than the actual mode mask. */
2894 && ((GET_CODE (size) == CONST_INT
2895 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2896 <= (GET_MODE_MASK (mode) >> 1)))
2897 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2898 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2899 || (*pred) (object, BLKmode))
2900 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2901 || (*pred) (opalign, VOIDmode)))
2904 rtx last = get_last_insn ();
2907 op1 = convert_to_mode (mode, size, 1);
2908 pred = insn_data[(int) code].operand[1].predicate;
2909 if (pred != 0 && ! (*pred) (op1, mode))
2910 op1 = copy_to_mode_reg (mode, op1);
2912 pat = GEN_FCN ((int) code) (object, op1, opalign);
2919 delete_insns_since (last);
2926 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2927 Return the return value of memset, 0 otherwise. */
2930 clear_storage_via_libcall (object, size)
2933 tree call_expr, arg_list, fn, object_tree, size_tree;
2934 enum machine_mode size_mode;
2937 /* OBJECT or SIZE may have been passed through protect_from_queue.
2939 It is unsafe to save the value generated by protect_from_queue
2940 and reuse it later. Consider what happens if emit_queue is
2941 called before the return value from protect_from_queue is used.
2943 Expansion of the CALL_EXPR below will call emit_queue before
2944 we are finished emitting RTL for argument setup. So if we are
2945 not careful we could get the wrong value for an argument.
2947 To avoid this problem we go ahead and emit code to copy OBJECT
2948 and SIZE into new pseudos. We can then place those new pseudos
2949 into an RTL_EXPR and use them later, even after a call to
2952 Note this is not strictly needed for library calls since they
2953 do not call emit_queue before loading their arguments. However,
2954 we may need to have library calls call emit_queue in the future
2955 since failing to do so could cause problems for targets which
2956 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2958 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2960 if (TARGET_MEM_FUNCTIONS)
2961 size_mode = TYPE_MODE (sizetype);
2963 size_mode = TYPE_MODE (unsigned_type_node);
2964 size = convert_to_mode (size_mode, size, 1);
2965 size = copy_to_mode_reg (size_mode, size);
2967 /* It is incorrect to use the libcall calling conventions to call
2968 memset in this context. This could be a user call to memset and
2969 the user may wish to examine the return value from memset. For
2970 targets where libcalls and normal calls have different conventions
2971 for returning pointers, we could end up generating incorrect code.
2973 For convenience, we generate the call to bzero this way as well. */
2975 object_tree = make_tree (ptr_type_node, object);
2976 if (TARGET_MEM_FUNCTIONS)
2977 size_tree = make_tree (sizetype, size);
2979 size_tree = make_tree (unsigned_type_node, size);
2981 fn = clear_storage_libcall_fn (true);
2982 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2983 if (TARGET_MEM_FUNCTIONS)
2984 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2985 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2987 /* Now we have to build up the CALL_EXPR itself. */
2988 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2989 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2990 call_expr, arg_list, NULL_TREE);
2991 TREE_SIDE_EFFECTS (call_expr) = 1;
2993 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2995 /* If we are initializing a readonly value, show the above call
2996 clobbered it. Otherwise, a load from it may erroneously be
2997 hoisted from a loop. */
2998 if (RTX_UNCHANGING_P (object))
2999 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3001 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3004 /* A subroutine of clear_storage_via_libcall. Create the tree node
3005 for the function we use for block clears. The first time FOR_CALL
3006 is true, we call assemble_external. */
3008 static GTY(()) tree block_clear_fn;
3011 clear_storage_libcall_fn (for_call)
3014 static bool emitted_extern;
3015 tree fn = block_clear_fn, args;
3019 if (TARGET_MEM_FUNCTIONS)
3021 fn = get_identifier ("memset");
3022 args = build_function_type_list (ptr_type_node, ptr_type_node,
3023 integer_type_node, sizetype,
3028 fn = get_identifier ("bzero");
3029 args = build_function_type_list (void_type_node, ptr_type_node,
3030 unsigned_type_node, NULL_TREE);
3033 fn = build_decl (FUNCTION_DECL, fn, args);
3034 DECL_EXTERNAL (fn) = 1;
3035 TREE_PUBLIC (fn) = 1;
3036 DECL_ARTIFICIAL (fn) = 1;
3037 TREE_NOTHROW (fn) = 1;
3039 block_clear_fn = fn;
3042 if (for_call && !emitted_extern)
3044 emitted_extern = true;
3045 make_decl_rtl (fn, NULL);
3046 assemble_external (fn);
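/* Annotation (not from the original source): the FUNCTION_DECL built
   above corresponds to one of these C declarations, depending on
   TARGET_MEM_FUNCTIONS:

       extern void *memset (void *, int, size_t);
       extern void bzero (void *, unsigned int);  */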
3052 /* Generate code to copy Y into X.
3053 Both Y and X must have the same mode, except that
3054 Y can be a constant with VOIDmode.
3055 This mode cannot be BLKmode; use emit_block_move for that.
3057 Return the last instruction emitted. */
3060 emit_move_insn (x, y)
3063 enum machine_mode mode = GET_MODE (x);
3064 rtx y_cst = NULL_RTX;
3067 x = protect_from_queue (x, 1);
3068 y = protect_from_queue (y, 0);
3070 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3073 /* Never force constant_p_rtx to memory. */
3074 if (GET_CODE (y) == CONSTANT_P_RTX)
3076 else if (CONSTANT_P (y))
3079 && FLOAT_MODE_P (GET_MODE (x))
3080 && (last_insn = compress_float_constant (x, y)))
3083 if (!LEGITIMATE_CONSTANT_P (y))
3086 y = force_const_mem (mode, y);
3090 /* If X or Y are memory references, verify that their addresses are valid
3092 if (GET_CODE (x) == MEM
3093 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3094 && ! push_operand (x, GET_MODE (x)))
3096 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3097 x = validize_mem (x);
3099 if (GET_CODE (y) == MEM
3100 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3102 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3103 y = validize_mem (y);
3105 if (mode == BLKmode)
3108 last_insn = emit_move_insn_1 (x, y);
3110 if (y_cst && GET_CODE (x) == REG)
3111 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3116 /* Low level part of emit_move_insn.
3117 Called just like emit_move_insn, but assumes X and Y
3118 are basically valid. */
3121 emit_move_insn_1 (x, y)
3124 enum machine_mode mode = GET_MODE (x);
3125 enum machine_mode submode;
3126 enum mode_class class = GET_MODE_CLASS (mode);
3128 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3131 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3133 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3135 /* Expand complex moves by moving real part and imag part, if possible. */
3136 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3137 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
3139 (class == MODE_COMPLEX_INT
3140 ? MODE_INT : MODE_FLOAT),
3142 && (mov_optab->handlers[(int) submode].insn_code
3143 != CODE_FOR_nothing))
3145 /* Don't split destination if it is a stack push. */
3146 int stack = push_operand (x, GET_MODE (x));
3148 #ifdef PUSH_ROUNDING
3149 /* In case we output to the stack, but the size is smaller than the machine
3150 can push exactly, we need to use move instructions. */
3152 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3153 != GET_MODE_SIZE (submode)))
3156 HOST_WIDE_INT offset1, offset2;
3158 /* Do not use anti_adjust_stack, since we don't want to update
3159 stack_pointer_delta. */
3160 temp = expand_binop (Pmode,
3161 #ifdef STACK_GROWS_DOWNWARD
3169 (GET_MODE_SIZE (GET_MODE (x)))),
3170 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3172 if (temp != stack_pointer_rtx)
3173 emit_move_insn (stack_pointer_rtx, temp);
3175 #ifdef STACK_GROWS_DOWNWARD
3177 offset2 = GET_MODE_SIZE (submode);
3179 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3180 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3181 + GET_MODE_SIZE (submode));
3184 emit_move_insn (change_address (x, submode,
3185 gen_rtx_PLUS (Pmode,
3187 GEN_INT (offset1))),
3188 gen_realpart (submode, y));
3189 emit_move_insn (change_address (x, submode,
3190 gen_rtx_PLUS (Pmode,
3192 GEN_INT (offset2))),
3193 gen_imagpart (submode, y));
3197 /* If this is a stack, push the highpart first, so it
3198 will be in the argument order.
3200 In that case, change_address is used only to convert
3201 the mode, not to change the address. */
3204 /* Note that the real part always precedes the imag part in memory
3205 regardless of machine's endianness. */
3206 #ifdef STACK_GROWS_DOWNWARD
3207 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3208 (gen_rtx_MEM (submode, XEXP (x, 0)),
3209 gen_imagpart (submode, y)));
3210 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3211 (gen_rtx_MEM (submode, XEXP (x, 0)),
3212 gen_realpart (submode, y)));
3214 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3215 (gen_rtx_MEM (submode, XEXP (x, 0)),
3216 gen_realpart (submode, y)));
3217 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3218 (gen_rtx_MEM (submode, XEXP (x, 0)),
3219 gen_imagpart (submode, y)));
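/* Annotation (not from the original source): on a downward-growing
   stack the imaginary part is pushed first, so after both pushes the
   real part ends up at the lower address, matching the rule that the
   real part always precedes the imaginary part in memory.  The
   upward-growing case pushes the real part first for the same
   reason.  */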
3224 rtx realpart_x, realpart_y;
3225 rtx imagpart_x, imagpart_y;
3227 /* If this is a complex value with each part being smaller than a
3228 word, the usual calling sequence will likely pack the pieces into
3229 a single register. Unfortunately, SUBREG of hard registers only
3230 deals in terms of words, so we have a problem converting input
3231 arguments to the CONCAT of two registers that is used elsewhere
3232 for complex values. If this is before reload, we can copy it into
3233 memory and reload. FIXME, we should see about using extract and
3234 insert on integer registers, but complex short and complex char
3235 variables should be rarely used. */
3236 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3237 && (reload_in_progress | reload_completed) == 0)
3240 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3242 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3244 if (packed_dest_p || packed_src_p)
3246 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3247 ? MODE_FLOAT : MODE_INT);
3249 enum machine_mode reg_mode
3250 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3252 if (reg_mode != BLKmode)
3254 rtx mem = assign_stack_temp (reg_mode,
3255 GET_MODE_SIZE (mode), 0);
3256 rtx cmem = adjust_address (mem, mode, 0);
3259 = N_("function using short complex types cannot be inline");
3263 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3265 emit_move_insn_1 (cmem, y);
3266 return emit_move_insn_1 (sreg, mem);
3270 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3272 emit_move_insn_1 (mem, sreg);
3273 return emit_move_insn_1 (x, cmem);
3279 realpart_x = gen_realpart (submode, x);
3280 realpart_y = gen_realpart (submode, y);
3281 imagpart_x = gen_imagpart (submode, x);
3282 imagpart_y = gen_imagpart (submode, y);
3284 /* Show the output dies here. This is necessary for SUBREGs
3285 of pseudos since we cannot track their lifetimes correctly;
3286 hard regs shouldn't appear here except as return values.
3287 We never want to emit such a clobber after reload. */
3289 && ! (reload_in_progress || reload_completed)
3290 && (GET_CODE (realpart_x) == SUBREG
3291 || GET_CODE (imagpart_x) == SUBREG))
3292 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3295 (realpart_x, realpart_y));
3296 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3297 (imagpart_x, imagpart_y));
3300 return get_last_insn ();
3303 /* This will handle any multi-word or full-word mode that lacks a move_insn
3304 pattern. However, you will get better code if you define such patterns,
3305 even if they must turn into multiple assembler instructions. */
3306 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3313 #ifdef PUSH_ROUNDING
3315 /* If X is a push on the stack, do the push now and replace
3316 X with a reference to the stack pointer. */
3317 if (push_operand (x, GET_MODE (x)))
3322 /* Do not use anti_adjust_stack, since we don't want to update
3323 stack_pointer_delta. */
3324 temp = expand_binop (Pmode,
3325 #ifdef STACK_GROWS_DOWNWARD
3333 (GET_MODE_SIZE (GET_MODE (x)))),
3334 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3336 if (temp != stack_pointer_rtx)
3337 emit_move_insn (stack_pointer_rtx, temp);
3339 code = GET_CODE (XEXP (x, 0));
3341 /* Just hope that small offsets off SP are OK. */
3342 if (code == POST_INC)
3343 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3344 GEN_INT (-((HOST_WIDE_INT)
3345 GET_MODE_SIZE (GET_MODE (x)))));
3346 else if (code == POST_DEC)
3347 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3348 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3350 temp = stack_pointer_rtx;
3352 x = change_address (x, VOIDmode, temp);
3356 /* If we are in reload, see if either operand is a MEM whose address
3357 is scheduled for replacement. */
3358 if (reload_in_progress && GET_CODE (x) == MEM
3359 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3360 x = replace_equiv_address_nv (x, inner);
3361 if (reload_in_progress && GET_CODE (y) == MEM
3362 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3363 y = replace_equiv_address_nv (y, inner);
3369 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3372 rtx xpart = operand_subword (x, i, 1, mode);
3373 rtx ypart = operand_subword (y, i, 1, mode);
3375 /* If we can't get a part of Y, put Y into memory if it is a
3376 constant. Otherwise, force it into a register. If we still
3377 can't get a part of Y, abort. */
3378 if (ypart == 0 && CONSTANT_P (y))
3380 y = force_const_mem (mode, y);
3381 ypart = operand_subword (y, i, 1, mode);
3383 else if (ypart == 0)
3384 ypart = operand_subword_force (y, i, mode);
3386 if (xpart == 0 || ypart == 0)
3389 need_clobber |= (GET_CODE (xpart) == SUBREG);
3391 last_insn = emit_move_insn (xpart, ypart);
3397 /* Show the output dies here. This is necessary for SUBREGs
3398 of pseudos since we cannot track their lifetimes correctly;
3399 hard regs shouldn't appear here except as return values.
3400 We never want to emit such a clobber after reload. */
3402 && ! (reload_in_progress || reload_completed)
3403 && need_clobber != 0)
3404 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
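/* Annotation (not from the original source): example of the word-wise
   fallback above.  Moving a DImode value on a 32-bit target with no
   movdi pattern iterates twice, emitting one SImode move per word via
   operand_subword; need_clobber becomes set when a destination word
   is a SUBREG of a pseudo, which is what triggers the CLOBBER emitted
   above.  */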
3414 /* If Y is representable exactly in a narrower mode, and the target can
3415 perform the extension directly from constant or memory, then emit the
3416 move as an extension. */
3419 compress_float_constant (x, y)
3422 enum machine_mode dstmode = GET_MODE (x);
3423 enum machine_mode orig_srcmode = GET_MODE (y);
3424 enum machine_mode srcmode;
3427 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3429 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3430 srcmode != orig_srcmode;
3431 srcmode = GET_MODE_WIDER_MODE (srcmode))
3434 rtx trunc_y, last_insn;
3436 /* Skip if the target can't extend this way. */
3437 ic = can_extend_p (dstmode, srcmode, 0);
3438 if (ic == CODE_FOR_nothing)
3441 /* Skip if the narrowed value isn't exact. */
3442 if (! exact_real_truncate (srcmode, &r))
3445 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3447 if (LEGITIMATE_CONSTANT_P (trunc_y))
3449 /* Skip if the target needs extra instructions to perform the extension. */
3451 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3454 else if (float_extend_from_mem[dstmode][srcmode])
3455 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3459 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3460 last_insn = get_last_insn ();
3462 if (GET_CODE (x) == REG)
3463 REG_NOTES (last_insn)
3464 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
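/* Annotation (not from the original source): example of the
   compression above.  Moving the DFmode constant 1.0 into a register
   first tries SFmode; 1.0 truncates exactly, so on a target with an
   extendsfdf2 pattern the move becomes a float_extend of the narrower
   constant (or of its memory copy), which is usually cheaper than
   materializing the full double.  */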
3472 /* Pushing data onto the stack. */
3474 /* Push a block of length SIZE (perhaps variable)
3475 and return an rtx to address the beginning of the block.
3476 Note that it is not possible for the value returned to be a QUEUED.
3477 The value may be virtual_outgoing_args_rtx.
3479 EXTRA is the number of bytes of padding to push in addition to SIZE.
3480 BELOW nonzero means this padding comes at low addresses;
3481 otherwise, the padding comes at high addresses. */
3484 push_block (size, extra, below)
3490 size = convert_modes (Pmode, ptr_mode, size, 1);
3491 if (CONSTANT_P (size))
3492 anti_adjust_stack (plus_constant (size, extra));
3493 else if (GET_CODE (size) == REG && extra == 0)
3494 anti_adjust_stack (size);
3497 temp = copy_to_mode_reg (Pmode, size);
3499 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3500 temp, 0, OPTAB_LIB_WIDEN);
3501 anti_adjust_stack (temp);
3504 #ifndef STACK_GROWS_DOWNWARD
3510 temp = virtual_outgoing_args_rtx;
3511 if (extra != 0 && below)
3512 temp = plus_constant (temp, extra);
3516 if (GET_CODE (size) == CONST_INT)
3517 temp = plus_constant (virtual_outgoing_args_rtx,
3518 -INTVAL (size) - (below ? 0 : extra));
3519 else if (extra != 0 && !below)
3520 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3521 negate_rtx (Pmode, plus_constant (size, extra)));
3523 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3524 negate_rtx (Pmode, size));
3527 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
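/* Annotation (not from the original source): worked example of the
   address arithmetic above.  On a downward-growing stack with
   SIZE == (const_int 32), EXTRA == 4 and BELOW == 0, the stack is
   adjusted by 36 bytes and the returned block address is
   virtual_outgoing_args_rtx - 32 - 4; the 4 bytes of padding end up
   at the high-address end of the block, as the comment on BELOW
   requires.  */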
3530 #ifdef PUSH_ROUNDING
3532 /* Emit single push insn. */
3535 emit_single_push_insn (mode, x, type)
3537 enum machine_mode mode;
3541 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3543 enum insn_code icode;
3544 insn_operand_predicate_fn pred;
3546 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3547 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3548 a MEM representing the push operation to the move expander. */
3549 icode = push_optab->handlers[(int) mode].insn_code;
3550 if (icode != CODE_FOR_nothing)
3552 if (((pred = insn_data[(int) icode].operand[0].predicate)
3553 && !((*pred) (x, mode))))
3554 x = force_reg (mode, x);
3555 emit_insn (GEN_FCN (icode) (x));
3558 if (GET_MODE_SIZE (mode) == rounded_size)
3559 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3562 #ifdef STACK_GROWS_DOWNWARD
3563 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3564 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3566 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3567 GEN_INT (rounded_size));
3569 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3572 dest = gen_rtx_MEM (mode, dest_addr);
3576 set_mem_attributes (dest, type, 1);
3578 if (flag_optimize_sibling_calls)
3579 /* Function incoming arguments may overlap with sibling call
3580 outgoing arguments and we cannot allow reordering of reads
3581 from function arguments with stores to outgoing arguments
3582 of sibling calls. */
3583 set_mem_alias_set (dest, 0);
3585 emit_move_insn (dest, x);
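/* Annotation (not from the original source): example of the rounding
   above.  Pushing an HImode value where PUSH_ROUNDING rounds 2 bytes
   up to 4 cannot use the plain STACK_PUSH_CODE address; on a
   downward-growing stack dest_addr instead becomes

       (pre_modify (reg sp) (plus (reg sp) (const_int -4)))

   so the stack pointer moves by the rounded size while the HImode
   store itself writes only 2 bytes.  */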
3589 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3591 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3593 SIZE is an rtx for the size of data to be copied (in bytes),
3594 needed only if X is BLKmode.
3596 ALIGN (in bits) is maximum alignment we can assume.
3598 If PARTIAL and REG are both nonzero, then copy that many of the first
3599 words of X into registers starting with REG, and push the rest of X.
3600 The amount of space pushed is decreased by PARTIAL words,
3601 rounded *down* to a multiple of PARM_BOUNDARY.
3602 REG must be a hard register in this case.
3603 If REG is zero but PARTIAL is not, take all other actions for an
3604 argument partially in registers, but do not actually load any
3607 EXTRA is the amount in bytes of extra space to leave next to this arg.
3608 This is ignored if an argument block has already been allocated.
3610 On a machine that lacks real push insns, ARGS_ADDR is the address of
3611 the bottom of the argument block for this call. We use indexing off there
3612 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3613 argument block has not been preallocated.
3615 ARGS_SO_FAR is the size of args previously pushed for this call.
3617 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3618 for arguments passed in registers. If nonzero, it will be the number
3619 of bytes required. */
3622 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3623 args_addr, args_so_far, reg_parm_stack_space,
3626 enum machine_mode mode;
3635 int reg_parm_stack_space;
3639 enum direction stack_direction
3640 #ifdef STACK_GROWS_DOWNWARD
3646 /* Decide where to pad the argument: `downward' for below,
3647 `upward' for above, or `none' for don't pad it.
3648 Default is below for small data on big-endian machines; else above. */
3649 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3651 /* Invert direction if stack is post-decrement.
3653 if (STACK_PUSH_CODE == POST_DEC)
3654 if (where_pad != none)
3655 where_pad = (where_pad == downward ? upward : downward);
3657 xinner = x = protect_from_queue (x, 0);
3659 if (mode == BLKmode)
3661 /* Copy a block into the stack, entirely or partially. */
3664 int used = partial * UNITS_PER_WORD;
3665 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3673 /* USED is now the # of bytes we need not copy to the stack
3674 because registers will take care of them. */
3677 xinner = adjust_address (xinner, BLKmode, used);
3679 /* If the partial register-part of the arg counts in its stack size,
3680 skip the part of stack space corresponding to the registers.
3681 Otherwise, start copying to the beginning of the stack space,
3682 by setting SKIP to 0. */
3683 skip = (reg_parm_stack_space == 0) ? 0 : used;
3685 #ifdef PUSH_ROUNDING
3686 /* Do it with several push insns if that doesn't take lots of insns
3687 and if there is no difficulty with push insns that skip bytes
3688 on the stack for alignment purposes. */
3691 && GET_CODE (size) == CONST_INT
3693 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3694 /* Here we avoid the case of a structure whose weak alignment
3695 forces many pushes of a small amount of data,
3696 and such small pushes do rounding that causes trouble. */
3697 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3698 || align >= BIGGEST_ALIGNMENT
3699 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3700 == (align / BITS_PER_UNIT)))
3701 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3703 /* Push padding now if padding above and stack grows down,
3704 or if padding below and stack grows up.
3705 But if space already allocated, this has already been done. */
3706 if (extra && args_addr == 0
3707 && where_pad != none && where_pad != stack_direction)
3708 anti_adjust_stack (GEN_INT (extra));
3710 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3713 #endif /* PUSH_ROUNDING */
3717 /* Otherwise make space on the stack and copy the data
3718 to the address of that space. */
3720 /* Deduct words put into registers from the size we must copy. */
3723 if (GET_CODE (size) == CONST_INT)
3724 size = GEN_INT (INTVAL (size) - used);
3726 size = expand_binop (GET_MODE (size), sub_optab, size,
3727 GEN_INT (used), NULL_RTX, 0,
3731 /* Get the address of the stack space.
3732 In this case, we do not deal with EXTRA separately.
3733 A single stack adjust will do. */
3736 temp = push_block (size, extra, where_pad == downward);
3739 else if (GET_CODE (args_so_far) == CONST_INT)
3740 temp = memory_address (BLKmode,
3741 plus_constant (args_addr,
3742 skip + INTVAL (args_so_far)));
3744 temp = memory_address (BLKmode,
3745 plus_constant (gen_rtx_PLUS (Pmode,
3750 if (!ACCUMULATE_OUTGOING_ARGS)
3752 /* If the source is referenced relative to the stack pointer,
3753 copy it to another register to stabilize it. We do not need
3754 to do this if we know that we won't be changing sp. */
3756 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3757 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3758 temp = copy_to_reg (temp);
3761 target = gen_rtx_MEM (BLKmode, temp);
3765 set_mem_attributes (target, type, 1);
3766 /* Function incoming arguments may overlap with sibling call
3767 outgoing arguments and we cannot allow reordering of reads
3768 from function arguments with stores to outgoing arguments
3769 of sibling calls. */
3770 set_mem_alias_set (target, 0);
3773 /* ALIGN may well be larger than the alignment TYPE implies, e.g. due to
3774 PARM_BOUNDARY. Assume the caller isn't lying. */
3775 set_mem_align (target, align);
3777 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3780 else if (partial > 0)
3782 /* Scalar partly in registers. */
3784 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3787 /* # words of start of argument
3788 that we must make space for but need not store. */
3789 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3790 int args_offset = INTVAL (args_so_far);
3793 /* Push padding now if padding above and stack grows down,
3794 or if padding below and stack grows up.
3795 But if space already allocated, this has already been done. */
3796 if (extra && args_addr == 0
3797 && where_pad != none && where_pad != stack_direction)
3798 anti_adjust_stack (GEN_INT (extra));
3800 /* If we make space by pushing it, we might as well push
3801 the real data. Otherwise, we can leave OFFSET nonzero
3802 and leave the space uninitialized. */
3806 /* Now NOT_STACK gets the number of words that we don't need to
3807 allocate on the stack. */
3808 not_stack = partial - offset;
3810 /* If the partial register-part of the arg counts in its stack size,
3811 skip the part of stack space corresponding to the registers.
3812 Otherwise, start copying to the beginning of the stack space,
3813 by setting SKIP to 0. */
3814 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3816 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3817 x = validize_mem (force_const_mem (mode, x));
3819 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3820 SUBREGs of such registers are not allowed. */
3821 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3822 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3823 x = copy_to_reg (x);
3825 /* Loop over all the words allocated on the stack for this arg. */
3826 /* We can do it by words, because any scalar bigger than a word
3827 has a size that is a multiple of a word. */
3828 #ifndef PUSH_ARGS_REVERSED
3829 for (i = not_stack; i < size; i++)
3831 for (i = size - 1; i >= not_stack; i--)
3833 if (i >= not_stack + offset)
3834 emit_push_insn (operand_subword_force (x, i, mode),
3835 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3837 GEN_INT (args_offset + ((i - not_stack + skip)
3839 reg_parm_stack_space, alignment_pad);
3844 rtx target = NULL_RTX;
3847 /* Push padding now if padding above and stack grows down,
3848 or if padding below and stack grows up.
3849 But if space already allocated, this has already been done. */
3850 if (extra && args_addr == 0
3851 && where_pad != none && where_pad != stack_direction)
3852 anti_adjust_stack (GEN_INT (extra));
3854 #ifdef PUSH_ROUNDING
3855 if (args_addr == 0 && PUSH_ARGS)
3856 emit_single_push_insn (mode, x, type);
3860 if (GET_CODE (args_so_far) == CONST_INT)
3862 = memory_address (mode,
3863 plus_constant (args_addr,
3864 INTVAL (args_so_far)));
3866 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3869 dest = gen_rtx_MEM (mode, addr);
3872 set_mem_attributes (dest, type, 1);
3873 /* Function incoming arguments may overlap with sibling call
3874 outgoing arguments and we cannot allow reordering of reads
3875 from function arguments with stores to outgoing arguments
3876 of sibling calls. */
3877 set_mem_alias_set (dest, 0);
3880 emit_move_insn (dest, x);
3884 /* If part should go in registers, copy that part
3885 into the appropriate registers. Do this now, at the end,
3886 since mem-to-mem copies above may do function calls. */
3887 if (partial > 0 && reg != 0)
3889 /* Handle calls that pass values in multiple non-contiguous locations.
3890 The Irix 6 ABI has examples of this. */
3891 if (GET_CODE (reg) == PARALLEL)
3892 emit_group_load (reg, x, -1); /* ??? size? */
3894 move_block_to_reg (REGNO (reg), x, partial, mode);
3897 if (extra && args_addr == 0 && where_pad == stack_direction)
3898 anti_adjust_stack (GEN_INT (extra));
3900 if (alignment_pad && args_addr == 0)
3901 anti_adjust_stack (alignment_pad);
3904 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3912 /* Only registers can be subtargets. */
3913 || GET_CODE (x) != REG
3914 /* If the register is readonly, it can't be set more than once. */
3915 || RTX_UNCHANGING_P (x)
3916 /* Don't use hard regs to avoid extending their life. */
3917 || REGNO (x) < FIRST_PSEUDO_REGISTER
3918 /* Avoid subtargets inside loops,
3919 since they hide some invariant expressions. */
3920 || preserve_subexpressions_p ())
3924 /* Expand an assignment that stores the value of FROM into TO.
3925 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3926 (This may contain a QUEUED rtx;
3927 if the value is constant, this rtx is a constant.)
3928 Otherwise, the returned value is NULL_RTX.
3930 SUGGEST_REG is no longer actually used.
3931 It used to mean, copy the value through a register
3932 and return that register, if that is possible.
3933 We now use WANT_VALUE to decide whether to do this. */
3936 expand_assignment (to, from, want_value, suggest_reg)
3939 int suggest_reg ATTRIBUTE_UNUSED;
3944 /* Don't crash if the lhs of the assignment was erroneous. */
3946 if (TREE_CODE (to) == ERROR_MARK)
3948 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3949 return want_value ? result : NULL_RTX;
3952 /* Assignment of a structure component needs special treatment
3953 if the structure component's rtx is not simply a MEM.
3954 Assignment of an array element at a constant index, and assignment of
3955 an array element in an unaligned packed structure field, have the same problem. */
3958 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3959 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3961 enum machine_mode mode1;
3962 HOST_WIDE_INT bitsize, bitpos;
3970 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3971 &unsignedp, &volatilep);
3973 /* If we are going to use store_bit_field and extract_bit_field,
3974 make sure to_rtx will be safe for multiple use. */
3976 if (mode1 == VOIDmode && want_value)
3977 tem = stabilize_reference (tem);
3979 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3983 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3985 if (GET_CODE (to_rtx) != MEM)
3988 #ifdef POINTERS_EXTEND_UNSIGNED
3989 if (GET_MODE (offset_rtx) != Pmode)
3990 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3992 if (GET_MODE (offset_rtx) != ptr_mode)
3993 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3996 /* A constant address in TO_RTX can have VOIDmode; we must not try
3997 to call force_reg in that case. Avoid that case. */
3998 if (GET_CODE (to_rtx) == MEM
3999 && GET_MODE (to_rtx) == BLKmode
4000 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4002 && (bitpos % bitsize) == 0
4003 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4004 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4006 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4010 to_rtx = offset_address (to_rtx, offset_rtx,
4011 highest_pow2_factor_for_type (TREE_TYPE (to),
4015 if (GET_CODE (to_rtx) == MEM)
4017 /* If the field is at offset zero, we could have been given the
4018 DECL_RTX of the parent struct. Don't munge it. */
4019 to_rtx = shallow_copy_rtx (to_rtx);
4021 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4024 /* Deal with volatile and readonly fields. The former is only done
4025 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4026 if (volatilep && GET_CODE (to_rtx) == MEM)
4028 if (to_rtx == orig_to_rtx)
4029 to_rtx = copy_rtx (to_rtx);
4030 MEM_VOLATILE_P (to_rtx) = 1;
4033 if (TREE_CODE (to) == COMPONENT_REF
4034 && TREE_READONLY (TREE_OPERAND (to, 1)))
4036 if (to_rtx == orig_to_rtx)
4037 to_rtx = copy_rtx (to_rtx);
4038 RTX_UNCHANGING_P (to_rtx) = 1;
4041 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4043 if (to_rtx == orig_to_rtx)
4044 to_rtx = copy_rtx (to_rtx);
4045 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4048 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4050 /* Spurious cast for HPUX compiler. */
4051 ? ((enum machine_mode)
4052 TYPE_MODE (TREE_TYPE (to)))
4054 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4056 preserve_temp_slots (result);
4060 /* If the value is meaningful, convert RESULT to the proper mode.
4061 Otherwise, return nothing. */
4062 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4063 TYPE_MODE (TREE_TYPE (from)),
4065 TREE_UNSIGNED (TREE_TYPE (to)))
4069 /* If the rhs is a function call and its value is not an aggregate,
4070 call the function before we start to compute the lhs.
4071 This is needed for correct code for cases such as
4072 val = setjmp (buf) on machines where reference to val
4073 requires loading up part of an address in a separate insn.
4075 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4076 since it might be a promoted variable where the zero- or sign- extension
4077 needs to be done. Handling this in the normal way is safe because no
4078 computation is done before the call. */
4079 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4080 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4081 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4082 && GET_CODE (DECL_RTL (to)) == REG))
4087 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4089 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4091 /* Handle calls that return values in multiple non-contiguous locations.
4092 The Irix 6 ABI has examples of this. */
4093 if (GET_CODE (to_rtx) == PARALLEL)
4094 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4095 else if (GET_MODE (to_rtx) == BLKmode)
4096 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4099 #ifdef POINTERS_EXTEND_UNSIGNED
4100 if (POINTER_TYPE_P (TREE_TYPE (to))
4101 && GET_MODE (to_rtx) != GET_MODE (value))
4102 value = convert_memory_address (GET_MODE (to_rtx), value);
4104 emit_move_insn (to_rtx, value);
4106 preserve_temp_slots (to_rtx);
4109 return want_value ? to_rtx : NULL_RTX;
4112 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4113 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4116 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4118 /* Don't move directly into a return register. */
4119 if (TREE_CODE (to) == RESULT_DECL
4120 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4125 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4127 if (GET_CODE (to_rtx) == PARALLEL)
4128 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4130 emit_move_insn (to_rtx, temp);
4132 preserve_temp_slots (to_rtx);
4135 return want_value ? to_rtx : NULL_RTX;
4138 /* In case we are returning the contents of an object which overlaps
4139 the place the value is being stored, use a safe function when copying
4140 a value through a pointer into a structure value return block. */
4141 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4142 && current_function_returns_struct
4143 && !current_function_returns_pcc_struct)
4148 size = expr_size (from);
4149 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4151 if (TARGET_MEM_FUNCTIONS)
4152 emit_library_call (memmove_libfunc, LCT_NORMAL,
4153 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4154 XEXP (from_rtx, 0), Pmode,
4155 convert_to_mode (TYPE_MODE (sizetype),
4156 size, TREE_UNSIGNED (sizetype)),
4157 TYPE_MODE (sizetype));
4159 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4160 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4161 XEXP (to_rtx, 0), Pmode,
4162 convert_to_mode (TYPE_MODE (integer_type_node),
4164 TREE_UNSIGNED (integer_type_node)),
4165 TYPE_MODE (integer_type_node));
4167 preserve_temp_slots (to_rtx);
4170 return want_value ? to_rtx : NULL_RTX;
4173 /* Compute FROM and store the value in the rtx we got. */
4176 result = store_expr (from, to_rtx, want_value);
4177 preserve_temp_slots (result);
4180 return want_value ? result : NULL_RTX;
4183 /* Generate code for computing expression EXP,
4184 and storing the value into TARGET.
4185 TARGET may contain a QUEUED rtx.
4187 If WANT_VALUE is nonzero, return a copy of the value
4188 not in TARGET, so that we can be sure to use the proper
4189 value in a containing expression even if TARGET has something
4190 else stored in it. If possible, we copy the value through a pseudo
4191 and return that pseudo. Or, if the value is constant, we try to
4192 return the constant. In some cases, we return a pseudo
4193 copied *from* TARGET.
4195 If the mode is BLKmode then we may return TARGET itself.
4196 It turns out that in BLKmode it doesn't cause a problem,
4197 because C has no operators that could combine two different
4198 assignments into the same BLKmode object with different values
4199 with no sequence point. Will other languages need this to be fixed someday?
4202 If WANT_VALUE is 0, we return NULL, to make sure
4203 to catch quickly any cases where the caller uses the value
4204 and fails to set WANT_VALUE. */
4207 store_expr (exp, target, want_value)
4213 int dont_return_target = 0;
4214 int dont_store_target = 0;
4216 if (TREE_CODE (exp) == COMPOUND_EXPR)
4218 /* Perform the first part of the compound expression, then assign from the second part. */
4220 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4222 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4224 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4226 /* For conditional expression, get safe form of the target. Then
4227 test the condition, doing the appropriate assignment on either
4228 side. This avoids the creation of unnecessary temporaries.
4229 For non-BLKmode, it is more efficient not to do this. */
4231 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4234 target = protect_from_queue (target, 1);
4236 do_pending_stack_adjust ();
4238 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4239 start_cleanup_deferral ();
4240 store_expr (TREE_OPERAND (exp, 1), target, 0);
4241 end_cleanup_deferral ();
4243 emit_jump_insn (gen_jump (lab2));
4246 start_cleanup_deferral ();
4247 store_expr (TREE_OPERAND (exp, 2), target, 0);
4248 end_cleanup_deferral ();
4253 return want_value ? target : NULL_RTX;
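/* Annotation (not from the original source): control-flow sketch of
   the BLKmode COND_EXPR case above, for "target = cond ? a : b":

       jumpifnot (cond) --> lab1
       store_expr (a, target)
       jump lab2
     lab1:
       store_expr (b, target)
     lab2:

   Each arm stores directly into TARGET, so no temporary for the
   conditional's value is created.  */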
4255 else if (queued_subexp_p (target))
4256 /* If target contains a postincrement, let's not risk
4257 using it as the place to generate the rhs. */
4259 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4261 /* Expand EXP into a new pseudo. */
4262 temp = gen_reg_rtx (GET_MODE (target));
4263 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4266 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4268 /* If target is volatile, ANSI requires accessing the value
4269 *from* the target, if it is accessed. So make that happen.
4270 In no case return the target itself. */
4271 if (! MEM_VOLATILE_P (target) && want_value)
4272 dont_return_target = 1;
4274 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4275 && GET_MODE (target) != BLKmode)
4276 /* If target is in memory and caller wants value in a register instead,
4277 arrange that. Pass TARGET as target for expand_expr so that,
4278 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4279 We know expand_expr will not use the target in that case.
4280 Don't do this if TARGET is volatile because we are supposed
4281 to write it and then read it. */
4283 temp = expand_expr (exp, target, GET_MODE (target), 0);
4284 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4286 /* If TEMP is already in the desired TARGET, only copy it from
4287 memory and don't store it there again. */
4289 || (rtx_equal_p (temp, target)
4290 && ! side_effects_p (temp) && ! side_effects_p (target)))
4291 dont_store_target = 1;
4292 temp = copy_to_reg (temp);
4294 dont_return_target = 1;
4296 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4297 /* If this is a scalar in a register that is stored in a wider mode
4298 than the declared mode, compute the result into its declared mode
4299 and then convert to the wider mode. Our value is the computed
4302 rtx inner_target = 0;
4304 /* If we don't want a value, we can do the conversion inside EXP,
4305 which will often result in some optimizations. Do the conversion
4306 in two steps: first change the signedness, if needed, then
4307 the extend. But don't do this if the type of EXP is a subtype
4308 of something else since then the conversion might involve
4309 more than just converting modes. */
4310 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4311 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4313 if (TREE_UNSIGNED (TREE_TYPE (exp))
4314 != SUBREG_PROMOTED_UNSIGNED_P (target))
4316 ((*lang_hooks.types.signed_or_unsigned_type)
4317 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4319 exp = convert ((*lang_hooks.types.type_for_mode)
4320 (GET_MODE (SUBREG_REG (target)),
4321 SUBREG_PROMOTED_UNSIGNED_P (target)),
4324 inner_target = SUBREG_REG (target);
4327 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4329 /* If TEMP is a volatile MEM and we want a result value, make
4330 the access now so it gets done only once. Likewise if
4331 it contains TARGET. */
4332 if (GET_CODE (temp) == MEM && want_value
4333 && (MEM_VOLATILE_P (temp)
4334 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4335 temp = copy_to_reg (temp);
4337 /* If TEMP is a VOIDmode constant, use convert_modes to make
4338 sure that we properly convert it. */
4339 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4341 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4342 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4343 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4344 GET_MODE (target), temp,
4345 SUBREG_PROMOTED_UNSIGNED_P (target));
4348 convert_move (SUBREG_REG (target), temp,
4349 SUBREG_PROMOTED_UNSIGNED_P (target));
4351 /* If we promoted a constant, change the mode back down to match
4352 target. Otherwise, the caller might get confused by a result whose
4353 mode is larger than expected. */
4355 if (want_value && GET_MODE (temp) != GET_MODE (target))
4357 if (GET_MODE (temp) != VOIDmode)
4359 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4360 SUBREG_PROMOTED_VAR_P (temp) = 1;
4361 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4362 SUBREG_PROMOTED_UNSIGNED_P (target));
4365 temp = convert_modes (GET_MODE (target),
4366 GET_MODE (SUBREG_REG (target)),
4367 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4370 return want_value ? temp : NULL_RTX;
4374 temp = expand_expr (exp, target, GET_MODE (target), 0);
4375 /* Return TARGET if it's a specified hardware register.
4376 If TARGET is a volatile mem ref, either return TARGET
4377 or return a reg copied *from* TARGET; ANSI requires this.
4379 Otherwise, if TEMP is not TARGET, return TEMP
4380 if it is constant (for efficiency),
4381 or if we really want the correct value. */
4382 if (!(target && GET_CODE (target) == REG
4383 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4384 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4385 && ! rtx_equal_p (temp, target)
4386 && (CONSTANT_P (temp) || want_value))
4387 dont_return_target = 1;
4390 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4391 the same as that of TARGET, adjust the constant. This is needed, for
4392 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4394 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4395 && TREE_CODE (exp) != ERROR_MARK
4396 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4397 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4398 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4400 /* If value was not generated in the target, store it there.
4401 Convert the value to TARGET's type first if necessary.
4402 If TEMP and TARGET compare equal according to rtx_equal_p, but
4403 one or both of them are volatile memory refs, we have to distinguish two cases:
4405 - expand_expr has used TARGET. In this case, we must not generate
4406 another copy. This can be detected by TARGET being equal according to ==.
4408 - expand_expr has not used TARGET - that means that the source just
4409 happens to have the same RTX form. Since temp will have been created
4410 by expand_expr, it will compare unequal according to ==.
4411 We must generate a copy in this case, to reach the correct number
4412 of volatile memory references. */
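/* Editorial example of the distinction drawn above (not from the original
   source): for

	volatile int *p;
	...
	*p = *p;

   both sides expand to MEMs that satisfy rtx_equal_p, but the store MEM was
   created independently of the load MEM (they compare unequal with ==), so a
   copy must still be emitted to preserve both volatile accesses.  */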
4414 if ((! rtx_equal_p (temp, target)
4415 || (temp != target && (side_effects_p (temp)
4416 || side_effects_p (target))))
4417 && TREE_CODE (exp) != ERROR_MARK
4418 && ! dont_store_target
4419 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4420 but TARGET is not a valid memory reference, TEMP will differ
4421 from TARGET although it is really the same location. */
4422 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4423 || target != DECL_RTL_IF_SET (exp))
4424 /* If there's nothing to copy, don't bother. Don't call expr_size
4425 unless necessary, because the expr_size hook of some front ends (C++)
4426 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4428 && expr_size (exp) != const0_rtx)
4430 target = protect_from_queue (target, 1);
4431 if (GET_MODE (temp) != GET_MODE (target)
4432 && GET_MODE (temp) != VOIDmode)
4434 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4435 if (dont_return_target)
4437 /* In this case, we will return TEMP,
4438 so make sure it has the proper mode.
4439 But don't forget to store the value into TARGET. */
4440 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4441 emit_move_insn (target, temp);
4443 else
4444 convert_move (target, temp, unsignedp);
4447 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4449 /* Handle copying a string constant into an array. The string
4450 constant may be shorter than the array. So copy just the string's
4451 actual length, and clear the rest. First get the size of the data
4452 type of the string, which is actually the size of the target. */
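/* Editorial example (made-up declaration, not part of the original
   source) of the path below.  */
#if 0
static void
example_string_copy (void)
{
  /* TREE_STRING_LENGTH is 4 ("abc" plus the terminating NUL) while
     expr_size is the 10 bytes of the target, so 4 bytes are block-copied
     and clear_storage zeroes the remaining 6.  */
  char buf[10] = "abc";
  (void) buf;
}
#endif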
4453 rtx size = expr_size (exp);
4455 if (GET_CODE (size) == CONST_INT
4456 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4457 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4458 else
4459 {
4460 /* Compute the size of the data to copy from the string. */
4461 tree copy_size
4462 = size_binop (MIN_EXPR,
4463 make_tree (sizetype, size),
4464 size_int (TREE_STRING_LENGTH (exp)));
4465 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4469 /* Copy that much. */
4470 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4471 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4473 /* Figure out how much is left in TARGET that we have to clear.
4474 Do all calculations in ptr_mode. */
4475 if (GET_CODE (copy_size_rtx) == CONST_INT)
4477 size = plus_constant (size, -INTVAL (copy_size_rtx));
4478 target = adjust_address (target, BLKmode,
4479 INTVAL (copy_size_rtx));
4483 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4484 copy_size_rtx, NULL_RTX, 0,
4485 OPTAB_LIB_WIDEN);
4487 #ifdef POINTERS_EXTEND_UNSIGNED
4488 if (GET_MODE (copy_size_rtx) != Pmode)
4489 copy_size_rtx = convert_memory_address (Pmode, copy_size_rtx);
4491 #endif
4493 target = offset_address (target, copy_size_rtx,
4494 highest_pow2_factor (copy_size));
4495 label = gen_label_rtx ();
4496 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4497 GET_MODE (size), 0, label);
4500 if (size != const0_rtx)
4501 clear_storage (target, size);
4507 /* Handle calls that return values in multiple non-contiguous locations.
4508 The Irix 6 ABI has examples of this. */
4509 else if (GET_CODE (target) == PARALLEL)
4510 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4511 else if (GET_MODE (temp) == BLKmode)
4512 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4513 else
4514 emit_move_insn (target, temp);
4517 /* If we don't want a value, return NULL_RTX. */
4521 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4522 ??? The latter test doesn't seem to make sense. */
4523 else if (dont_return_target && GET_CODE (temp) != MEM)
4526 /* Return TARGET itself if it is a hard register. */
4527 else if (want_value && GET_MODE (target) != BLKmode
4528 && ! (GET_CODE (target) == REG
4529 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4530 return copy_to_reg (target);
4536 /* Return 1 if EXP just contains zeros. */
4544 switch (TREE_CODE (exp))
4548 case NON_LVALUE_EXPR:
4549 case VIEW_CONVERT_EXPR:
4550 return is_zeros_p (TREE_OPERAND (exp, 0));
4553 return integer_zerop (exp);
4555 case COMPLEX_CST:
4556 return
4557 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4560 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4563 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4564 elt = TREE_CHAIN (elt))
4565 if (!is_zeros_p (TREE_VALUE (elt)))
4571 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4572 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4573 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4574 if (! is_zeros_p (TREE_VALUE (elt)))
4584 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4587 mostly_zeros_p (exp)
4590 if (TREE_CODE (exp) == CONSTRUCTOR)
4592 int elts = 0, zeros = 0;
4593 tree elt = CONSTRUCTOR_ELTS (exp);
4594 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4596 /* If there are no ranges of true bits, it is all zero. */
4597 return elt == NULL_TREE;
4599 for (; elt; elt = TREE_CHAIN (elt))
4601 /* We do not handle the case where the index is a RANGE_EXPR,
4602 so the statistic will be somewhat inaccurate.
4603 We do make a more accurate count in store_constructor itself,
4604 and since this function is only used for nested array elements,
4605 this should be close enough. */
4606 if (mostly_zeros_p (TREE_VALUE (elt)))
4611 return 4 * zeros >= 3 * elts;
4614 return is_zeros_p (exp);
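/* Editorial example (made-up initializers, not part of the original
   source) of the 3/4 threshold above.  */
#if 0
static int example_mostly_zero[4] = { 0, 0, 0, 5 };	/* 4*3 >= 3*4: mostly zero.  */
static int example_not_mostly[4] = { 0, 0, 5, 5 };	/* 4*2 <  3*4: not.  */
#endif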
4617 /* Helper function for store_constructor.
4618 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4619 TYPE is the type of the CONSTRUCTOR, not the element type.
4620 CLEARED is as for store_constructor.
4621 ALIAS_SET is the alias set to use for any stores.
4623 This provides a recursive shortcut back to store_constructor when it isn't
4624 necessary to go through store_field. This is so that we can pass through
4625 the cleared field to let store_constructor know that we may not have to
4626 clear a substructure if the outer structure has already been cleared. */
4629 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4632 unsigned HOST_WIDE_INT bitsize;
4633 HOST_WIDE_INT bitpos;
4634 enum machine_mode mode;
4639 if (TREE_CODE (exp) == CONSTRUCTOR
4640 && bitpos % BITS_PER_UNIT == 0
4641 /* If we have a non-zero bitpos for a register target, then we just
4642 let store_field do the bitfield handling. This is unlikely to
4643 generate unnecessary clear instructions anyway. */
4644 && (bitpos == 0 || GET_CODE (target) == MEM))
4646 if (GET_CODE (target) == MEM)
4647 target
4648 = adjust_address (target,
4649 GET_MODE (target) == BLKmode
4650 || 0 != (bitpos
4651 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4652 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4655 /* Update the alias set, if required. */
4656 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4657 && MEM_ALIAS_SET (target) != 0)
4659 target = copy_rtx (target);
4660 set_mem_alias_set (target, alias_set);
4663 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4666 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4670 /* Store the value of constructor EXP into the rtx TARGET.
4671 TARGET is either a REG or a MEM; we know it cannot conflict, since
4672 safe_from_p has been called.
4673 CLEARED is true if TARGET is known to have been zeroed.
4674 SIZE is the number of bytes of TARGET we are allowed to modify: this
4675 may not be the same as the size of EXP if we are assigning to a field
4676 which has been packed to exclude padding bits. */
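/* Editorial example (made-up type, not part of the original source) of
   the "clear first" strategy implemented below.  */
#if 0
struct example_s { int a, b, c; };

static void
example_partial_init (void)
{
  /* The constructor has fewer elements than the type has fields, so the
     whole object is cleared with clear_storage and only 'a' is then
     stored explicitly.  */
  struct example_s s = { 1 };
  (void) s;
}
#endif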
4679 store_constructor (exp, target, cleared, size)
4685 tree type = TREE_TYPE (exp);
4686 #ifdef WORD_REGISTER_OPERATIONS
4687 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4690 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4691 || TREE_CODE (type) == QUAL_UNION_TYPE)
4695 /* We either clear the aggregate or indicate the value is dead. */
4696 if ((TREE_CODE (type) == UNION_TYPE
4697 || TREE_CODE (type) == QUAL_UNION_TYPE)
4699 && ! CONSTRUCTOR_ELTS (exp))
4700 /* If the constructor is empty, clear the union. */
4702 clear_storage (target, expr_size (exp));
4706 /* If we are building a static constructor into a register,
4707 set the initial value as zero so we can fold the value into
4708 a constant. But if more than one register is involved,
4709 this probably loses. */
4710 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4711 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4713 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4717 /* If the constructor has fewer fields than the structure
4718 or if we are initializing the structure to mostly zeros,
4719 clear the whole structure first. Don't do this if TARGET is a
4720 register whose mode size isn't equal to SIZE since clear_storage
4721 can't handle this case. */
4722 else if (! cleared && size > 0
4723 && ((list_length (CONSTRUCTOR_ELTS (exp))
4724 != fields_length (type))
4725 || mostly_zeros_p (exp))
4726 && (GET_CODE (target) != REG
4727 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4730 clear_storage (target, GEN_INT (size));
4735 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4737 /* Store each element of the constructor into
4738 the corresponding field of TARGET. */
4740 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4742 tree field = TREE_PURPOSE (elt);
4743 tree value = TREE_VALUE (elt);
4744 enum machine_mode mode;
4745 HOST_WIDE_INT bitsize;
4746 HOST_WIDE_INT bitpos = 0;
4749 rtx to_rtx = target;
4751 /* Just ignore missing fields.
4752 We cleared the whole structure, above,
4753 if any fields are missing. */
4757 if (cleared && is_zeros_p (value))
4760 if (host_integerp (DECL_SIZE (field), 1))
4761 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4765 unsignedp = TREE_UNSIGNED (field);
4766 mode = DECL_MODE (field);
4767 if (DECL_BIT_FIELD (field))
4770 offset = DECL_FIELD_OFFSET (field);
4771 if (host_integerp (offset, 0)
4772 && host_integerp (bit_position (field), 0))
4774 bitpos = int_bit_position (field);
4778 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4784 if (contains_placeholder_p (offset))
4785 offset = build (WITH_RECORD_EXPR, sizetype,
4786 offset, make_tree (TREE_TYPE (exp), target));
4788 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4789 if (GET_CODE (to_rtx) != MEM)
4792 #ifdef POINTERS_EXTEND_UNSIGNED
4793 if (GET_MODE (offset_rtx) != Pmode)
4794 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4796 if (GET_MODE (offset_rtx) != ptr_mode)
4797 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4800 to_rtx = offset_address (to_rtx, offset_rtx,
4801 highest_pow2_factor (offset));
4804 if (TREE_READONLY (field))
4806 if (GET_CODE (to_rtx) == MEM)
4807 to_rtx = copy_rtx (to_rtx);
4809 RTX_UNCHANGING_P (to_rtx) = 1;
4812 #ifdef WORD_REGISTER_OPERATIONS
4813 /* If this initializes a field that is smaller than a word, at the
4814 start of a word, try to widen it to a full word.
4815 This special case allows us to output C++ member function
4816 initializations in a form that the optimizers can understand. */
4817 if (GET_CODE (target) == REG
4818 && bitsize < BITS_PER_WORD
4819 && bitpos % BITS_PER_WORD == 0
4820 && GET_MODE_CLASS (mode) == MODE_INT
4821 && TREE_CODE (value) == INTEGER_CST
4823 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4825 tree type = TREE_TYPE (value);
4827 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4829 type = (*lang_hooks.types.type_for_size)
4830 (BITS_PER_WORD, TREE_UNSIGNED (type));
4831 value = convert (type, value);
4834 if (BYTES_BIG_ENDIAN)
4836 = fold (build (LSHIFT_EXPR, type, value,
4837 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4838 bitsize = BITS_PER_WORD;
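/* Editorial sketch of the widening above (not from the original source):
   storing the INTEGER_CST 5 into a 16-bit field at bit 0 of a register
   target on a 32-bit big-endian WORD_REGISTER_OPERATIONS machine becomes a
   store of

	5 << (32 - 16)

   with BITSIZE widened to BITS_PER_WORD, so the bit-field insertion turns
   into a plain full-word move.  */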
4843 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4844 && DECL_NONADDRESSABLE_P (field))
4846 to_rtx = copy_rtx (to_rtx);
4847 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4850 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4851 value, type, cleared,
4852 get_alias_set (TREE_TYPE (field)));
4855 else if (TREE_CODE (type) == ARRAY_TYPE
4856 || TREE_CODE (type) == VECTOR_TYPE)
4861 tree domain = TYPE_DOMAIN (type);
4862 tree elttype = TREE_TYPE (type);
4864 HOST_WIDE_INT minelt = 0;
4865 HOST_WIDE_INT maxelt = 0;
4867 /* Vectors are like arrays, but the domain is stored via an array type indirectly. */
4869 if (TREE_CODE (type) == VECTOR_TYPE)
4871 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4872 the same field as TYPE_DOMAIN, we are not guaranteed that it always will. */
4874 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4875 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4878 const_bounds_p = (TYPE_MIN_VALUE (domain)
4879 && TYPE_MAX_VALUE (domain)
4880 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4881 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4883 /* If we have constant bounds for the range of the type, get them. */
4886 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4887 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4890 /* If the constructor has fewer elements than the array,
4891 clear the whole array first. Similarly if this is a static
4892 constructor of a non-BLKmode object. */
4893 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4897 HOST_WIDE_INT count = 0, zero_count = 0;
4898 need_to_clear = ! const_bounds_p;
4900 /* This loop is a more accurate version of the loop in
4901 mostly_zeros_p (it handles RANGE_EXPR in an index).
4902 It is also needed to check for missing elements. */
4903 for (elt = CONSTRUCTOR_ELTS (exp);
4904 elt != NULL_TREE && ! need_to_clear;
4905 elt = TREE_CHAIN (elt))
4907 tree index = TREE_PURPOSE (elt);
4908 HOST_WIDE_INT this_node_count;
4910 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4912 tree lo_index = TREE_OPERAND (index, 0);
4913 tree hi_index = TREE_OPERAND (index, 1);
4915 if (! host_integerp (lo_index, 1)
4916 || ! host_integerp (hi_index, 1))
4922 this_node_count = (tree_low_cst (hi_index, 1)
4923 - tree_low_cst (lo_index, 1) + 1);
4926 this_node_count = 1;
4928 count += this_node_count;
4929 if (mostly_zeros_p (TREE_VALUE (elt)))
4930 zero_count += this_node_count;
4933 /* Clear the entire array first if there are any missing elements,
4934 or if the incidence of zero elements is >= 75%. */
4935 if (! need_to_clear
4936 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4937 need_to_clear = 1;
4940 if (need_to_clear && size > 0)
4945 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4946 else
4947 clear_storage (target, GEN_INT (size));
4951 else if (REG_P (target))
4952 /* Inform later passes that the old value is dead. */
4953 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4955 /* Store each element of the constructor into
4956 the corresponding element of TARGET, determined
4957 by counting the elements. */
4958 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4960 elt = TREE_CHAIN (elt), i++)
4962 enum machine_mode mode;
4963 HOST_WIDE_INT bitsize;
4964 HOST_WIDE_INT bitpos;
4966 tree value = TREE_VALUE (elt);
4967 tree index = TREE_PURPOSE (elt);
4968 rtx xtarget = target;
4970 if (cleared && is_zeros_p (value))
4973 unsignedp = TREE_UNSIGNED (elttype);
4974 mode = TYPE_MODE (elttype);
4975 if (mode == BLKmode)
4976 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4977 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4980 bitsize = GET_MODE_BITSIZE (mode);
4982 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4984 tree lo_index = TREE_OPERAND (index, 0);
4985 tree hi_index = TREE_OPERAND (index, 1);
4986 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4987 struct nesting *loop;
4988 HOST_WIDE_INT lo, hi, count;
4991 /* If the range is constant and "small", unroll the loop. */
4993 && host_integerp (lo_index, 0)
4994 && host_integerp (hi_index, 0)
4995 && (lo = tree_low_cst (lo_index, 0),
4996 hi = tree_low_cst (hi_index, 0),
4997 count = hi - lo + 1,
4998 (GET_CODE (target) != MEM
5000 || (host_integerp (TYPE_SIZE (elttype), 1)
5001 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5004 lo -= minelt; hi -= minelt;
5005 for (; lo <= hi; lo++)
5007 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5009 if (GET_CODE (target) == MEM
5010 && !MEM_KEEP_ALIAS_SET_P (target)
5011 && TREE_CODE (type) == ARRAY_TYPE
5012 && TYPE_NONALIASED_COMPONENT (type))
5014 target = copy_rtx (target);
5015 MEM_KEEP_ALIAS_SET_P (target) = 1;
5018 store_constructor_field
5019 (target, bitsize, bitpos, mode, value, type, cleared,
5020 get_alias_set (elttype));
5025 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5026 loop_top = gen_label_rtx ();
5027 loop_end = gen_label_rtx ();
5029 unsignedp = TREE_UNSIGNED (domain);
5031 index = build_decl (VAR_DECL, NULL_TREE, domain);
5033 index_r
5034 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5035 &unsignedp, 0));
5036 SET_DECL_RTL (index, index_r);
5037 if (TREE_CODE (value) == SAVE_EXPR
5038 && SAVE_EXPR_RTL (value) == 0)
5040 /* Make sure value gets expanded once before the loop. */
5042 expand_expr (value, const0_rtx, VOIDmode, 0);
5045 store_expr (lo_index, index_r, 0);
5046 loop = expand_start_loop (0);
5048 /* Assign value to element index. */
5049 position
5050 = convert (ssizetype,
5051 fold (build (MINUS_EXPR, TREE_TYPE (index),
5052 index, TYPE_MIN_VALUE (domain))));
5053 position = size_binop (MULT_EXPR, position,
5054 convert (ssizetype,
5055 TYPE_SIZE_UNIT (elttype)));
5057 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5058 xtarget = offset_address (target, pos_rtx,
5059 highest_pow2_factor (position));
5060 xtarget = adjust_address (xtarget, mode, 0);
5061 if (TREE_CODE (value) == CONSTRUCTOR)
5062 store_constructor (value, xtarget, cleared,
5063 bitsize / BITS_PER_UNIT);
5064 else
5065 store_expr (value, xtarget, 0);
5067 expand_exit_loop_if_false (loop,
5068 build (LT_EXPR, integer_type_node,
5069 index, hi_index));
5071 expand_increment (build (PREINCREMENT_EXPR,
5072 TREE_TYPE (index),
5073 index, integer_one_node), 0, 0);
5075 emit_label (loop_end);
5078 else if ((index != 0 && ! host_integerp (index, 0))
5079 || ! host_integerp (TYPE_SIZE (elttype), 1))
5084 index = ssize_int (1);
5087 index = convert (ssizetype,
5088 fold (build (MINUS_EXPR, index,
5089 TYPE_MIN_VALUE (domain))));
5091 position = size_binop (MULT_EXPR, index,
5092 convert (ssizetype,
5093 TYPE_SIZE_UNIT (elttype)));
5094 xtarget = offset_address (target,
5095 expand_expr (position, 0, VOIDmode, 0),
5096 highest_pow2_factor (position));
5097 xtarget = adjust_address (xtarget, mode, 0);
5098 store_expr (value, xtarget, 0);
5103 bitpos = ((tree_low_cst (index, 0) - minelt)
5104 * tree_low_cst (TYPE_SIZE (elttype), 1));
5105 else
5106 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5108 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5109 && TREE_CODE (type) == ARRAY_TYPE
5110 && TYPE_NONALIASED_COMPONENT (type))
5112 target = copy_rtx (target);
5113 MEM_KEEP_ALIAS_SET_P (target) = 1;
5116 store_constructor_field (target, bitsize, bitpos, mode, value,
5117 type, cleared, get_alias_set (elttype));
5123 /* Set constructor assignments. */
5124 else if (TREE_CODE (type) == SET_TYPE)
5126 tree elt = CONSTRUCTOR_ELTS (exp);
5127 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5128 tree domain = TYPE_DOMAIN (type);
5129 tree domain_min, domain_max, bitlength;
5131 /* The default implementation strategy is to extract the constant
5132 parts of the constructor, use that to initialize the target,
5133 and then "or" in whatever non-constant ranges we need in addition.
5135 If a large set is all zero or all ones, it is
5136 probably better to set it using memset (if available) or bzero.
5137 Also, if a large set has just a single range, it may also be
5138 better to first clear the set (using
5139 bzero/memset) and then set the bits we want. */
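/* Editorial example (not from the original source), in the style of the
   Pascal/CHILL front ends that produce SET_TYPE: for a 64-bit set
   initialized with the constant ranges [1..1] and [8..15], the loop below
   assembles words with bits 1 and 8-15 set and copies them to the target;
   only a range with non-constant bounds would reach the __setbits library
   call further down.  */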
5141 /* Check for all zeros. */
5142 if (elt == NULL_TREE && size > 0)
5145 clear_storage (target, GEN_INT (size));
5149 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5150 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5151 bitlength = size_binop (PLUS_EXPR,
5152 size_diffop (domain_max, domain_min),
5155 nbits = tree_low_cst (bitlength, 1);
5157 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5158 are "complicated" (more than one range), initialize (the
5159 constant parts) by copying from a constant. */
5160 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5161 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5163 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5164 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5165 char *bit_buffer = (char *) alloca (nbits);
5166 HOST_WIDE_INT word = 0;
5167 unsigned int bit_pos = 0;
5168 unsigned int ibit = 0;
5169 unsigned int offset = 0; /* In bytes from beginning of set. */
5171 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5174 if (bit_buffer[ibit])
5176 if (BYTES_BIG_ENDIAN)
5177 word |= (1 << (set_word_size - 1 - bit_pos));
5179 word |= 1 << bit_pos;
5183 if (bit_pos >= set_word_size || ibit == nbits)
5185 if (word != 0 || ! cleared)
5187 rtx datum = GEN_INT (word);
5190 /* The assumption here is that it is safe to use
5191 XEXP if the set is multi-word, but not if
5192 it's single-word. */
5193 if (GET_CODE (target) == MEM)
5194 to_rtx = adjust_address (target, mode, offset);
5195 else if (offset == 0)
5199 emit_move_insn (to_rtx, datum);
5206 offset += set_word_size / BITS_PER_UNIT;
5211 /* Don't bother clearing storage if the set is all ones. */
5212 if (TREE_CHAIN (elt) != NULL_TREE
5213 || (TREE_PURPOSE (elt) == NULL_TREE
5215 : ( ! host_integerp (TREE_VALUE (elt), 0)
5216 || ! host_integerp (TREE_PURPOSE (elt), 0)
5217 || (tree_low_cst (TREE_VALUE (elt), 0)
5218 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5219 != (HOST_WIDE_INT) nbits))))
5220 clear_storage (target, expr_size (exp));
5222 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5224 /* Start of range of element or NULL. */
5225 tree startbit = TREE_PURPOSE (elt);
5226 /* End of range of element, or element value. */
5227 tree endbit = TREE_VALUE (elt);
5228 HOST_WIDE_INT startb, endb;
5229 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5231 bitlength_rtx = expand_expr (bitlength,
5232 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5234 /* Handle non-range tuple element like [ expr ]. */
5235 if (startbit == NULL_TREE)
5237 startbit = save_expr (endbit);
5241 startbit = convert (sizetype, startbit);
5242 endbit = convert (sizetype, endbit);
5243 if (! integer_zerop (domain_min))
5245 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5246 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5248 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5249 EXPAND_CONST_ADDRESS);
5250 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5251 EXPAND_CONST_ADDRESS);
5257 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5258 (GET_MODE (target), 0),
5261 emit_move_insn (targetx, target);
5264 else if (GET_CODE (target) == MEM)
5269 /* Optimization: If startbit and endbit are constants divisible
5270 by BITS_PER_UNIT, call memset instead. */
5271 if (TARGET_MEM_FUNCTIONS
5272 && TREE_CODE (startbit) == INTEGER_CST
5273 && TREE_CODE (endbit) == INTEGER_CST
5274 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5275 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5277 emit_library_call (memset_libfunc, LCT_NORMAL,
5279 plus_constant (XEXP (targetx, 0),
5280 startb / BITS_PER_UNIT),
5282 constm1_rtx, TYPE_MODE (integer_type_node),
5283 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5284 TYPE_MODE (sizetype));
5287 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5288 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5289 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5290 startbit_rtx, TYPE_MODE (sizetype),
5291 endbit_rtx, TYPE_MODE (sizetype));
5294 emit_move_insn (target, targetx);
5302 /* Store the value of EXP (an expression tree)
5303 into a subfield of TARGET which has mode MODE and occupies
5304 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5305 If MODE is VOIDmode, it means that we are storing into a bit-field.
5307 If VALUE_MODE is VOIDmode, return nothing in particular.
5308 UNSIGNEDP is not used in this case.
5310 Otherwise, return an rtx for the value stored. This rtx
5311 has mode VALUE_MODE if that is convenient to do.
5312 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5314 TYPE is the type of the underlying object.
5316 ALIAS_SET is the alias set for the destination. This value will
5317 (in general) be different from that for TARGET, since TARGET is a
5318 reference to the containing structure. */
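/* Editorial example (made-up type, not part of the original source).  */
#if 0
static struct { unsigned f : 5; int whole; } example_x;

static void
example_bitfield_store (int v)
{
  /* Reaches store_field with BITSIZE == 5, BITPOS == 0 and MODE ==
     VOIDmode, so store_bit_field is used; storing to 'whole' instead
     would arrive with MODE == SImode (on a typical 32-bit target) and
     use an ordinary memref store.  */
  example_x.f = v;
}
#endif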
5321 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5324 HOST_WIDE_INT bitsize;
5325 HOST_WIDE_INT bitpos;
5326 enum machine_mode mode;
5328 enum machine_mode value_mode;
5333 HOST_WIDE_INT width_mask = 0;
5335 if (TREE_CODE (exp) == ERROR_MARK)
5338 /* If we have nothing to store, do nothing unless the expression has
5339 side-effects. */
5340 if (bitsize == 0)
5341 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5342 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5343 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5345 /* If we are storing into an unaligned field of an aligned union that is
5346 in a register, we may have the mode of TARGET being an integer mode but
5347 MODE == BLKmode. In that case, get an aligned object whose size and
5348 alignment are the same as TARGET and store TARGET into it (we can avoid
5349 the store if the field being stored is the entire width of TARGET). Then
5350 call ourselves recursively to store the field into a BLKmode version of
5351 that object. Finally, load from the object into TARGET. This is not
5352 very efficient in general, but should only be slightly more expensive
5353 than the otherwise-required unaligned accesses. Perhaps this can be
5354 cleaned up later. */
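/* Editorial sketch of the trick just described (not from the original
   source): if a union lives in an SImode register and a 3-byte BLKmode
   field is stored into it, the code below spills the register to a stack
   temporary, recurses to store the field into the BLKmode view of that
   temporary, and reloads the register from it afterwards.  */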
5356 if (mode == BLKmode
5357 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5361 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5363 rtx blk_object = adjust_address (object, BLKmode, 0);
5365 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5366 emit_move_insn (object, target);
5368 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5371 emit_move_insn (target, object);
5373 /* We want to return the BLKmode version of the data. */
5377 if (GET_CODE (target) == CONCAT)
5379 /* We're storing into a struct containing a single __complex. */
5383 return store_expr (exp, target, 0);
5386 /* If the structure is in a register or if the component
5387 is a bit field, we cannot use addressing to access it.
5388 Use bit-field techniques or SUBREG to store in it. */
5390 if (mode == VOIDmode
5391 || (mode != BLKmode && ! direct_store[(int) mode]
5392 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5393 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5394 || GET_CODE (target) == REG
5395 || GET_CODE (target) == SUBREG
5396 /* If the field isn't aligned enough to store as an ordinary memref,
5397 store it as a bit field. */
5398 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5399 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5400 || bitpos % GET_MODE_ALIGNMENT (mode)))
5401 /* If the RHS and field are a constant size and the size of the
5402 RHS isn't the same size as the bitfield, we must use bitfield
5405 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5406 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5408 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5410 /* If BITSIZE is narrower than the size of the type of EXP
5411 we will be narrowing TEMP. Normally, what's wanted are the
5412 low-order bits. However, if EXP's type is a record and this is a
5413 big-endian machine, we want the upper BITSIZE bits. */
5414 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5415 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5416 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5417 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5418 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5422 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5424 if (mode != VOIDmode && mode != BLKmode
5425 && mode != TYPE_MODE (TREE_TYPE (exp)))
5426 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5428 /* If the modes of TARGET and TEMP are both BLKmode, both
5429 must be in memory and BITPOS must be aligned on a byte
5430 boundary. If so, we simply do a block copy. */
5431 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5433 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5434 || bitpos % BITS_PER_UNIT != 0)
5437 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5438 emit_block_move (target, temp,
5439 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5443 return value_mode == VOIDmode ? const0_rtx : target;
5446 /* Store the value in the bitfield. */
5447 store_bit_field (target, bitsize, bitpos, mode, temp,
5448 int_size_in_bytes (type));
5450 if (value_mode != VOIDmode)
5452 /* The caller wants an rtx for the value.
5453 If possible, avoid refetching from the bitfield itself. */
5454 if (width_mask != 0
5455 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5458 enum machine_mode tmode;
5460 tmode = GET_MODE (temp);
5461 if (tmode == VOIDmode)
5462 tmode = value_mode;
5464 if (unsignedp)
5465 return expand_and (tmode, temp,
5466 gen_int_mode (width_mask, tmode),
5467 NULL_RTX);
5469 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5470 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5471 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5474 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5475 NULL_RTX, value_mode, VOIDmode,
5476 int_size_in_bytes (type));
5482 rtx addr = XEXP (target, 0);
5483 rtx to_rtx = target;
5485 /* If a value is wanted, it must be the lhs;
5486 so make the address stable for multiple use. */
5488 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5489 && ! CONSTANT_ADDRESS_P (addr)
5490 /* A frame-pointer reference is already stable. */
5491 && ! (GET_CODE (addr) == PLUS
5492 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5493 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5494 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5495 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5497 /* Now build a reference to just the desired component. */
5499 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5501 if (to_rtx == target)
5502 to_rtx = copy_rtx (to_rtx);
5504 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5505 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5506 set_mem_alias_set (to_rtx, alias_set);
5508 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5512 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5513 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5514 codes and find the ultimate containing object, which we return.
5516 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5517 bit position, and *PUNSIGNEDP to the signedness of the field.
5518 If the position of the field is variable, we store a tree
5519 giving the variable offset (in units) in *POFFSET.
5520 This offset is in addition to the bit position.
5521 If the position is not variable, we store 0 in *POFFSET.
5523 If any of the extraction expressions is volatile,
5524 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5526 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5527 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5530 If the field describes a variable-sized object, *PMODE is set to
5531 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5532 this case, but the address of the object can be found. */
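/* Editorial worked example, assuming 4-byte ints (not part of the
   original source).  */
#if 0
struct example_t { int pad; int a[10]; };

static int
example_ref (struct example_t *p, int i)
{
  /* For the reference below, get_inner_reference returns the
     INDIRECT_REF *p as the containing object, with *PBITSIZE == 32,
     *PBITPOS == 32 (the bit position of 'a') and *POFFSET the tree
     'i * 4', since the array index is variable.  */
  return p->a[i];
}
#endif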
5535 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5536 punsignedp, pvolatilep)
5538 HOST_WIDE_INT *pbitsize;
5539 HOST_WIDE_INT *pbitpos;
5541 enum machine_mode *pmode;
5546 enum machine_mode mode = VOIDmode;
5547 tree offset = size_zero_node;
5548 tree bit_offset = bitsize_zero_node;
5549 tree placeholder_ptr = 0;
5552 /* First get the mode, signedness, and size. We do this from just the
5553 outermost expression. */
5554 if (TREE_CODE (exp) == COMPONENT_REF)
5556 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5557 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5558 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5560 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5562 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5564 size_tree = TREE_OPERAND (exp, 1);
5565 *punsignedp = TREE_UNSIGNED (exp);
5569 mode = TYPE_MODE (TREE_TYPE (exp));
5570 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5572 if (mode == BLKmode)
5573 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5575 *pbitsize = GET_MODE_BITSIZE (mode);
5580 if (! host_integerp (size_tree, 1))
5581 mode = BLKmode, *pbitsize = -1;
5582 else
5583 *pbitsize = tree_low_cst (size_tree, 1);
5586 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5587 and find the ultimate containing object. */
5590 if (TREE_CODE (exp) == BIT_FIELD_REF)
5591 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5592 else if (TREE_CODE (exp) == COMPONENT_REF)
5594 tree field = TREE_OPERAND (exp, 1);
5595 tree this_offset = DECL_FIELD_OFFSET (field);
5597 /* If this field hasn't been filled in yet, don't go
5598 past it. This should only happen when folding expressions
5599 made during type construction. */
5600 if (this_offset == 0)
5602 else if (! TREE_CONSTANT (this_offset)
5603 && contains_placeholder_p (this_offset))
5604 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5606 offset = size_binop (PLUS_EXPR, offset, this_offset);
5607 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5608 DECL_FIELD_BIT_OFFSET (field));
5610 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5613 else if (TREE_CODE (exp) == ARRAY_REF
5614 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5616 tree index = TREE_OPERAND (exp, 1);
5617 tree array = TREE_OPERAND (exp, 0);
5618 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5619 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5620 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5622 /* We assume all arrays have sizes that are a multiple of a byte.
5623 First subtract the lower bound, if any, in the type of the
5624 index, then convert to sizetype and multiply by the size of the element. */
5626 if (low_bound != 0 && ! integer_zerop (low_bound))
5627 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5628 index, low_bound));
5630 /* If the index has a self-referential type, pass it to a
5631 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5632 component to one. */
5633 if (! TREE_CONSTANT (index)
5634 && contains_placeholder_p (index))
5635 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5636 if (! TREE_CONSTANT (unit_size)
5637 && contains_placeholder_p (unit_size))
5638 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5640 offset = size_binop (PLUS_EXPR, offset,
5641 size_binop (MULT_EXPR,
5642 convert (sizetype, index),
5643 convert (sizetype, unit_size)));
5646 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5648 tree new = find_placeholder (exp, &placeholder_ptr);
5650 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5651 We might have been called from tree optimization where we
5652 haven't set up an object yet. */
5660 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5661 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5662 && ! ((TREE_CODE (exp) == NOP_EXPR
5663 || TREE_CODE (exp) == CONVERT_EXPR)
5664 && (TYPE_MODE (TREE_TYPE (exp))
5665 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5668 /* If any reference in the chain is volatile, the effect is volatile. */
5669 if (TREE_THIS_VOLATILE (exp))
5672 exp = TREE_OPERAND (exp, 0);
5675 /* If OFFSET is constant, see if we can return the whole thing as a
5676 constant bit position. Otherwise, split it up. */
5677 if (host_integerp (offset, 0)
5678 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5679 bitsize_unit_node))
5680 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5681 && host_integerp (tem, 0))
5682 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5684 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5690 /* Return 1 if T is an expression that get_inner_reference handles. */
5693 handled_component_p (t)
5696 switch (TREE_CODE (t))
5701 case ARRAY_RANGE_REF:
5702 case NON_LVALUE_EXPR:
5703 case VIEW_CONVERT_EXPR:
5708 return (TYPE_MODE (TREE_TYPE (t))
5709 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5716 /* Given an rtx VALUE that may contain additions and multiplications, return
5717 an equivalent value that just refers to a register, memory, or constant.
5718 This is done by generating instructions to perform the arithmetic and
5719 returning a pseudo-register containing the value.
5721 The returned value may be a REG, SUBREG, MEM or constant. */
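/* Editorial example (not from the original source): given

	VALUE == (plus:SI (reg:SI 60) (const_int 4))

   and TARGET == 0, force_operand emits an add insn and returns a pseudo
   holding the sum; a VALUE that is already a REG, MEM or constant is
   returned unchanged.  */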
5724 force_operand (value, target)
5728 /* Use subtarget as the target for operand 0 of a binary operation. */
5729 rtx subtarget = get_subtarget (target);
5730 enum rtx_code code = GET_CODE (value);
5732 /* Check for a PIC address load. */
5733 if ((code == PLUS || code == MINUS)
5734 && XEXP (value, 0) == pic_offset_table_rtx
5735 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5736 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5737 || GET_CODE (XEXP (value, 1)) == CONST))
5740 subtarget = gen_reg_rtx (GET_MODE (value));
5741 emit_move_insn (subtarget, value);
5745 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5748 target = gen_reg_rtx (GET_MODE (value));
5749 convert_move (target, force_operand (XEXP (value, 0), NULL),
5750 code == ZERO_EXTEND);
5754 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5756 op2 = XEXP (value, 1);
5757 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5759 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5762 op2 = negate_rtx (GET_MODE (value), op2);
5765 /* Check for an addition with OP2 a constant integer and our first
5766 operand a PLUS of a virtual register and something else. In that
5767 case, we want to emit the sum of the virtual register and the
5768 constant first and then add the other value. This allows virtual
5769 register instantiation to simply modify the constant rather than
5770 creating another one around this addition. */
5771 if (code == PLUS && GET_CODE (op2) == CONST_INT
5772 && GET_CODE (XEXP (value, 0)) == PLUS
5773 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5774 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5775 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5777 rtx temp = expand_simple_binop (GET_MODE (value), code,
5778 XEXP (XEXP (value, 0), 0), op2,
5779 subtarget, 0, OPTAB_LIB_WIDEN);
5780 return expand_simple_binop (GET_MODE (value), code, temp,
5781 force_operand (XEXP (XEXP (value,
5783 target, 0, OPTAB_LIB_WIDEN);
5786 op1 = force_operand (XEXP (value, 0), subtarget);
5787 op2 = force_operand (op2, NULL_RTX);
5791 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5793 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5794 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5795 target, 1, OPTAB_LIB_WIDEN);
5797 return expand_divmod (0,
5798 FLOAT_MODE_P (GET_MODE (value))
5799 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5800 GET_MODE (value), op1, op2, target, 0);
5803 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5807 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5811 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5815 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5816 target, 0, OPTAB_LIB_WIDEN);
5819 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5820 target, 1, OPTAB_LIB_WIDEN);
5823 if (GET_RTX_CLASS (code) == '1')
5825 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5826 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5829 #ifdef INSN_SCHEDULING
5830 /* On machines that have insn scheduling, we want all memory references to be
5831 explicit, so we need to deal with such paradoxical SUBREGs. */
5832 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5833 && (GET_MODE_SIZE (GET_MODE (value))
5834 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5835 value
5836 = simplify_gen_subreg (GET_MODE (value),
5837 force_reg (GET_MODE (SUBREG_REG (value)),
5838 force_operand (SUBREG_REG (value),
5839 NULL_RTX)),
5840 GET_MODE (SUBREG_REG (value)),
5841 SUBREG_BYTE (value));
5847 /* Subroutine of expand_expr: return nonzero iff there is no way that
5848 EXP can reference X, which is being modified. TOP_P is nonzero if this
5849 call is going to be used to determine whether we need a temporary
5850 for EXP, as opposed to a recursive call to this function.
5852 It is always safe for this routine to return zero since it merely
5853 searches for optimization opportunities. */
5856 safe_from_p (x, exp, top_p)
5863 static tree save_expr_list;
5865 if (x == 0
5866 /* If EXP has varying size, we MUST use a target since we currently
5867 have no way of allocating temporaries of variable size
5868 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5869 So we assume here that something at a higher level has prevented a
5870 clash. This is somewhat bogus, but the best we can do. Only
5871 do this when X is BLKmode and when we are at the top level. */
5872 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5874 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5875 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5876 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5878 && GET_MODE (x) == BLKmode)
5879 /* If X is in the outgoing argument area, it is always safe. */
5880 || (GET_CODE (x) == MEM
5881 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5882 || (GET_CODE (XEXP (x, 0)) == PLUS
5883 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5886 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5887 find the underlying pseudo. */
5888 if (GET_CODE (x) == SUBREG)
5891 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5895 /* A SAVE_EXPR might appear many times in the expression passed to the
5896 top-level safe_from_p call, and if it has a complex subexpression,
5897 examining it multiple times could result in a combinatorial explosion.
5898 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5899 with optimization took about 28 minutes to compile -- even though it was
5900 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5901 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5902 we have processed. Note that the only test of top_p was above. */
5911 rtn = safe_from_p (x, exp, 0);
5913 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5914 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5919 /* Now look at our tree code and possibly recurse. */
5920 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5923 exp_rtl = DECL_RTL_IF_SET (exp);
5930 if (TREE_CODE (exp) == TREE_LIST)
5931 return ((TREE_VALUE (exp) == 0
5932 || safe_from_p (x, TREE_VALUE (exp), 0))
5933 && (TREE_CHAIN (exp) == 0
5934 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5935 else if (TREE_CODE (exp) == ERROR_MARK)
5936 return 1; /* An already-visited SAVE_EXPR? */
5941 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5945 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5946 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5950 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5951 the expression. If it is set, we conflict iff we are that rtx or
5952 both are in memory. Otherwise, we check all operands of the
5953 expression recursively. */
5955 switch (TREE_CODE (exp))
5958 /* If the operand is static or we are static, we can't conflict.
5959 Likewise if we don't conflict with the operand at all. */
5960 if (staticp (TREE_OPERAND (exp, 0))
5961 || TREE_STATIC (exp)
5962 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5965 /* Otherwise, the only way this can conflict is if we are taking
5966 the address of a DECL and that address is part of X, which is very rare. */
5968 exp = TREE_OPERAND (exp, 0);
5971 if (!DECL_RTL_SET_P (exp)
5972 || GET_CODE (DECL_RTL (exp)) != MEM)
5975 exp_rtl = XEXP (DECL_RTL (exp), 0);
5980 if (GET_CODE (x) == MEM
5981 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5982 get_alias_set (exp)))
5987 /* Assume that the call will clobber all hard registers and all of memory. */
5989 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5990 || GET_CODE (x) == MEM)
5995 /* If a sequence exists, we would have to scan every instruction
5996 in the sequence to see if it was safe. This is probably not worthwhile. */
5998 if (RTL_EXPR_SEQUENCE (exp))
6001 exp_rtl = RTL_EXPR_RTL (exp);
6004 case WITH_CLEANUP_EXPR:
6005 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6008 case CLEANUP_POINT_EXPR:
6009 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6012 exp_rtl = SAVE_EXPR_RTL (exp);
6016 /* If we've already scanned this, don't do it again. Otherwise,
6017 show we've scanned it and record for clearing the flag if we're going on. */
6019 if (TREE_PRIVATE (exp))
6022 TREE_PRIVATE (exp) = 1;
6023 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6025 TREE_PRIVATE (exp) = 0;
6029 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6033 /* The only operand we look at is operand 1. The rest aren't
6034 part of the expression. */
6035 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6037 case METHOD_CALL_EXPR:
6038 /* This takes an rtx argument, but shouldn't appear here. */
6045 /* If we have an rtx, we do not need to scan our operands. */
6049 nops = first_rtl_op (TREE_CODE (exp));
6050 for (i = 0; i < nops; i++)
6051 if (TREE_OPERAND (exp, i) != 0
6052 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6055 /* If this is a language-specific tree code, it may require
6056 special handling. */
6057 if ((unsigned int) TREE_CODE (exp)
6058 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6059 && !(*lang_hooks.safe_from_p) (x, exp))
6063 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6067 if (GET_CODE (exp_rtl) == SUBREG)
6069 exp_rtl = SUBREG_REG (exp_rtl);
6070 if (GET_CODE (exp_rtl) == REG
6071 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6075 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6076 are memory and they conflict. */
6077 return ! (rtx_equal_p (x, exp_rtl)
6078 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6079 && true_dependence (exp_rtl, VOIDmode, x,
6080 rtx_addr_varies_p)));
6083 /* If we reach here, it is safe. */
6087 /* Subroutine of expand_expr: return rtx if EXP is a
6088 variable or parameter; else return 0. */
6095 switch (TREE_CODE (exp))
6099 return DECL_RTL (exp);
6105 #ifdef MAX_INTEGER_COMPUTATION_MODE
6108 check_max_integer_computation_mode (exp)
6111 enum tree_code code;
6112 enum machine_mode mode;
6114 /* Strip any NOPs that don't change the mode. */
6115 STRIP_NOPS (exp);
6116 code = TREE_CODE (exp);
6118 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6119 if (code == NOP_EXPR
6120 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6123 /* First check the type of the overall operation. We need only look at
6124 unary, binary and relational operations. */
6125 if (TREE_CODE_CLASS (code) == '1'
6126 || TREE_CODE_CLASS (code) == '2'
6127 || TREE_CODE_CLASS (code) == '<')
6129 mode = TYPE_MODE (TREE_TYPE (exp));
6130 if (GET_MODE_CLASS (mode) == MODE_INT
6131 && mode > MAX_INTEGER_COMPUTATION_MODE)
6132 internal_error ("unsupported wide integer operation");
6135 /* Check operand of a unary op. */
6136 if (TREE_CODE_CLASS (code) == '1')
6138 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6139 if (GET_MODE_CLASS (mode) == MODE_INT
6140 && mode > MAX_INTEGER_COMPUTATION_MODE)
6141 internal_error ("unsupported wide integer operation");
6144 /* Check operands of a binary/comparison op. */
6145 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6147 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6148 if (GET_MODE_CLASS (mode) == MODE_INT
6149 && mode > MAX_INTEGER_COMPUTATION_MODE)
6150 internal_error ("unsupported wide integer operation");
6152 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6153 if (GET_MODE_CLASS (mode) == MODE_INT
6154 && mode > MAX_INTEGER_COMPUTATION_MODE)
6155 internal_error ("unsupported wide integer operation");
6160 /* Return the highest power of two that EXP is known to be a multiple of.
6161 This is used in updating alignment of MEMs in array references. */
6163 static HOST_WIDE_INT
6164 highest_pow2_factor (exp)
6167 HOST_WIDE_INT c0, c1;
6169 switch (TREE_CODE (exp))
6172 /* We can find the lowest bit that's a one. If the low
6173 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6174 We need to handle this case since we can find it in a COND_EXPR,
6175 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6176 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6178 if (TREE_CONSTANT_OVERFLOW (exp))
6179 return BIGGEST_ALIGNMENT;
6182 /* Note: tree_low_cst is intentionally not used here,
6183 since we don't care about the upper bits. */
6184 c0 = TREE_INT_CST_LOW (exp);
6185 c0 &= -c0;
6186 return c0 ? c0 : BIGGEST_ALIGNMENT;
6190 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6191 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6192 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6193 return MIN (c0, c1);
6195 case MULT_EXPR:
6196 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6197 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6198 return c0 * c1;
6200 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6202 if (integer_pow2p (TREE_OPERAND (exp, 1))
6203 && host_integerp (TREE_OPERAND (exp, 1), 1))
6205 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6206 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6207 return MAX (1, c0 / c1);
6211 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6212 case SAVE_EXPR: case WITH_RECORD_EXPR:
6213 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6216 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6219 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6220 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6221 return MIN (c0, c1);
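/* Editorial worked example for highest_pow2_factor (not from the original
   source): for the tree 'i * 12 + 8' with 'i' a variable, the MULT_EXPR
   contributes 1 * 4 (4 is the largest power of two dividing 12), the
   PLUS_EXPR takes MIN (4, 8), and the result is 4: the expression is known
   to be a multiple of 4 whatever 'i' is.  */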
6230 /* Similar, except that it is known that the expression must be a multiple
6231 of the alignment of TYPE. */
6233 static HOST_WIDE_INT
6234 highest_pow2_factor_for_type (type, exp)
6238 HOST_WIDE_INT type_align, factor;
6240 factor = highest_pow2_factor (exp);
6241 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6242 return MAX (factor, type_align);
6245 /* Return an object on the placeholder list that matches EXP, a
6246 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6247 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6248 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6249 is a location which initially points to a starting location in the
6250 placeholder list (zero means start of the list) and where a pointer into
6251 the placeholder list at which the object is found is placed. */
6254 find_placeholder (exp, plist)
6258 tree type = TREE_TYPE (exp);
6259 tree placeholder_expr;
6261 for (placeholder_expr
6262 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6263 placeholder_expr != 0;
6264 placeholder_expr = TREE_CHAIN (placeholder_expr))
6266 tree need_type = TYPE_MAIN_VARIANT (type);
6269 /* Find the outermost reference that is of the type we want. If none,
6270 see if any object has a type that is a pointer to the type we want. */
6272 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6273 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6274 || TREE_CODE (elt) == COND_EXPR)
6275 ? TREE_OPERAND (elt, 1)
6276 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6277 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6278 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6279 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6280 ? TREE_OPERAND (elt, 0) : 0))
6281 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6284 *plist = placeholder_expr;
6288 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6290 = ((TREE_CODE (elt) == COMPOUND_EXPR
6291 || TREE_CODE (elt) == COND_EXPR)
6292 ? TREE_OPERAND (elt, 1)
6293 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6294 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6295 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6296 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6297 ? TREE_OPERAND (elt, 0) : 0))
6298 if (POINTER_TYPE_P (TREE_TYPE (elt))
6299 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6303 *plist = placeholder_expr;
6304 return build1 (INDIRECT_REF, need_type, elt);
6311 /* expand_expr: generate code for computing expression EXP.
6312 An rtx for the computed value is returned. The value is never null.
6313 In the case of a void EXP, const0_rtx is returned.
6315 The value may be stored in TARGET if TARGET is nonzero.
6316 TARGET is just a suggestion; callers must assume that
6317 the rtx returned may not be the same as TARGET.
6319 If TARGET is CONST0_RTX, it means that the value will be ignored.
6321 If TMODE is not VOIDmode, it suggests generating the
6322 result in mode TMODE. But this is done only when convenient.
6323 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6324 TMODE is just a suggestion; callers must assume that
6325 the rtx returned may not have mode TMODE.
6327 Note that TARGET may have neither TMODE nor MODE. In that case, it
6328 probably will not be used.
6330 If MODIFIER is EXPAND_SUM then when EXP is an addition
6331 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6332 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6333 products as above, or REG or MEM, or constant.
6334 Ordinarily in such cases we would output mul or add instructions
6335 and then return a pseudo reg containing the sum.
6337 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6338 it also marks a label as absolutely required (it can't be dead).
6339 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6340 This is used for outputting expressions used in initializers.
6342 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6343 with a constant address even if that address is not normally legitimate.
6344 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
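/* Editorial example of the EXPAND_SUM form (not from the original source,
   and assuming 32-bit pointers): expanding '&a[i]' for an 'int' array may
   return

	(plus:SI (mult:SI (reg:SI 61) (const_int 4))
		 (symbol_ref:SI ("a")))

   instead of emitting the multiply and add and returning a pseudo, which
   lets the caller fold the whole address into an addressing mode.  */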
6347 expand_expr (exp, target, tmode, modifier)
6350 enum machine_mode tmode;
6351 enum expand_modifier modifier;
6354 tree type = TREE_TYPE (exp);
6355 int unsignedp = TREE_UNSIGNED (type);
6356 enum machine_mode mode;
6357 enum tree_code code = TREE_CODE (exp);
6359 rtx subtarget, original_target;
6363 /* Handle ERROR_MARK before anybody tries to access its type. */
6364 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6366 op0 = CONST0_RTX (tmode);
6372 mode = TYPE_MODE (type);
6373 /* Use subtarget as the target for operand 0 of a binary operation. */
6374 subtarget = get_subtarget (target);
6375 original_target = target;
6376 ignore = (target == const0_rtx
6377 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6378 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6379 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6380 && TREE_CODE (type) == VOID_TYPE));
6382 /* If we are going to ignore this result, we need only do something
6383 if there is a side-effect somewhere in the expression. If there
6384 is, short-circuit the most common cases here. Note that we must
6385 not call expand_expr with anything but const0_rtx in case this
6386 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6390 if (! TREE_SIDE_EFFECTS (exp))
6393 /* Ensure we reference a volatile object even if value is ignored, but
6394 don't do this if all we are doing is taking its address. */
6395 if (TREE_THIS_VOLATILE (exp)
6396 && TREE_CODE (exp) != FUNCTION_DECL
6397 && mode != VOIDmode && mode != BLKmode
6398 && modifier != EXPAND_CONST_ADDRESS)
6400 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6401 if (GET_CODE (temp) == MEM)
6402 temp = copy_to_reg (temp);
6406 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6407 || code == INDIRECT_REF || code == BUFFER_REF)
6408 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6411 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6412 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6414 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6415 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6418 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6419 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6420 /* If the second operand has no side effects, just evaluate
6422 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6424 else if (code == BIT_FIELD_REF)
6426 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6427 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6428 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6435 #ifdef MAX_INTEGER_COMPUTATION_MODE
6436 /* Only check stuff here if the mode we want is different from the mode
6437 of the expression; if it's the same, check_max_integer_computiation_mode
6438 will handle it. Do we really need to check this stuff at all? */
6441 && GET_MODE (target) != mode
6442 && TREE_CODE (exp) != INTEGER_CST
6443 && TREE_CODE (exp) != PARM_DECL
6444 && TREE_CODE (exp) != ARRAY_REF
6445 && TREE_CODE (exp) != ARRAY_RANGE_REF
6446 && TREE_CODE (exp) != COMPONENT_REF
6447 && TREE_CODE (exp) != BIT_FIELD_REF
6448 && TREE_CODE (exp) != INDIRECT_REF
6449 && TREE_CODE (exp) != CALL_EXPR
6450 && TREE_CODE (exp) != VAR_DECL
6451 && TREE_CODE (exp) != RTL_EXPR)
6453 enum machine_mode mode = GET_MODE (target);
6455 if (GET_MODE_CLASS (mode) == MODE_INT
6456 && mode > MAX_INTEGER_COMPUTATION_MODE)
6457 internal_error ("unsupported wide integer operation");
6461 && TREE_CODE (exp) != INTEGER_CST
6462 && TREE_CODE (exp) != PARM_DECL
6463 && TREE_CODE (exp) != ARRAY_REF
6464 && TREE_CODE (exp) != ARRAY_RANGE_REF
6465 && TREE_CODE (exp) != COMPONENT_REF
6466 && TREE_CODE (exp) != BIT_FIELD_REF
6467 && TREE_CODE (exp) != INDIRECT_REF
6468 && TREE_CODE (exp) != VAR_DECL
6469 && TREE_CODE (exp) != CALL_EXPR
6470 && TREE_CODE (exp) != RTL_EXPR
6471 && GET_MODE_CLASS (tmode) == MODE_INT
6472 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6473 internal_error ("unsupported wide integer operation");
6475 check_max_integer_computation_mode (exp);
6478 /* If will do cse, generate all results into pseudo registers
6479 since 1) that allows cse to find more things
6480 and 2) otherwise cse could produce an insn the machine
6481 cannot support. And exception is a CONSTRUCTOR into a multi-word
6482 MEM: that's much more likely to be most efficient into the MEM. */
6484 if (! cse_not_expected && mode != BLKmode && target
6485 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6486 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6493 tree function = decl_function_context (exp);
6494 /* Handle using a label in a containing function. */
6495 if (function != current_function_decl
6496 && function != inline_function_decl && function != 0)
6498 struct function *p = find_function_data (function);
6499 p->expr->x_forced_labels
6500 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6501 p->expr->x_forced_labels);
6505 if (modifier == EXPAND_INITIALIZER)
6506 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6511 temp = gen_rtx_MEM (FUNCTION_MODE,
6512 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6513 if (function != current_function_decl
6514 && function != inline_function_decl && function != 0)
6515 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6520 if (DECL_RTL (exp) == 0)
6522 error_with_decl (exp, "prior parameter's size depends on `%s'");
6523 return CONST0_RTX (mode);
6526 /* ... fall through ... */
6529 /* If a static var's type was incomplete when the decl was written,
6530 but the type is complete now, lay out the decl now. */
6531 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6532 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6534 rtx value = DECL_RTL_IF_SET (exp);
6536 layout_decl (exp, 0);
6538 /* If the RTL was already set, update its mode and memory
6542 PUT_MODE (value, DECL_MODE (exp));
6543 SET_DECL_RTL (exp, 0);
6544 set_mem_attributes (value, exp, 1);
6545 SET_DECL_RTL (exp, value);
6549 /* ... fall through ... */
6553 if (DECL_RTL (exp) == 0)
6556 /* Ensure variable marked as used even if it doesn't go through
6557 a parser. If it hasn't be used yet, write out an external
6559 if (! TREE_USED (exp))
6561 assemble_external (exp);
6562 TREE_USED (exp) = 1;
6565 /* Show we haven't gotten RTL for this yet. */
6568 /* Handle variables inherited from containing functions. */
6569 context = decl_function_context (exp);
6571 /* We treat inline_function_decl as an alias for the current function
6572 because that is the inline function whose vars, types, etc.
6573 are being merged into the current function.
6574 See expand_inline_function. */
6576 if (context != 0 && context != current_function_decl
6577 && context != inline_function_decl
6578 /* If var is static, we don't need a static chain to access it. */
6579 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6580 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6584 /* Mark as non-local and addressable. */
6585 DECL_NONLOCAL (exp) = 1;
6586 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6588 (*lang_hooks.mark_addressable) (exp);
6589 if (GET_CODE (DECL_RTL (exp)) != MEM)
6591 addr = XEXP (DECL_RTL (exp), 0);
6592 if (GET_CODE (addr) == MEM)
6594 = replace_equiv_address (addr,
6595 fix_lexical_addr (XEXP (addr, 0), exp));
6597 addr = fix_lexical_addr (addr, exp);
6599 temp = replace_equiv_address (DECL_RTL (exp), addr);
6602 /* This is the case of an array whose size is to be determined
6603 from its initializer, while the initializer is still being parsed.
6606 else if (GET_CODE (DECL_RTL (exp)) == MEM
6607 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6608 temp = validize_mem (DECL_RTL (exp));
6610 /* If DECL_RTL is memory, we are in the normal case and either
6611 the address is not valid or it is not a register and -fforce-addr
6612 is specified, get the address into a register. */
6614 else if (GET_CODE (DECL_RTL (exp)) == MEM
6615 && modifier != EXPAND_CONST_ADDRESS
6616 && modifier != EXPAND_SUM
6617 && modifier != EXPAND_INITIALIZER
6618 && (! memory_address_p (DECL_MODE (exp),
6619 XEXP (DECL_RTL (exp), 0))
6621 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6622 temp = replace_equiv_address (DECL_RTL (exp),
6623 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6625 /* If we got something, return it. But first, set the alignment
6626 if the address is a register. */
6629 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6630 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6635 /* If the mode of DECL_RTL does not match that of the decl, it
6636 must be a promoted value. We return a SUBREG of the wanted mode,
6637 but mark it so that we know that it was already extended. */
6639 if (GET_CODE (DECL_RTL (exp)) == REG
6640 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6642 /* Get the signedness used for this variable. Ensure we get the
6643 same mode we got when the variable was declared. */
6644 if (GET_MODE (DECL_RTL (exp))
6645 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6646 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6649 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6650 SUBREG_PROMOTED_VAR_P (temp) = 1;
6651 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6655 return DECL_RTL (exp);
6658 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6659 TREE_INT_CST_HIGH (exp), mode);
6661 /* ??? If overflow is set, fold will have done an incomplete job,
6662 which can result in (plus xx (const_int 0)), which can get
6663 simplified by validate_replace_rtx during virtual register
6664 instantiation, which can result in unrecognizable insns.
6665 Avoid this by forcing all overflows into registers. */
6666 if (TREE_CONSTANT_OVERFLOW (exp)
6667 && modifier != EXPAND_INITIALIZER)
6668 temp = force_reg (mode, temp);
6673 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6676 /* If optimized, generate immediate CONST_DOUBLE
6677 which will be turned into memory by reload if necessary.
6679 We used to force a register so that loop.c could see it. But
6680 this does not allow gen_* patterns to perform optimizations with
6681 the constants. It also produces two insns in cases like "x = 1.0;".
6682 On most machines, floating-point constants are not permitted in
6683 many insns, so we'd end up copying it to a register in any case.
6685 Now, we do the copying in expand_binop, if appropriate. */
6686 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6687 TYPE_MODE (TREE_TYPE (exp)));
6691 if (! TREE_CST_RTL (exp))
6692 output_constant_def (exp, 1);
6694 /* TREE_CST_RTL probably contains a constant address.
6695 On RISC machines where a constant address isn't valid,
6696 make some insns to get that address into a register. */
6697 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6698 && modifier != EXPAND_CONST_ADDRESS
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_SUM
6701 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6703 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6704 return replace_equiv_address (TREE_CST_RTL (exp),
6705 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6706 return TREE_CST_RTL (exp);
6708 case EXPR_WITH_FILE_LOCATION:
6711 const char *saved_input_filename = input_filename;
6712 int saved_lineno = lineno;
6713 input_filename = EXPR_WFL_FILENAME (exp);
6714 lineno = EXPR_WFL_LINENO (exp);
6715 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6716 emit_line_note (input_filename, lineno);
6717 /* Possibly avoid switching back and forth here. */
6718 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6719 input_filename = saved_input_filename;
6720 lineno = saved_lineno;
6725 context = decl_function_context (exp);
6727 /* If this SAVE_EXPR was at global context, assume we are an
6728 initialization function and move it into our context. */
6730 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6732 /* We treat inline_function_decl as an alias for the current function
6733 because that is the inline function whose vars, types, etc.
6734 are being merged into the current function.
6735 See expand_inline_function. */
6736 if (context == current_function_decl || context == inline_function_decl)
6739 /* If this is non-local, handle it. */
6742 /* The following call just exists to abort if the context is
6743 not of a containing function. */
6744 find_function_data (context);
6746 temp = SAVE_EXPR_RTL (exp);
6747 if (temp && GET_CODE (temp) == REG)
6749 put_var_into_stack (exp);
6750 temp = SAVE_EXPR_RTL (exp);
6752 if (temp == 0 || GET_CODE (temp) != MEM)
6755 replace_equiv_address (temp,
6756 fix_lexical_addr (XEXP (temp, 0), exp));
6758 if (SAVE_EXPR_RTL (exp) == 0)
6760 if (mode == VOIDmode)
6763 temp = assign_temp (build_qualified_type (type,
6765 | TYPE_QUAL_CONST)),
6768 SAVE_EXPR_RTL (exp) = temp;
6769 if (!optimize && GET_CODE (temp) == REG)
6770 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6773 /* If the mode of TEMP does not match that of the expression, it
6774 must be a promoted value. We pass store_expr a SUBREG of the
6775 wanted mode but mark it so that we know that it was already
6776 extended. Note that `unsignedp' was modified above in
6779 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6781 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6782 SUBREG_PROMOTED_VAR_P (temp) = 1;
6783 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6786 if (temp == const0_rtx)
6787 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6789 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6791 TREE_USED (exp) = 1;
6794 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6795 must be a promoted value. We return a SUBREG of the wanted mode,
6796 but mark it so that we know that it was already extended. */
6798 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6799 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6801 /* Compute the signedness and make the proper SUBREG. */
6802 promote_mode (type, mode, &unsignedp, 0);
6803 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6804 SUBREG_PROMOTED_VAR_P (temp) = 1;
6805 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6809 return SAVE_EXPR_RTL (exp);
6814 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6815 TREE_OPERAND (exp, 0)
6816 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6820 case PLACEHOLDER_EXPR:
6822 tree old_list = placeholder_list;
6823 tree placeholder_expr = 0;
6825 exp = find_placeholder (exp, &placeholder_expr);
6829 placeholder_list = TREE_CHAIN (placeholder_expr);
6830 temp = expand_expr (exp, original_target, tmode, modifier);
6831 placeholder_list = old_list;
6835 case WITH_RECORD_EXPR:
6836 /* Put the object on the placeholder list, expand our first operand,
6837 and pop the list. */
6838 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6840 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6842 placeholder_list = TREE_CHAIN (placeholder_list);
6846 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6847 expand_goto (TREE_OPERAND (exp, 0));
6849 expand_computed_goto (TREE_OPERAND (exp, 0));
6853 expand_exit_loop_if_false (NULL,
6854 invert_truthvalue (TREE_OPERAND (exp, 0)));
6857 case LABELED_BLOCK_EXPR:
6858 if (LABELED_BLOCK_BODY (exp))
6859 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6860 /* Should perhaps use expand_label, but this is simpler and safer. */
6861 do_pending_stack_adjust ();
6862 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6865 case EXIT_BLOCK_EXPR:
6866 if (EXIT_BLOCK_RETURN (exp))
6867 sorry ("returned value in block_exit_expr");
6868 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6873 expand_start_loop (1);
6874 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6882 tree vars = TREE_OPERAND (exp, 0);
6883 int vars_need_expansion = 0;
6885 /* Need to open a binding contour here because
6886 if there are any cleanups they must be contained here. */
6887 expand_start_bindings (2);
6889 /* Mark the corresponding BLOCK for output in its proper place. */
6890 if (TREE_OPERAND (exp, 2) != 0
6891 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6892 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6894 /* If VARS have not yet been expanded, expand them now. */
6897 if (!DECL_RTL_SET_P (vars))
6899 vars_need_expansion = 1;
6902 expand_decl_init (vars);
6903 vars = TREE_CHAIN (vars);
6906 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6908 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6914 if (RTL_EXPR_SEQUENCE (exp))
6916 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6918 emit_insn (RTL_EXPR_SEQUENCE (exp));
6919 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6921 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6922 free_temps_for_rtl_expr (exp);
6923 return RTL_EXPR_RTL (exp);
6926 /* If we don't need the result, just ensure we evaluate any
6932 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6933 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6938 /* All elts simple constants => refer to a constant in memory. But
6939 if this is a non-BLKmode mode, let it store a field at a time
6940 since that should make a CONST_INT or CONST_DOUBLE when we
6941 fold. Likewise, if we have a target we can use, it is best to
6942 store directly into the target unless the type is large enough
6943 that memcpy will be used. If we are making an initializer and
6944 all operands are constant, put it in memory as well.
6946 FIXME: Avoid trying to fill vector constructors piece-meal.
6947 Output them with output_constant_def below unless we're sure
6948 they're zeros. This should go away when vector initializers
6949 are treated like VECTOR_CST instead of arrays.
6951 else if ((TREE_STATIC (exp)
6952 && ((mode == BLKmode
6953 && ! (target != 0 && safe_from_p (target, exp, 1)))
6954 || TREE_ADDRESSABLE (exp)
6955 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6956 && (! MOVE_BY_PIECES_P
6957 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6959 && ((TREE_CODE (type) == VECTOR_TYPE
6960 && !is_zeros_p (exp))
6961 || ! mostly_zeros_p (exp)))))
6962 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6964 rtx constructor = output_constant_def (exp, 1);
6966 if (modifier != EXPAND_CONST_ADDRESS
6967 && modifier != EXPAND_INITIALIZER
6968 && modifier != EXPAND_SUM)
6969 constructor = validize_mem (constructor);
6975 /* Handle calls that pass values in multiple non-contiguous
6976 locations. The Irix 6 ABI has examples of this. */
6977 if (target == 0 || ! safe_from_p (target, exp, 1)
6978 || GET_CODE (target) == PARALLEL)
6980 = assign_temp (build_qualified_type (type,
6982 | (TREE_READONLY (exp)
6983 * TYPE_QUAL_CONST))),
6984 0, TREE_ADDRESSABLE (exp), 1);
6986 store_constructor (exp, target, 0, int_expr_size (exp));
6992 tree exp1 = TREE_OPERAND (exp, 0);
6994 tree string = string_constant (exp1, &index);
6996 /* Try to optimize reads from const strings. */
6998 && TREE_CODE (string) == STRING_CST
6999 && TREE_CODE (index) == INTEGER_CST
7000 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7001 && GET_MODE_CLASS (mode) == MODE_INT
7002 && GET_MODE_SIZE (mode) == 1
7003 && modifier != EXPAND_WRITE)
7004 return gen_int_mode (TREE_STRING_POINTER (string)
7005 [TREE_INT_CST_LOW (index)], mode);
7007 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7008 op0 = memory_address (mode, op0);
7009 temp = gen_rtx_MEM (mode, op0);
7010 set_mem_attributes (temp, exp, 0);
7012 /* If we are writing to this object and its type is a record with
7013 readonly fields, we must mark it as readonly so it will
7014 conflict with readonly references to those fields. */
7015 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7016 RTX_UNCHANGING_P (temp) = 1;
7022 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7026 tree array = TREE_OPERAND (exp, 0);
7027 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7028 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7029 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7032 /* Optimize the special-case of a zero lower bound.
7034 We convert the low_bound to sizetype to avoid some problems
7035 with constant folding. (E.g. suppose the lower bound is 1,
7036 and its mode is QI. Without the conversion, (ARRAY
7037 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7038 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7040 if (! integer_zerop (low_bound))
7041 index = size_diffop (index, convert (sizetype, low_bound));
7043 /* Fold an expression like: "foo"[2].
7044 This is not done in fold so it won't happen inside &.
7045 Don't fold if this is for wide characters since it's too
7046 difficult to do correctly and this is a very rare case. */
7048 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7049 && TREE_CODE (array) == STRING_CST
7050 && TREE_CODE (index) == INTEGER_CST
7051 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7052 && GET_MODE_CLASS (mode) == MODE_INT
7053 && GET_MODE_SIZE (mode) == 1)
7054 return gen_int_mode (TREE_STRING_POINTER (array)
7055 [TREE_INT_CST_LOW (index)], mode);
7057 /* If this is a constant index into a constant array,
7058 just get the value from the array. Handle both the cases when
7059 we have an explicit constructor and when our operand is a variable
7060 that was declared const. */
7062 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7063 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7064 && TREE_CODE (index) == INTEGER_CST
7065 && 0 > compare_tree_int (index,
7066 list_length (CONSTRUCTOR_ELTS
7067 (TREE_OPERAND (exp, 0)))))
7071 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7072 i = TREE_INT_CST_LOW (index);
7073 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7077 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7081 else if (optimize >= 1
7082 && modifier != EXPAND_CONST_ADDRESS
7083 && modifier != EXPAND_INITIALIZER
7084 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7085 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7086 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7088 if (TREE_CODE (index) == INTEGER_CST)
7090 tree init = DECL_INITIAL (array);
7092 if (TREE_CODE (init) == CONSTRUCTOR)
7096 for (elem = CONSTRUCTOR_ELTS (init);
7098 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7099 elem = TREE_CHAIN (elem))
7102 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7103 return expand_expr (fold (TREE_VALUE (elem)), target,
7106 else if (TREE_CODE (init) == STRING_CST
7107 && 0 > compare_tree_int (index,
7108 TREE_STRING_LENGTH (init)))
7110 tree type = TREE_TYPE (TREE_TYPE (init));
7111 enum machine_mode mode = TYPE_MODE (type);
7113 if (GET_MODE_CLASS (mode) == MODE_INT
7114 && GET_MODE_SIZE (mode) == 1)
7115 return gen_int_mode (TREE_STRING_POINTER (init)
7116 [TREE_INT_CST_LOW (index)], mode);
7125 case ARRAY_RANGE_REF:
7126 /* If the operand is a CONSTRUCTOR, we can just extract the
7127 appropriate field if it is present. Don't do this if we have
7128 already written the data since we want to refer to that copy
7129 and varasm.c assumes that's what we'll do. */
7130 if (code == COMPONENT_REF
7131 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7132 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7136 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7137 elt = TREE_CHAIN (elt))
7138 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7139 /* We can normally use the value of the field in the
7140 CONSTRUCTOR. However, if this is a bitfield in
7141 an integral mode that we can fit in a HOST_WIDE_INT,
7142 we must mask only the number of bits in the bitfield,
7143 since this is done implicitly by the constructor. If
7144 the bitfield does not meet either of those conditions,
7145 we can't do this optimization. */
7146 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7147 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7149 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7150 <= HOST_BITS_PER_WIDE_INT))))
7152 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7153 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7155 HOST_WIDE_INT bitsize
7156 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7157 enum machine_mode imode
7158 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7160 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7162 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7163 op0 = expand_and (imode, op0, op1, target);
7168 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7171 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7173 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7183 enum machine_mode mode1;
7184 HOST_WIDE_INT bitsize, bitpos;
7187 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7188 &mode1, &unsignedp, &volatilep);
7191 /* If we got back the original object, something is wrong. Perhaps
7192 we are evaluating an expression too early. In any event, don't
7193 infinitely recurse. */
7197 /* If TEM's type is a union of variable size, pass TARGET to the inner
7198 computation, since it will need a temporary and TARGET is known
7199 to have to do. This occurs in unchecked conversion in Ada. */
7203 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7204 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7206 ? target : NULL_RTX),
7208 (modifier == EXPAND_INITIALIZER
7209 || modifier == EXPAND_CONST_ADDRESS)
7210 ? modifier : EXPAND_NORMAL);
7212 /* If this is a constant, put it into a register if it is a
7213 legitimate constant and OFFSET is 0 and memory if it isn't. */
7214 if (CONSTANT_P (op0))
7216 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7217 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7219 op0 = force_reg (mode, op0);
7221 op0 = validize_mem (force_const_mem (mode, op0));
7226 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7228 /* If this object is in a register, put it into memory.
7229 This case can't occur in C, but can in Ada if we have
7230 unchecked conversion of an expression from a scalar type to
7231 an array or record type. */
7232 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7233 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7235 /* If the operand is a SAVE_EXPR, we can deal with this by
7236 forcing the SAVE_EXPR into memory. */
7237 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7239 put_var_into_stack (TREE_OPERAND (exp, 0));
7240 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7245 = build_qualified_type (TREE_TYPE (tem),
7246 (TYPE_QUALS (TREE_TYPE (tem))
7247 | TYPE_QUAL_CONST));
7248 rtx memloc = assign_temp (nt, 1, 1, 1);
7250 emit_move_insn (memloc, op0);
7255 if (GET_CODE (op0) != MEM)
7258 #ifdef POINTERS_EXTEND_UNSIGNED
7259 if (GET_MODE (offset_rtx) != Pmode)
7260 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7262 if (GET_MODE (offset_rtx) != ptr_mode)
7263 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7266 /* A constant address in OP0 can have VOIDmode, we must not try
7267 to call force_reg for that case. Avoid that case. */
7268 if (GET_CODE (op0) == MEM
7269 && GET_MODE (op0) == BLKmode
7270 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7272 && (bitpos % bitsize) == 0
7273 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7274 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7276 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7280 op0 = offset_address (op0, offset_rtx,
7281 highest_pow2_factor (offset));
7284 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7285 record its alignment as BIGGEST_ALIGNMENT. */
7286 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7287 && is_aligning_offset (offset, tem))
7288 set_mem_align (op0, BIGGEST_ALIGNMENT);
7290 /* Don't forget about volatility even if this is a bitfield. */
7291 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7293 if (op0 == orig_op0)
7294 op0 = copy_rtx (op0);
7296 MEM_VOLATILE_P (op0) = 1;
7299 /* The following code doesn't handle CONCAT.
7300 Assume only bitpos == 0 can be used for CONCAT, due to
7301 one element arrays having the same mode as its element. */
7302 if (GET_CODE (op0) == CONCAT)
7304 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7309 /* In cases where an aligned union has an unaligned object
7310 as a field, we might be extracting a BLKmode value from
7311 an integer-mode (e.g., SImode) object. Handle this case
7312 by doing the extract into an object as wide as the field
7313 (which we know to be the width of a basic mode), then
7314 storing into memory, and changing the mode to BLKmode. */
7315 if (mode1 == VOIDmode
7316 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7317 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7318 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7319 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7320 && modifier != EXPAND_CONST_ADDRESS
7321 && modifier != EXPAND_INITIALIZER)
7322 /* If the field isn't aligned enough to fetch as a memref,
7323 fetch it as a bit field. */
7324 || (mode1 != BLKmode
7325 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7326 && ((TYPE_ALIGN (TREE_TYPE (tem))
7327 < GET_MODE_ALIGNMENT (mode))
7328 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7329 /* If the type and the field are a constant size and the
7330 size of the type isn't the same size as the bitfield,
7331 we must use bitfield operations. */
7333 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7335 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7338 enum machine_mode ext_mode = mode;
7340 if (ext_mode == BLKmode
7341 && ! (target != 0 && GET_CODE (op0) == MEM
7342 && GET_CODE (target) == MEM
7343 && bitpos % BITS_PER_UNIT == 0))
7344 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7346 if (ext_mode == BLKmode)
7348 /* In this case, BITPOS must start at a byte boundary and
7349 TARGET, if specified, must be a MEM. */
7350 if (GET_CODE (op0) != MEM
7351 || (target != 0 && GET_CODE (target) != MEM)
7352 || bitpos % BITS_PER_UNIT != 0)
7355 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7357 target = assign_temp (type, 0, 1, 1);
7359 emit_block_move (target, op0,
7360 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7367 op0 = validize_mem (op0);
7369 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7370 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7372 op0 = extract_bit_field (op0, bitsize, bitpos,
7373 unsignedp, target, ext_mode, ext_mode,
7374 int_size_in_bytes (TREE_TYPE (tem)));
7376 /* If the result is a record type and BITSIZE is narrower than
7377 the mode of OP0, an integral mode, and this is a big endian
7378 machine, we must put the field into the high-order bits. */
7379 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7380 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7381 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7382 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7383 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7387 if (mode == BLKmode)
7389 rtx new = assign_temp (build_qualified_type
7390 ((*lang_hooks.types.type_for_mode)
7392 TYPE_QUAL_CONST), 0, 1, 1);
7394 emit_move_insn (new, op0);
7395 op0 = copy_rtx (new);
7396 PUT_MODE (op0, BLKmode);
7397 set_mem_attributes (op0, exp, 1);
7403 /* If the result is BLKmode, use that to access the object
7405 if (mode == BLKmode)
7408 /* Get a reference to just this component. */
7409 if (modifier == EXPAND_CONST_ADDRESS
7410 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7411 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7413 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7415 if (op0 == orig_op0)
7416 op0 = copy_rtx (op0);
7418 set_mem_attributes (op0, exp, 0);
7419 if (GET_CODE (XEXP (op0, 0)) == REG)
7420 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7422 MEM_VOLATILE_P (op0) |= volatilep;
7423 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7424 || modifier == EXPAND_CONST_ADDRESS
7425 || modifier == EXPAND_INITIALIZER)
7427 else if (target == 0)
7428 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7430 convert_move (target, op0, unsignedp);
7436 rtx insn, before = get_last_insn (), vtbl_ref;
7438 /* Evaluate the interior expression. */
7439 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7442 /* Get or create an instruction off which to hang a note. */
7443 if (REG_P (subtarget))
7446 insn = get_last_insn ();
7449 if (! INSN_P (insn))
7450 insn = prev_nonnote_insn (insn);
7454 target = gen_reg_rtx (GET_MODE (subtarget));
7455 insn = emit_move_insn (target, subtarget);
7458 /* Collect the data for the note. */
7459 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7460 vtbl_ref = plus_constant (vtbl_ref,
7461 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7462 /* Discard the initial CONST that was added. */
7463 vtbl_ref = XEXP (vtbl_ref, 0);
7466 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7471 /* Intended for a reference to a buffer of a file-object in Pascal.
7472 But it's not certain that a special tree code will really be
7473 necessary for these. INDIRECT_REF might work for them. */
7479 /* Pascal set IN expression.
7482 rlo = set_low - (set_low%bits_per_word);
7483 the_word = set [ (index - rlo)/bits_per_word ];
7484 bit_index = index % bits_per_word;
7485 bitmask = 1 << bit_index;
7486 return !!(the_word & bitmask); */
7488 tree set = TREE_OPERAND (exp, 0);
7489 tree index = TREE_OPERAND (exp, 1);
7490 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7491 tree set_type = TREE_TYPE (set);
7492 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7493 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7494 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7495 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7496 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7497 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7498 rtx setaddr = XEXP (setval, 0);
7499 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7501 rtx diff, quo, rem, addr, bit, result;
7503 /* If domain is empty, answer is no. Likewise if index is constant
7504 and out of bounds. */
7505 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7506 && TREE_CODE (set_low_bound) == INTEGER_CST
7507 && tree_int_cst_lt (set_high_bound, set_low_bound))
7508 || (TREE_CODE (index) == INTEGER_CST
7509 && TREE_CODE (set_low_bound) == INTEGER_CST
7510 && tree_int_cst_lt (index, set_low_bound))
7511 || (TREE_CODE (set_high_bound) == INTEGER_CST
7512 && TREE_CODE (index) == INTEGER_CST
7513 && tree_int_cst_lt (set_high_bound, index))))
7517 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7519 /* If we get here, we have to generate the code for both cases
7520 (in range and out of range). */
7522 op0 = gen_label_rtx ();
7523 op1 = gen_label_rtx ();
7525 if (! (GET_CODE (index_val) == CONST_INT
7526 && GET_CODE (lo_r) == CONST_INT))
7527 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7528 GET_MODE (index_val), iunsignedp, op1);
7530 if (! (GET_CODE (index_val) == CONST_INT
7531 && GET_CODE (hi_r) == CONST_INT))
7532 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7533 GET_MODE (index_val), iunsignedp, op1);
7535 /* Calculate the element number of bit zero in the first word
7537 if (GET_CODE (lo_r) == CONST_INT)
7538 rlow = GEN_INT (INTVAL (lo_r)
7539 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7541 rlow = expand_binop (index_mode, and_optab, lo_r,
7542 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7543 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7545 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7546 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7548 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7549 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7550 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7551 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7553 addr = memory_address (byte_mode,
7554 expand_binop (index_mode, add_optab, diff,
7555 setaddr, NULL_RTX, iunsignedp,
7558 /* Extract the bit we want to examine. */
7559 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7560 gen_rtx_MEM (byte_mode, addr),
7561 make_tree (TREE_TYPE (index), rem),
7563 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7564 GET_MODE (target) == byte_mode ? target : 0,
7565 1, OPTAB_LIB_WIDEN);
7567 if (result != target)
7568 convert_move (target, result, 1);
7570 /* Output the code to handle the out-of-range case. */
7573 emit_move_insn (target, const0_rtx);
7578 case WITH_CLEANUP_EXPR:
7579 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7581 WITH_CLEANUP_EXPR_RTL (exp)
7582 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7583 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7584 CLEANUP_EH_ONLY (exp));
7586 /* That's it for this cleanup. */
7587 TREE_OPERAND (exp, 1) = 0;
7589 return WITH_CLEANUP_EXPR_RTL (exp);
7591 case CLEANUP_POINT_EXPR:
7593 /* Start a new binding layer that will keep track of all cleanup
7594 actions to be performed. */
7595 expand_start_bindings (2);
7597 target_temp_slot_level = temp_slot_level;
7599 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7600 /* If we're going to use this value, load it up now. */
7602 op0 = force_not_mem (op0);
7603 preserve_temp_slots (op0);
7604 expand_end_bindings (NULL_TREE, 0, 0);
7609 /* Check for a built-in function. */
7610 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7611 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7613 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7615 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7616 == BUILT_IN_FRONTEND)
7617 return (*lang_hooks.expand_expr)
7618 (exp, original_target, tmode, modifier);
7620 return expand_builtin (exp, target, subtarget, tmode, ignore);
7623 return expand_call (exp, target, ignore);
7625 case NON_LVALUE_EXPR:
7628 case REFERENCE_EXPR:
7629 if (TREE_OPERAND (exp, 0) == error_mark_node)
7632 if (TREE_CODE (type) == UNION_TYPE)
7634 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7636 /* If both input and output are BLKmode, this conversion isn't doing
7637 anything except possibly changing memory attribute. */
7638 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7640 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7643 result = copy_rtx (result);
7644 set_mem_attributes (result, exp, 0);
7649 target = assign_temp (type, 0, 1, 1);
7651 if (GET_CODE (target) == MEM)
7652 /* Store data into beginning of memory target. */
7653 store_expr (TREE_OPERAND (exp, 0),
7654 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7656 else if (GET_CODE (target) == REG)
7657 /* Store this field into a union of the proper type. */
7658 store_field (target,
7659 MIN ((int_size_in_bytes (TREE_TYPE
7660 (TREE_OPERAND (exp, 0)))
7662 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7663 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7664 VOIDmode, 0, type, 0);
7668 /* Return the entire union. */
7672 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7677 /* If the signedness of the conversion differs and OP0 is
7678 a promoted SUBREG, clear that indication since we now
7679 have to do the proper extension. */
7680 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7681 && GET_CODE (op0) == SUBREG)
7682 SUBREG_PROMOTED_VAR_P (op0) = 0;
7687 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7688 if (GET_MODE (op0) == mode)
7691 /* If OP0 is a constant, just convert it into the proper mode. */
7692 if (CONSTANT_P (op0))
7694 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7695 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7697 if (modifier == EXPAND_INITIALIZER)
7698 return simplify_gen_subreg (mode, op0, inner_mode,
7699 subreg_lowpart_offset (mode,
7702 return convert_modes (mode, inner_mode, op0,
7703 TREE_UNSIGNED (inner_type));
7706 if (modifier == EXPAND_INITIALIZER)
7707 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7711 convert_to_mode (mode, op0,
7712 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7714 convert_move (target, op0,
7715 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7718 case VIEW_CONVERT_EXPR:
7719 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7721 /* If the input and output modes are both the same, we are done.
7722 Otherwise, if neither mode is BLKmode and both are within a word, we
7723 can use gen_lowpart. If neither is true, make sure the operand is
7724 in memory and convert the MEM to the new mode. */
7725 if (TYPE_MODE (type) == GET_MODE (op0))
7727 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7728 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7729 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7730 op0 = gen_lowpart (TYPE_MODE (type), op0);
7731 else if (GET_CODE (op0) != MEM)
7733 /* If the operand is not a MEM, force it into memory. Since we
7734 are going to be be changing the mode of the MEM, don't call
7735 force_const_mem for constants because we don't allow pool
7736 constants to change mode. */
7737 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7739 if (TREE_ADDRESSABLE (exp))
7742 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7744 = assign_stack_temp_for_type
7745 (TYPE_MODE (inner_type),
7746 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7748 emit_move_insn (target, op0);
7752 /* At this point, OP0 is in the correct mode. If the output type is such
7753 that the operand is known to be aligned, indicate that it is.
7754 Otherwise, we need only be concerned about alignment for non-BLKmode
7756 if (GET_CODE (op0) == MEM)
7758 op0 = copy_rtx (op0);
7760 if (TYPE_ALIGN_OK (type))
7761 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7762 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7763 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7765 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7766 HOST_WIDE_INT temp_size
7767 = MAX (int_size_in_bytes (inner_type),
7768 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7769 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7770 temp_size, 0, type);
7771 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7773 if (TREE_ADDRESSABLE (exp))
7776 if (GET_MODE (op0) == BLKmode)
7777 emit_block_move (new_with_op0_mode, op0,
7778 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7781 emit_move_insn (new_with_op0_mode, op0);
7786 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7792 /* We come here from MINUS_EXPR when the second operand is a
7795 this_optab = ! unsignedp && flag_trapv
7796 && (GET_MODE_CLASS (mode) == MODE_INT)
7797 ? addv_optab : add_optab;
7799 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7800 something else, make sure we add the register to the constant and
7801 then to the other thing. This case can occur during strength
7802 reduction and doing it this way will produce better code if the
7803 frame pointer or argument pointer is eliminated.
7805 fold-const.c will ensure that the constant is always in the inner
7806 PLUS_EXPR, so the only case we need to do anything about is if
7807 sp, ap, or fp is our second argument, in which case we must swap
7808 the innermost first argument and our second argument. */
7810 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7811 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7812 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7813 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7814 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7815 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7817 tree t = TREE_OPERAND (exp, 1);
7819 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7820 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7823 /* If the result is to be ptr_mode and we are adding an integer to
7824 something, we might be forming a constant. So try to use
7825 plus_constant. If it produces a sum and we can't accept it,
7826 use force_operand. This allows P = &ARR[const] to generate
7827 efficient code on machines where a SYMBOL_REF is not a valid
7830 If this is an EXPAND_SUM call, always return the sum. */
7831 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7832 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7834 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7835 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7836 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7840 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7842 /* Use immed_double_const to ensure that the constant is
7843 truncated according to the mode of OP1, then sign extended
7844 to a HOST_WIDE_INT. Using the constant directly can result
7845 in non-canonical RTL in a 64x32 cross compile. */
7847 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7849 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7850 op1 = plus_constant (op1, INTVAL (constant_part));
7851 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7852 op1 = force_operand (op1, target);
7856 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7857 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7858 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7863 (modifier == EXPAND_INITIALIZER
7864 ? EXPAND_INITIALIZER : EXPAND_SUM));
7865 if (! CONSTANT_P (op0))
7867 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7868 VOIDmode, modifier);
7869 /* Don't go to both_summands if modifier
7870 says it's not right to return a PLUS. */
7871 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7875 /* Use immed_double_const to ensure that the constant is
7876 truncated according to the mode of OP1, then sign extended
7877 to a HOST_WIDE_INT. Using the constant directly can result
7878 in non-canonical RTL in a 64x32 cross compile. */
7880 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7882 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7883 op0 = plus_constant (op0, INTVAL (constant_part));
7884 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7885 op0 = force_operand (op0, target);
7890 /* No sense saving up arithmetic to be done
7891 if it's all in the wrong mode to form part of an address.
7892 And force_operand won't know whether to sign-extend or
7894 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7895 || mode != ptr_mode)
7898 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7901 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7902 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7905 /* Make sure any term that's a sum with a constant comes last. */
7906 if (GET_CODE (op0) == PLUS
7907 && CONSTANT_P (XEXP (op0, 1)))
7913 /* If adding to a sum including a constant,
7914 associate it to put the constant outside. */
7915 if (GET_CODE (op1) == PLUS
7916 && CONSTANT_P (XEXP (op1, 1)))
7918 rtx constant_term = const0_rtx;
7920 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7923 /* Ensure that MULT comes first if there is one. */
7924 else if (GET_CODE (op0) == MULT)
7925 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7927 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7929 /* Let's also eliminate constants from op0 if possible. */
7930 op0 = eliminate_constant_term (op0, &constant_term);
7932 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7933 their sum should be a constant. Form it into OP1, since the
7934 result we want will then be OP0 + OP1. */
7936 temp = simplify_binary_operation (PLUS, mode, constant_term,
7941 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7944 /* Put a constant term last and put a multiplication first. */
7945 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7946 temp = op1, op1 = op0, op0 = temp;
7948 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7949 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7952 /* For initializers, we are allowed to return a MINUS of two
7953 symbolic constants. Here we handle all cases when both operands
7955 /* Handle difference of two symbolic constants,
7956 for the sake of an initializer. */
7957 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7958 && really_constant_p (TREE_OPERAND (exp, 0))
7959 && really_constant_p (TREE_OPERAND (exp, 1)))
7961 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7963 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7966 /* If the last operand is a CONST_INT, use plus_constant of
7967 the negated constant. Else make the MINUS. */
7968 if (GET_CODE (op1) == CONST_INT)
7969 return plus_constant (op0, - INTVAL (op1));
7971 return gen_rtx_MINUS (mode, op0, op1);
7973 /* Convert A - const to A + (-const). */
7974 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7976 tree negated = fold (build1 (NEGATE_EXPR, type,
7977 TREE_OPERAND (exp, 1)));
7979 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7980 /* If we can't negate the constant in TYPE, leave it alone and
7981 expand_binop will negate it for us. We used to try to do it
7982 here in the signed version of TYPE, but that doesn't work
7983 on POINTER_TYPEs. */;
7986 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7990 this_optab = ! unsignedp && flag_trapv
7991 && (GET_MODE_CLASS(mode) == MODE_INT)
7992 ? subv_optab : sub_optab;
7996 /* If first operand is constant, swap them.
7997 Thus the following special case checks need only
7998 check the second operand. */
7999 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8001 tree t1 = TREE_OPERAND (exp, 0);
8002 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8003 TREE_OPERAND (exp, 1) = t1;
8006 /* Attempt to return something suitable for generating an
8007 indexed address, for machines that support that. */
8009 if (modifier == EXPAND_SUM && mode == ptr_mode
8010 && host_integerp (TREE_OPERAND (exp, 1), 0))
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8015 /* If we knew for certain that this is arithmetic for an array
8016 reference, and we knew the bounds of the array, then we could
8017 apply the distributive law across (PLUS X C) for constant C.
8018 Without such knowledge, we risk overflowing the computation
8019 when both X and C are large, but X+C isn't. */
8020 /* ??? Could perhaps special-case EXP being unsigned and C being
8021 positive. In that case we are certain that X+C is no smaller
8022 than X and so the transformed expression will overflow iff the
8023 original would have. */
8025 if (GET_CODE (op0) != REG)
8026 op0 = force_operand (op0, NULL_RTX);
8027 if (GET_CODE (op0) != REG)
8028 op0 = copy_to_mode_reg (mode, op0);
8031 gen_rtx_MULT (mode, op0,
8032 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
8035 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8038 /* Check for multiplying things that have been extended
8039 from a narrower type. If this machine supports multiplying
8040 in that narrower type with a result in the desired type,
8041 do it that way, and avoid the explicit type-conversion. */
8042 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8043 && TREE_CODE (type) == INTEGER_TYPE
8044 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8045 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8046 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8047 && int_fits_type_p (TREE_OPERAND (exp, 1),
8048 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8049 /* Don't use a widening multiply if a shift will do. */
8050 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8051 > HOST_BITS_PER_WIDE_INT)
8052 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8054 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8055 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8057 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8058 /* If both operands are extended, they must either both
8059 be zero-extended or both be sign-extended. */
8060 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8062 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8064 enum machine_mode innermode
8065 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8066 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8067 ? smul_widen_optab : umul_widen_optab);
8068 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8069 ? umul_widen_optab : smul_widen_optab);
8070 if (mode == GET_MODE_WIDER_MODE (innermode))
8072 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8074 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8075 NULL_RTX, VOIDmode, 0);
8076 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8077 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8080 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8081 NULL_RTX, VOIDmode, 0);
8084 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8085 && innermode == word_mode)
8088 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8089 NULL_RTX, VOIDmode, 0);
8090 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8091 op1 = convert_modes (innermode, mode,
8092 expand_expr (TREE_OPERAND (exp, 1),
8093 NULL_RTX, VOIDmode, 0),
8096 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8097 NULL_RTX, VOIDmode, 0);
8098 temp = expand_binop (mode, other_optab, op0, op1, target,
8099 unsignedp, OPTAB_LIB_WIDEN);
8100 htem = expand_mult_highpart_adjust (innermode,
8101 gen_highpart (innermode, temp),
8103 gen_highpart (innermode, temp),
8105 emit_move_insn (gen_highpart (innermode, temp), htem);
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8111 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8112 return expand_mult (mode, op0, op1, target, unsignedp);
8114 case TRUNC_DIV_EXPR:
8115 case FLOOR_DIV_EXPR:
8117 case ROUND_DIV_EXPR:
8118 case EXACT_DIV_EXPR:
8119 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8121 /* Possible optimization: compute the dividend with EXPAND_SUM
8122 then if the divisor is constant can optimize the case
8123 where some terms of the dividend have coeffs divisible by it. */
8124 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8125 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8126 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8129 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8130 expensive divide. If not, combine will rebuild the original
8132 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8133 && TREE_CODE (type) == REAL_TYPE
8134 && !real_onep (TREE_OPERAND (exp, 0)))
8135 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8136 build (RDIV_EXPR, type,
8137 build_real (type, dconst1),
8138 TREE_OPERAND (exp, 1))),
8139 target, tmode, unsignedp);
8140 this_optab = sdiv_optab;
8143 case TRUNC_MOD_EXPR:
8144 case FLOOR_MOD_EXPR:
8146 case ROUND_MOD_EXPR:
8147 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8150 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8151 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8153 case FIX_ROUND_EXPR:
8154 case FIX_FLOOR_EXPR:
8156 abort (); /* Not used for C. */
8158 case FIX_TRUNC_EXPR:
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8161 target = gen_reg_rtx (mode);
8162 expand_fix (target, op0, unsignedp);
8166 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8168 target = gen_reg_rtx (mode);
8169 /* expand_float can't figure out what to do if FROM has VOIDmode.
8170 So give it the correct mode. With -O, cse will optimize this. */
8171 if (GET_MODE (op0) == VOIDmode)
8172 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8174 expand_float (target, op0,
8175 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8179 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8180 temp = expand_unop (mode,
8181 ! unsignedp && flag_trapv
8182 && (GET_MODE_CLASS(mode) == MODE_INT)
8183 ? negv_optab : neg_optab, op0, target, 0);
8189 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8191 /* Handle complex values specially. */
8192 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8193 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8194 return expand_complex_abs (mode, op0, target, unsignedp);
8196 /* Unsigned abs is simply the operand. Testing here means we don't
8197 risk generating incorrect code below. */
8198 if (TREE_UNSIGNED (type))
8201 return expand_abs (mode, op0, target, unsignedp,
8202 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8206 target = original_target;
8207 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8208 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8209 || GET_MODE (target) != mode
8210 || (GET_CODE (target) == REG
8211 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8212 target = gen_reg_rtx (mode);
8213 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8214 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8216 /* First try to do it with a special MIN or MAX instruction.
8217 If that does not win, use a conditional jump to select the proper value. */
8219 this_optab = (TREE_UNSIGNED (type)
8220 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8221 : (code == MIN_EXPR ? smin_optab : smax_optab));
8223 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8228 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8231 if (GET_CODE (target) == MEM)
8232 target = gen_reg_rtx (mode);
8235 emit_move_insn (target, op0);
8237 op0 = gen_label_rtx ();
8239 /* If this mode is an integer too wide to compare properly,
8240 compare word by word. Rely on cse to optimize constant cases. */
8241 if (GET_MODE_CLASS (mode) == MODE_INT
8242 && ! can_compare_p (GE, mode, ccp_jump))
8244 if (code == MAX_EXPR)
8245 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8246 target, op1, NULL_RTX, op0);
8248 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8249 op1, target, NULL_RTX, op0);
8253 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8254 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8255 unsignedp, mode, NULL_RTX, NULL_RTX,
8258 emit_move_insn (target, op1);
8263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8264 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8270 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8271 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8276 /* ??? Can optimize bitwise operations with one arg constant.
8277 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8278 and (a bitwise1 b) bitwise2 b (etc)
8279 but that is probably not worthwhile. */
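/* Illustrative example, not in the original source: by distributivity,
   (a & 4) | (a & b) could be rewritten as a & (4 | b), but no such
   folding is attempted here.  */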
8281 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8282 boolean values when we want in all cases to compute both of them. In
8283 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8284 as actual zero-or-1 values and then bitwise anding. In cases where
8285 there cannot be any side effects, better code would be made by
8286 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8287 how to recognize those cases. */
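/* Illustrative example, not in the original source: for int a, b,
   the test a && b may either branch (TRUTH_ANDIF_EXPR, so B is skipped
   when A is zero) or, as TRUTH_AND_EXPR here, compute both (a != 0)
   and (b != 0) as 0/1 values and bitwise-AND them.  */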
8289 case TRUTH_AND_EXPR:
8291 this_optab = and_optab;
8296 this_optab = ior_optab;
8299 case TRUTH_XOR_EXPR:
8301 this_optab = xor_optab;
8308 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8310 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8311 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8314 /* Could determine the answer when only additive constants differ. Also,
8315 the addition of one can be handled by changing the condition. */
8322 case UNORDERED_EXPR:
8329 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8333 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8334 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8336 && GET_CODE (original_target) == REG
8337 && (GET_MODE (original_target)
8338 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8340 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8343 /* If temp is constant, we can just compute the result. */
8344 if (GET_CODE (temp) == CONST_INT)
8346 if (INTVAL (temp) != 0)
8347 emit_move_insn (target, const1_rtx);
8349 emit_move_insn (target, const0_rtx);
8354 if (temp != original_target)
8356 enum machine_mode mode1 = GET_MODE (temp);
8357 if (mode1 == VOIDmode)
8358 mode1 = tmode != VOIDmode ? tmode : mode;
8360 temp = copy_to_mode_reg (mode1, temp);
8363 op1 = gen_label_rtx ();
8364 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8365 GET_MODE (temp), unsignedp, op1);
8366 emit_move_insn (temp, const1_rtx);
8371 /* If there is no set-flag instruction, we must generate a conditional
8372 store into a temporary variable. Drop through
8373 and handle this like && and ||. */
8375 case TRUTH_ANDIF_EXPR:
8376 case TRUTH_ORIF_EXPR:
8378 && (target == 0 || ! safe_from_p (target, exp, 1)
8379 /* Make sure we don't have a hard reg (such as function's return
8380 value) live across basic blocks, if not optimizing. */
8381 || (!optimize && GET_CODE (target) == REG
8382 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8383 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8386 emit_clr_insn (target);
8388 op1 = gen_label_rtx ();
8389 jumpifnot (exp, op1);
8392 emit_0_to_1_insn (target);
8395 return ignore ? const0_rtx : target;
8397 case TRUTH_NOT_EXPR:
8398 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8399 /* The parser is careful to generate TRUTH_NOT_EXPR
8400 only with operands that are always zero or one. */
8401 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8402 target, 1, OPTAB_LIB_WIDEN);
8408 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8410 return expand_expr (TREE_OPERAND (exp, 1),
8411 (ignore ? const0_rtx : target),
8415 /* If we would have a "singleton" (see below) were it not for a
8416 conversion in each arm, bring that conversion back out. */
8417 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8418 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8419 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8420 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8422 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8423 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8425 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8426 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8427 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8428 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8429 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8430 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8431 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8432 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8433 return expand_expr (build1 (NOP_EXPR, type,
8434 build (COND_EXPR, TREE_TYPE (iftrue),
8435 TREE_OPERAND (exp, 0),
8437 target, tmode, modifier);
8441 /* Note that COND_EXPRs whose type is a structure or union
8442 are required to be constructed to contain assignments of
8443 a temporary variable, so that we can evaluate them here
8444 for side effect only. If type is void, we must do likewise. */
8446 /* If an arm of the branch requires a cleanup,
8447 only that cleanup is performed. */
8450 tree binary_op = 0, unary_op = 0;
8452 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8453 convert it to our mode, if necessary. */
8454 if (integer_onep (TREE_OPERAND (exp, 1))
8455 && integer_zerop (TREE_OPERAND (exp, 2))
8456 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8460 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8465 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8466 if (GET_MODE (op0) == mode)
8470 target = gen_reg_rtx (mode);
8471 convert_move (target, op0, unsignedp);
8475 /* Check for X ? A + B : A. If we have this, we can copy A to the
8476 output and conditionally add B. Similarly for unary operations.
8477 Don't do this if X has side-effects because those side effects
8478 might affect A or B and the "?" operation is a sequence point in
8479 ANSI. (operand_equal_p tests for side effects.) */
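/* Illustrative example, not in the original source: x ? a + b : a can
   be compiled by copying A to the output and conditionally adding B;
   x ? -a : a is the corresponding unary case.  */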
8481 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8482 && operand_equal_p (TREE_OPERAND (exp, 2),
8483 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8484 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8485 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8486 && operand_equal_p (TREE_OPERAND (exp, 1),
8487 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8488 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8489 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8490 && operand_equal_p (TREE_OPERAND (exp, 2),
8491 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8492 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8493 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8494 && operand_equal_p (TREE_OPERAND (exp, 1),
8495 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8496 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8498 /* If we are not to produce a result, we have no target. Otherwise,
8499 if a target was specified use it; it will not be used as an
8500 intermediate target unless it is safe. If no target, use a temporary. */
8505 else if (original_target
8506 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8507 || (singleton && GET_CODE (original_target) == REG
8508 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8509 && original_target == var_rtx (singleton)))
8510 && GET_MODE (original_target) == mode
8511 #ifdef HAVE_conditional_move
8512 && (! can_conditionally_move_p (mode)
8513 || GET_CODE (original_target) == REG
8514 || TREE_ADDRESSABLE (type))
8516 && (GET_CODE (original_target) != MEM
8517 || TREE_ADDRESSABLE (type)))
8518 temp = original_target;
8519 else if (TREE_ADDRESSABLE (type))
8522 temp = assign_temp (type, 0, 0, 1);
8524 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8525 do the test of X as a store-flag operation, do this as
8526 A + ((X != 0) << log C). Similarly for other simple binary
8527 operators. Only do for C == 1 if BRANCH_COST is low. */
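/* Illustrative example, not in the original source: x ? a + 4 : a,
   with X computable by a store-flag insn, becomes
   a + ((x != 0) << 2), avoiding a branch entirely.  */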
8528 if (temp && singleton && binary_op
8529 && (TREE_CODE (binary_op) == PLUS_EXPR
8530 || TREE_CODE (binary_op) == MINUS_EXPR
8531 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8532 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8533 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8534 : integer_onep (TREE_OPERAND (binary_op, 1)))
8535 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8538 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8539 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8540 ? addv_optab : add_optab)
8541 : TREE_CODE (binary_op) == MINUS_EXPR
8542 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8543 ? subv_optab : sub_optab)
8544 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8547 /* If we had X ? A : A + 1, do this as A + (X == 0).
8549 We have to invert the truth value here and then put it
8550 back later if do_store_flag fails. We cannot simply copy
8551 TREE_OPERAND (exp, 0) to another variable and modify that
8552 because invert_truthvalue can modify the tree pointed to by its argument. */
8554 if (singleton == TREE_OPERAND (exp, 1))
8555 TREE_OPERAND (exp, 0)
8556 = invert_truthvalue (TREE_OPERAND (exp, 0));
8558 result = do_store_flag (TREE_OPERAND (exp, 0),
8559 (safe_from_p (temp, singleton, 1)
8561 mode, BRANCH_COST <= 1);
8563 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8564 result = expand_shift (LSHIFT_EXPR, mode, result,
8565 build_int_2 (tree_log2
8569 (safe_from_p (temp, singleton, 1)
8570 ? temp : NULL_RTX), 0);
8574 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8575 return expand_binop (mode, boptab, op1, result, temp,
8576 unsignedp, OPTAB_LIB_WIDEN);
8578 else if (singleton == TREE_OPERAND (exp, 1))
8579 TREE_OPERAND (exp, 0)
8580 = invert_truthvalue (TREE_OPERAND (exp, 0));
8583 do_pending_stack_adjust ();
8585 op0 = gen_label_rtx ();
8587 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8591 /* If the target conflicts with the other operand of the
8592 binary op, we can't use it. Also, we can't use the target
8593 if it is a hard register, because evaluating the condition
8594 might clobber it. */
8596 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8597 || (GET_CODE (temp) == REG
8598 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8599 temp = gen_reg_rtx (mode);
8600 store_expr (singleton, temp, 0);
8603 expand_expr (singleton,
8604 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8605 if (singleton == TREE_OPERAND (exp, 1))
8606 jumpif (TREE_OPERAND (exp, 0), op0);
8608 jumpifnot (TREE_OPERAND (exp, 0), op0);
8610 start_cleanup_deferral ();
8611 if (binary_op && temp == 0)
8612 /* Just touch the other operand. */
8613 expand_expr (TREE_OPERAND (binary_op, 1),
8614 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8616 store_expr (build (TREE_CODE (binary_op), type,
8617 make_tree (type, temp),
8618 TREE_OPERAND (binary_op, 1)),
8621 store_expr (build1 (TREE_CODE (unary_op), type,
8622 make_tree (type, temp)),
8626 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8627 comparison operator. If we have one of these cases, set the
8628 output to A, branch on A (cse will merge these two references),
8629 then set the output to FOO. */
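/* Illustrative example, not in the original source: for
   a != 0 ? a : b we store A into the output, branch on the same A
   (cse merges the two references), and store B on the fall-through
   path.  */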
8631 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8632 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8633 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8634 TREE_OPERAND (exp, 1), 0)
8635 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8636 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8637 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8639 if (GET_CODE (temp) == REG
8640 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8641 temp = gen_reg_rtx (mode);
8642 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8643 jumpif (TREE_OPERAND (exp, 0), op0);
8645 start_cleanup_deferral ();
8646 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8650 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8651 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8652 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8653 TREE_OPERAND (exp, 2), 0)
8654 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8655 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8656 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8658 if (GET_CODE (temp) == REG
8659 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8660 temp = gen_reg_rtx (mode);
8661 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8662 jumpifnot (TREE_OPERAND (exp, 0), op0);
8664 start_cleanup_deferral ();
8665 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8670 op1 = gen_label_rtx ();
8671 jumpifnot (TREE_OPERAND (exp, 0), op0);
8673 start_cleanup_deferral ();
8675 /* One branch of the cond can be void, if it never returns. For
8676 example A ? throw : E. */
8678 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8679 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8681 expand_expr (TREE_OPERAND (exp, 1),
8682 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8683 end_cleanup_deferral ();
8685 emit_jump_insn (gen_jump (op1));
8688 start_cleanup_deferral ();
8690 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8691 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8693 expand_expr (TREE_OPERAND (exp, 2),
8694 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8697 end_cleanup_deferral ();
8708 /* Something needs to be initialized, but we didn't know
8709 where that thing was when building the tree. For example,
8710 it could be the return value of a function, or a parameter
8711 to a function which is laid out on the stack, or a temporary
8712 variable which must be passed by reference.
8714 We guarantee that the expression will either be constructed
8715 or copied into our original target. */
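/* Illustrative example, not in the original source: in C++,
   S s = f (); may be represented as a TARGET_EXPR whose slot is S,
   so that the return value of f is constructed directly in S rather
   than copied into it.  */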
8717 tree slot = TREE_OPERAND (exp, 0);
8718 tree cleanups = NULL_TREE;
8721 if (TREE_CODE (slot) != VAR_DECL)
8725 target = original_target;
8727 /* Set this here so that if we get a target that refers to a
8728 register variable that's already been used, put_reg_into_stack
8729 knows that it should fix up those uses. */
8730 TREE_USED (slot) = 1;
8734 if (DECL_RTL_SET_P (slot))
8736 target = DECL_RTL (slot);
8737 /* We have already expanded the slot, so don't do it again. */
8739 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8744 target = assign_temp (type, 2, 0, 1);
8745 /* All temp slots at this level must not conflict. */
8746 preserve_temp_slots (target);
8747 SET_DECL_RTL (slot, target);
8748 if (TREE_ADDRESSABLE (slot))
8749 put_var_into_stack (slot);
8751 /* Since SLOT is not known to the called function
8752 to belong to its stack frame, we must build an explicit
8753 cleanup. This case occurs when we must build up a reference
8754 to pass the reference as an argument. In this case,
8755 it is very likely that such a reference need not be built here. */
8758 if (TREE_OPERAND (exp, 2) == 0)
8759 TREE_OPERAND (exp, 2)
8760 = (*lang_hooks.maybe_build_cleanup) (slot);
8761 cleanups = TREE_OPERAND (exp, 2);
8766 /* This case does occur when expanding a parameter which
8767 needs to be constructed on the stack. The target
8768 is the actual stack address that we want to initialize.
8769 The function we call will perform the cleanup in this case. */
8771 /* If we have already assigned it space, use that space,
8772 not the target that we were passed in, as our target
8773 parameter is only a hint. */
8774 if (DECL_RTL_SET_P (slot))
8776 target = DECL_RTL (slot);
8777 /* We have already expanded the slot, so don't do it again. */
8779 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8784 SET_DECL_RTL (slot, target);
8785 /* If we must have an addressable slot, then make sure that
8786 the RTL that we just stored in slot is OK. */
8787 if (TREE_ADDRESSABLE (slot))
8788 put_var_into_stack (slot);
8792 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8793 /* Mark it as expanded. */
8794 TREE_OPERAND (exp, 1) = NULL_TREE;
8796 store_expr (exp1, target, 0);
8798 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8805 tree lhs = TREE_OPERAND (exp, 0);
8806 tree rhs = TREE_OPERAND (exp, 1);
8808 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8814 /* If lhs is complex, expand calls in rhs before computing it.
8815 That's so we don't compute a pointer and save it over a
8816 call. If lhs is simple, compute it first so we can give it
8817 as a target if the rhs is just a call. This avoids an
8818 extra temp and copy, and prevents a partial subsumption
8819 which makes bad code. Actually we could treat
8820 component_ref's of vars like vars. */
8822 tree lhs = TREE_OPERAND (exp, 0);
8823 tree rhs = TREE_OPERAND (exp, 1);
8827 /* Check for |= or &= of a bitfield of size one into another bitfield
8828 of size one. In this case, (unless we need the result of the
8829 assignment) we can do this more efficiently with a
8830 test followed by an assignment, if necessary.
8832 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8833 things change so we do, this code should be enhanced to handle it. */
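/* Illustrative example, not in the original source: for one-bit
   fields, s.a |= s.b can be expanded as if (s.b) s.a = 1; -- a test
   and a conditional store instead of a read-modify-write of S.A.  */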
8836 && TREE_CODE (lhs) == COMPONENT_REF
8837 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8838 || TREE_CODE (rhs) == BIT_AND_EXPR)
8839 && TREE_OPERAND (rhs, 0) == lhs
8840 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8841 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8842 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8844 rtx label = gen_label_rtx ();
8846 do_jump (TREE_OPERAND (rhs, 1),
8847 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8848 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8849 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8850 (TREE_CODE (rhs) == BIT_IOR_EXPR
8852 : integer_zero_node)),
8854 do_pending_stack_adjust ();
8859 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8865 if (!TREE_OPERAND (exp, 0))
8866 expand_null_return ();
8868 expand_return (TREE_OPERAND (exp, 0));
8871 case PREINCREMENT_EXPR:
8872 case PREDECREMENT_EXPR:
8873 return expand_increment (exp, 0, ignore);
8875 case POSTINCREMENT_EXPR:
8876 case POSTDECREMENT_EXPR:
8877 /* Faster to treat as pre-increment if result is not used. */
8878 return expand_increment (exp, ! ignore, ignore);
8881 /* Are we taking the address of a nested function? */
8882 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8883 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8884 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8885 && ! TREE_STATIC (exp))
8887 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8888 op0 = force_operand (op0, target);
8890 /* If we are taking the address of something erroneous, just use zero. */
8892 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8894 /* If we are taking the address of a constant and are at the
8895 top level, we have to use output_constant_def since we can't
8896 call force_const_mem at top level. */
8898 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8899 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8901 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8904 /* We make sure to pass const0_rtx down if we came in with
8905 ignore set, to avoid doing the cleanups twice for the same thing. */
8906 op0 = expand_expr (TREE_OPERAND (exp, 0),
8907 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8908 (modifier == EXPAND_INITIALIZER
8909 ? modifier : EXPAND_CONST_ADDRESS));
8911 /* If we are going to ignore the result, OP0 will have been set
8912 to const0_rtx, so just return it. Don't get confused and
8913 think we are taking the address of the constant. */
8917 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8918 clever and return a REG when given a MEM. */
8919 op0 = protect_from_queue (op0, 1);
8921 /* We would like the object in memory. If it is a constant, we can
8922 have it be statically allocated into memory. For a non-constant,
8923 we need to allocate some memory and store the value into it. */
8925 if (CONSTANT_P (op0))
8926 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8928 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8929 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8930 || GET_CODE (op0) == PARALLEL)
8932 /* If the operand is a SAVE_EXPR, we can deal with this by
8933 forcing the SAVE_EXPR into memory. */
8934 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8936 put_var_into_stack (TREE_OPERAND (exp, 0));
8937 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8941 /* If this object is in a register, it can't be BLKmode. */
8942 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8943 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8945 if (GET_CODE (op0) == PARALLEL)
8946 /* Handle calls that pass values in multiple
8947 non-contiguous locations. The Irix 6 ABI has examples of this. */
8949 emit_group_store (memloc, op0,
8950 int_size_in_bytes (inner_type));
8952 emit_move_insn (memloc, op0);
8958 if (GET_CODE (op0) != MEM)
8961 mark_temp_addr_taken (op0);
8962 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8964 op0 = XEXP (op0, 0);
8965 #ifdef POINTERS_EXTEND_UNSIGNED
8966 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8967 && mode == ptr_mode)
8968 op0 = convert_memory_address (ptr_mode, op0);
8973 /* If OP0 is not aligned at least as much as the type requires, we
8974 need to make a temporary, copy OP0 to it, and take the address of
8975 the temporary. We want to use the alignment of the type, not of
8976 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8977 the test for BLKmode means that can't happen. The test for
8978 BLKmode is because we never make mis-aligned MEMs with non-BLKmode.
8981 We don't need to do this at all if the machine doesn't have
8982 strict alignment. */
8983 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8984 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8986 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8988 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8990 = assign_stack_temp_for_type
8991 (TYPE_MODE (inner_type),
8992 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8993 : int_size_in_bytes (inner_type),
8994 1, build_qualified_type (inner_type,
8995 (TYPE_QUALS (inner_type)
8996 | TYPE_QUAL_CONST)));
8998 if (TYPE_ALIGN_OK (inner_type))
9001 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9006 op0 = force_operand (XEXP (op0, 0), target);
9010 && GET_CODE (op0) != REG
9011 && modifier != EXPAND_CONST_ADDRESS
9012 && modifier != EXPAND_INITIALIZER
9013 && modifier != EXPAND_SUM)
9014 op0 = force_reg (Pmode, op0);
9016 if (GET_CODE (op0) == REG
9017 && ! REG_USERVAR_P (op0))
9018 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9020 #ifdef POINTERS_EXTEND_UNSIGNED
9021 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9022 && mode == ptr_mode)
9023 op0 = convert_memory_address (ptr_mode, op0);
9028 case ENTRY_VALUE_EXPR:
9031 /* COMPLEX type for Extended Pascal & Fortran */
9034 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9037 /* Get the rtx for the operands. */
9038 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9039 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9042 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9046 /* Move the real (op0) and imaginary (op1) parts to their location. */
9047 emit_move_insn (gen_realpart (mode, target), op0);
9048 emit_move_insn (gen_imagpart (mode, target), op1);
9050 insns = get_insns ();
9053 /* Complex construction should appear as a single unit. */
9054 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9055 each with a separate pseudo as destination.
9056 It's not correct for flow to treat them as a unit. */
9057 if (GET_CODE (target) != CONCAT)
9058 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9066 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9067 return gen_realpart (mode, op0);
9070 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9071 return gen_imagpart (mode, op0);
9075 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9079 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9082 target = gen_reg_rtx (mode);
9086 /* Store the realpart and the negated imagpart to target. */
9087 emit_move_insn (gen_realpart (partmode, target),
9088 gen_realpart (partmode, op0));
9090 imag_t = gen_imagpart (partmode, target);
9091 temp = expand_unop (partmode,
9092 ! unsignedp && flag_trapv
9093 && (GET_MODE_CLASS(partmode) == MODE_INT)
9094 ? negv_optab : neg_optab,
9095 gen_imagpart (partmode, op0), imag_t, 0);
9097 emit_move_insn (imag_t, temp);
9099 insns = get_insns ();
9102 /* Conjugate should appear as a single unit.
9103 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9104 each with a separate pseudo as destination.
9105 It's not correct for flow to treat them as a unit. */
9106 if (GET_CODE (target) != CONCAT)
9107 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9114 case TRY_CATCH_EXPR:
9116 tree handler = TREE_OPERAND (exp, 1);
9118 expand_eh_region_start ();
9120 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9122 expand_eh_region_end_cleanup (handler);
9127 case TRY_FINALLY_EXPR:
9129 tree try_block = TREE_OPERAND (exp, 0);
9130 tree finally_block = TREE_OPERAND (exp, 1);
9132 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9134 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9135 is not sufficient, so we cannot expand the block twice.
9136 So we play games with GOTO_SUBROUTINE_EXPR to let us
9137 expand the thing only once. */
9138 /* When not optimizing, we go ahead with this form since
9139 (1) user breakpoints operate more predictably without
9140 code duplication, and
9141 (2) we're not running any of the global optimizers
9142 that would explode in time/space with the highly
9143 connected CFG created by the indirect branching. */
9145 rtx finally_label = gen_label_rtx ();
9146 rtx done_label = gen_label_rtx ();
9147 rtx return_link = gen_reg_rtx (Pmode);
9148 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9149 (tree) finally_label, (tree) return_link);
9150 TREE_SIDE_EFFECTS (cleanup) = 1;
9152 /* Start a new binding layer that will keep track of all cleanup
9153 actions to be performed. */
9154 expand_start_bindings (2);
9155 target_temp_slot_level = temp_slot_level;
9157 expand_decl_cleanup (NULL_TREE, cleanup);
9158 op0 = expand_expr (try_block, target, tmode, modifier);
9160 preserve_temp_slots (op0);
9161 expand_end_bindings (NULL_TREE, 0, 0);
9162 emit_jump (done_label);
9163 emit_label (finally_label);
9164 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9165 emit_indirect_jump (return_link);
9166 emit_label (done_label);
9170 expand_start_bindings (2);
9171 target_temp_slot_level = temp_slot_level;
9173 expand_decl_cleanup (NULL_TREE, finally_block);
9174 op0 = expand_expr (try_block, target, tmode, modifier);
9176 preserve_temp_slots (op0);
9177 expand_end_bindings (NULL_TREE, 0, 0);
9183 case GOTO_SUBROUTINE_EXPR:
9185 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9186 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9187 rtx return_address = gen_label_rtx ();
9188 emit_move_insn (return_link,
9189 gen_rtx_LABEL_REF (Pmode, return_address));
9191 emit_label (return_address);
9196 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9199 return get_exception_pointer (cfun);
9202 /* Function descriptors are not valid except as
9203 initialization constants, and should not be expanded. */
9207 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9210 /* Here to do an ordinary binary operator, generating an instruction
9211 from the optab already placed in `this_optab'. */
9213 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9215 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9216 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9218 temp = expand_binop (mode, this_optab, op0, op1, target,
9219 unsignedp, OPTAB_LIB_WIDEN);
9225 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9226 when applied to the address of EXP produces an address known to be
9227 aligned more than BIGGEST_ALIGNMENT. */
9230 is_aligning_offset (offset, exp)
9234 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9235 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9236 || TREE_CODE (offset) == NOP_EXPR
9237 || TREE_CODE (offset) == CONVERT_EXPR
9238 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9239 offset = TREE_OPERAND (offset, 0);
9241 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9242 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9243 if (TREE_CODE (offset) != BIT_AND_EXPR
9244 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9245 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9246 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9249 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9250 It must be NEGATE_EXPR. Then strip any more conversions. */
9251 offset = TREE_OPERAND (offset, 0);
9252 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9253 || TREE_CODE (offset) == NOP_EXPR
9254 || TREE_CODE (offset) == CONVERT_EXPR)
9255 offset = TREE_OPERAND (offset, 0);
9257 if (TREE_CODE (offset) != NEGATE_EXPR)
9260 offset = TREE_OPERAND (offset, 0);
9261 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9262 || TREE_CODE (offset) == NOP_EXPR
9263 || TREE_CODE (offset) == CONVERT_EXPR)
9264 offset = TREE_OPERAND (offset, 0);
9266 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9267 whose type is the same as EXP. */
9268 return (TREE_CODE (offset) == ADDR_EXPR
9269 && (TREE_OPERAND (offset, 0) == exp
9270 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9271 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9272 == TREE_TYPE (exp)))));
9275 /* Return the tree node if ARG corresponds to a string constant, or zero
9276 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9277 in bytes within the string that ARG is accessing. The type of the
9278 offset will be `sizetype'. */
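/* Illustrative example, not in the original source: for the argument
   "hello" + 2, a PLUS_EXPR of an ADDR_EXPR of a STRING_CST and the
   constant 2, the STRING_CST for "hello" is returned and *PTR_OFFSET
   is set to (sizetype) 2.  */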
9281 string_constant (arg, ptr_offset)
9287 if (TREE_CODE (arg) == ADDR_EXPR
9288 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9290 *ptr_offset = size_zero_node;
9291 return TREE_OPERAND (arg, 0);
9293 else if (TREE_CODE (arg) == PLUS_EXPR)
9295 tree arg0 = TREE_OPERAND (arg, 0);
9296 tree arg1 = TREE_OPERAND (arg, 1);
9301 if (TREE_CODE (arg0) == ADDR_EXPR
9302 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9304 *ptr_offset = convert (sizetype, arg1);
9305 return TREE_OPERAND (arg0, 0);
9307 else if (TREE_CODE (arg1) == ADDR_EXPR
9308 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9310 *ptr_offset = convert (sizetype, arg0);
9311 return TREE_OPERAND (arg1, 0);
9318 /* Expand code for a post- or pre- increment or decrement
9319 and return the RTX for the result.
9320 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
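/* Illustrative example, not in the original source: for s.f++ the
   COMPONENT_REF is stabilized so the address of s.f is computed only
   once; with POST == 1 the value from before the increment is the
   result.  */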
9323 expand_increment (exp, post, ignore)
9329 tree incremented = TREE_OPERAND (exp, 0);
9330 optab this_optab = add_optab;
9332 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9333 int op0_is_copy = 0;
9334 int single_insn = 0;
9335 /* 1 means we can't store into OP0 directly,
9336 because it is a subreg narrower than a word,
9337 and we don't dare clobber the rest of the word. */
9340 /* Stabilize any component ref that might need to be
9341 evaluated more than once below. */
9343 || TREE_CODE (incremented) == BIT_FIELD_REF
9344 || (TREE_CODE (incremented) == COMPONENT_REF
9345 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9346 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9347 incremented = stabilize_reference (incremented);
9348 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9349 ones into save exprs so that they don't accidentally get evaluated
9350 more than once by the code below. */
9351 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9352 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9353 incremented = save_expr (incremented);
9355 /* Compute the operands as RTX.
9356 Note whether OP0 is the actual lvalue or a copy of it:
9357 I believe it is a copy iff it is a register or subreg
9358 and insns were generated in computing it. */
9360 temp = get_last_insn ();
9361 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9363 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9364 in place but instead must do sign- or zero-extension during assignment,
9365 so we copy it into a new register and let the code below use it as a copy.
9368 Note that we can safely modify this SUBREG since it is known not to be
9369 shared (it was made by the expand_expr call above). */
9371 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9374 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9378 else if (GET_CODE (op0) == SUBREG
9379 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9381 /* We cannot increment this SUBREG in place. If we are
9382 post-incrementing, get a copy of the old value. Otherwise,
9383 just mark that we cannot increment in place. */
9385 op0 = copy_to_reg (op0);
9390 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9391 && temp != get_last_insn ());
9392 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9394 /* Decide whether incrementing or decrementing. */
9395 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9396 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9397 this_optab = sub_optab;
9399 /* Convert decrement by a constant into a negative increment. */
9400 if (this_optab == sub_optab
9401 && GET_CODE (op1) == CONST_INT)
9403 op1 = GEN_INT (-INTVAL (op1));
9404 this_optab = add_optab;
9407 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9408 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9410 /* For a preincrement, see if we can do this with a single instruction. */
9413 icode = (int) this_optab->handlers[(int) mode].insn_code;
9414 if (icode != (int) CODE_FOR_nothing
9415 /* Make sure that OP0 is valid for operands 0 and 1
9416 of the insn we want to queue. */
9417 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9418 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9419 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9423 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9424 then we cannot just increment OP0. We must therefore contrive to
9425 increment the original value. Then, for postincrement, we can return
9426 OP0 since it is a copy of the old value. For preincrement, expand here
9427 unless we can do it with a single insn.
9429 Likewise if storing directly into OP0 would clobber high bits
9430 we need to preserve (bad_subreg). */
9431 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9433 /* This is the easiest way to increment the value wherever it is.
9434 Problems with multiple evaluation of INCREMENTED are prevented
9435 because either (1) it is a component_ref or preincrement,
9436 in which case it was stabilized above, or (2) it is an array_ref
9437 with constant index in an array in a register, which is
9438 safe to reevaluate. */
9439 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9440 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9441 ? MINUS_EXPR : PLUS_EXPR),
9444 TREE_OPERAND (exp, 1));
9446 while (TREE_CODE (incremented) == NOP_EXPR
9447 || TREE_CODE (incremented) == CONVERT_EXPR)
9449 newexp = convert (TREE_TYPE (incremented), newexp);
9450 incremented = TREE_OPERAND (incremented, 0);
9453 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9454 return post ? op0 : temp;
9459 /* We have a true reference to the value in OP0.
9460 If there is an insn to add or subtract in this mode, queue it.
9461 Queueing the increment insn avoids the register shuffling
9462 that often results if we must increment now and first save
9463 the old value for subsequent use. */
9465 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9466 op0 = stabilize (op0);
9469 icode = (int) this_optab->handlers[(int) mode].insn_code;
9470 if (icode != (int) CODE_FOR_nothing
9471 /* Make sure that OP0 is valid for operands 0 and 1
9472 of the insn we want to queue. */
9473 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9474 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9476 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9477 op1 = force_reg (mode, op1);
9479 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9481 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9483 rtx addr = (general_operand (XEXP (op0, 0), mode)
9484 ? force_reg (Pmode, XEXP (op0, 0))
9485 : copy_to_reg (XEXP (op0, 0)));
9488 op0 = replace_equiv_address (op0, addr);
9489 temp = force_reg (GET_MODE (op0), op0);
9490 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9491 op1 = force_reg (mode, op1);
9493 /* The increment queue is LIFO, thus we have to `queue'
9494 the instructions in reverse order. */
9495 enqueue_insn (op0, gen_move_insn (op0, temp));
9496 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9501 /* Preincrement, or we can't increment with one simple insn. */
9503 /* Save a copy of the value before inc or dec, to return it later. */
9504 temp = value = copy_to_reg (op0);
9506 /* Arrange to return the incremented value. */
9507 /* Copy the rtx because expand_binop will protect from the queue,
9508 and the results of that would be invalid for us to return
9509 if our caller does emit_queue before using our result. */
9510 temp = copy_rtx (value = op0);
9512 /* Increment however we can. */
9513 op1 = expand_binop (mode, this_optab, value, op1, op0,
9514 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9516 /* Make sure the value is stored into OP0. */
9518 emit_move_insn (op0, op1);
9523 /* At the start of a function, record that we have no previously-pushed
9524 arguments waiting to be popped. */
9527 init_pending_stack_adjust ()
9529 pending_stack_adjust = 0;
9532 /* When exiting from a function, if safe, clear out any pending stack adjust
9533 so the adjustment won't get done.
9535 Note, if the current function calls alloca, then it must have a
9536 frame pointer regardless of the value of flag_omit_frame_pointer. */
9539 clear_pending_stack_adjust ()
9541 #ifdef EXIT_IGNORE_STACK
9543 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9544 && EXIT_IGNORE_STACK
9545 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9546 && ! flag_inline_functions)
9548 stack_pointer_delta -= pending_stack_adjust,
9549 pending_stack_adjust = 0;
9554 /* Pop any previously-pushed arguments that have not been popped yet. */
9557 do_pending_stack_adjust ()
9559 if (inhibit_defer_pop == 0)
9561 if (pending_stack_adjust != 0)
9562 adjust_stack (GEN_INT (pending_stack_adjust));
9563 pending_stack_adjust = 0;
9567 /* Expand conditional expressions. */
9569 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9570 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9574 jumpifnot (exp, label)
9578 do_jump (exp, label, NULL_RTX);
9581 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9588 do_jump (exp, NULL_RTX, label);
9591 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9592 the result is zero, or IF_TRUE_LABEL if the result is one.
9593 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9594 meaning fall through in that case.
9596 do_jump always does any pending stack adjust except when it does not
9597 actually perform a jump. An example where there is no jump
9598 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9600 This function is responsible for optimizing cases such as
9601 &&, || and comparison operators in EXP. */
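/* Illustrative example, not in the original source: for EXP a && b,
   we emit a jump to IF_FALSE_LABEL as soon as A evaluates to zero and
   otherwise test B the same way; no 0-or-1 value for the && is ever
   materialized.  */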
9604 do_jump (exp, if_false_label, if_true_label)
9606 rtx if_false_label, if_true_label;
9608 enum tree_code code = TREE_CODE (exp);
9609 /* Some cases need to create a label to jump to
9610 in order to properly fall through.
9611 These cases set DROP_THROUGH_LABEL nonzero. */
9612 rtx drop_through_label = 0;
9616 enum machine_mode mode;
9618 #ifdef MAX_INTEGER_COMPUTATION_MODE
9619 check_max_integer_computation_mode (exp);
9630 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9636 /* This is not true with #pragma weak */
9638 /* The address of something can never be zero. */
9640 emit_jump (if_true_label);
9645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9646 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9647 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9648 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9651 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9653 if ((TYPE_PRECISION (TREE_TYPE (exp))
9654 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9656 case NON_LVALUE_EXPR:
9657 case REFERENCE_EXPR:
9662 /* These cannot change zero->non-zero or vice versa. */
9663 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9666 case WITH_RECORD_EXPR:
9667 /* Put the object on the placeholder list, recurse through our first
9668 operand, and pop the list. */
9669 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9671 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9672 placeholder_list = TREE_CHAIN (placeholder_list);
9676 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9677 a test and can be longer if the test is eliminated. */
9679 /* Reduce to minus. */
9680 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9681 TREE_OPERAND (exp, 0),
9682 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9683 TREE_OPERAND (exp, 1))));
9684 /* Process as MINUS. */
9688 /* Non-zero iff operands of minus differ. */
9689 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9690 TREE_OPERAND (exp, 0),
9691 TREE_OPERAND (exp, 1)),
9692 NE, NE, if_false_label, if_true_label);
9696 /* If we are AND'ing with a small constant, do this comparison in the
9697 smallest type that fits. If the machine doesn't have comparisons
9698 that small, it will be converted back to the wider comparison.
9699 This helps if we are testing the sign bit of a narrower object.
9700 combine can't do this for us because it can't know whether a
9701 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
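/* Illustrative example, not in the original source: for int x, the
   test (x & 0x80) != 0 involves only bit 7, so on a machine with byte
   compares it can be done as a QImode comparison of the low byte.  */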
9703 if (! SLOW_BYTE_ACCESS
9704 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9705 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9706 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9707 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9708 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9709 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9710 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9711 != CODE_FOR_nothing))
9713 do_jump (convert (type, exp), if_false_label, if_true_label);
9718 case TRUTH_NOT_EXPR:
9719 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9722 case TRUTH_ANDIF_EXPR:
9723 if (if_false_label == 0)
9724 if_false_label = drop_through_label = gen_label_rtx ();
9725 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9726 start_cleanup_deferral ();
9727 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9728 end_cleanup_deferral ();
9731 case TRUTH_ORIF_EXPR:
9732 if (if_true_label == 0)
9733 if_true_label = drop_through_label = gen_label_rtx ();
9734 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9735 start_cleanup_deferral ();
9736 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9737 end_cleanup_deferral ();
9742 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9743 preserve_temp_slots (NULL_RTX);
9747 do_pending_stack_adjust ();
9748 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9754 case ARRAY_RANGE_REF:
9756 HOST_WIDE_INT bitsize, bitpos;
9758 enum machine_mode mode;
9763 /* Get description of this reference. We don't actually care
9764 about the underlying object here. */
9765 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9766 &unsignedp, &volatilep);
9768 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9769 if (! SLOW_BYTE_ACCESS
9770 && type != 0 && bitsize >= 0
9771 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9772 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9773 != CODE_FOR_nothing))
9775 do_jump (convert (type, exp), if_false_label, if_true_label);
9782 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9783 if (integer_onep (TREE_OPERAND (exp, 1))
9784 && integer_zerop (TREE_OPERAND (exp, 2)))
9785 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9787 else if (integer_zerop (TREE_OPERAND (exp, 1))
9788 && integer_onep (TREE_OPERAND (exp, 2)))
9789 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9793 rtx label1 = gen_label_rtx ();
9794 drop_through_label = gen_label_rtx ();
9796 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9798 start_cleanup_deferral ();
9799 /* Now the THEN-expression. */
9800 do_jump (TREE_OPERAND (exp, 1),
9801 if_false_label ? if_false_label : drop_through_label,
9802 if_true_label ? if_true_label : drop_through_label);
9803 /* In case the do_jump just above never jumps. */
9804 do_pending_stack_adjust ();
9805 emit_label (label1);
9807 /* Now the ELSE-expression. */
9808 do_jump (TREE_OPERAND (exp, 2),
9809 if_false_label ? if_false_label : drop_through_label,
9810 if_true_label ? if_true_label : drop_through_label);
9811 end_cleanup_deferral ();
9817 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9819 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9820 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9822 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9823 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9826 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9827 fold (build (EQ_EXPR, TREE_TYPE (exp),
9828 fold (build1 (REALPART_EXPR,
9829 TREE_TYPE (inner_type),
9831 fold (build1 (REALPART_EXPR,
9832 TREE_TYPE (inner_type),
9834 fold (build (EQ_EXPR, TREE_TYPE (exp),
9835 fold (build1 (IMAGPART_EXPR,
9836 TREE_TYPE (inner_type),
9838 fold (build1 (IMAGPART_EXPR,
9839 TREE_TYPE (inner_type),
9841 if_false_label, if_true_label);
9844 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9845 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9847 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9848 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9849 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9851 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9857 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9859 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9860 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9862 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9863 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9866 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9867 fold (build (NE_EXPR, TREE_TYPE (exp),
9868 fold (build1 (REALPART_EXPR,
9869 TREE_TYPE (inner_type),
9871 fold (build1 (REALPART_EXPR,
9872 TREE_TYPE (inner_type),
9874 fold (build (NE_EXPR, TREE_TYPE (exp),
9875 fold (build1 (IMAGPART_EXPR,
9876 TREE_TYPE (inner_type),
9878 fold (build1 (IMAGPART_EXPR,
9879 TREE_TYPE (inner_type),
9881 if_false_label, if_true_label);
9884 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9885 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9887 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9888 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9889 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9891 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9896 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9897 if (GET_MODE_CLASS (mode) == MODE_INT
9898 && ! can_compare_p (LT, mode, ccp_jump))
9899 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9901 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9905 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9906 if (GET_MODE_CLASS (mode) == MODE_INT
9907 && ! can_compare_p (LE, mode, ccp_jump))
9908 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9910 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9914 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9915 if (GET_MODE_CLASS (mode) == MODE_INT
9916 && ! can_compare_p (GT, mode, ccp_jump))
9917 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9919 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9923 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9924 if (GET_MODE_CLASS (mode) == MODE_INT
9925 && ! can_compare_p (GE, mode, ccp_jump))
9926 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9928 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9931 case UNORDERED_EXPR:
9934 enum rtx_code cmp, rcmp;
9937 if (code == UNORDERED_EXPR)
9938 cmp = UNORDERED, rcmp = ORDERED;
9940 cmp = ORDERED, rcmp = UNORDERED;
9941 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9944 if (! can_compare_p (cmp, mode, ccp_jump)
9945 && (can_compare_p (rcmp, mode, ccp_jump)
9946 /* If the target doesn't provide either UNORDERED or ORDERED
9947 comparisons, canonicalize on UNORDERED for the library. */
9948 || rcmp == UNORDERED))
9952 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9954 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9959 enum rtx_code rcode1;
9960 enum tree_code tcode2;
9984 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9985 if (can_compare_p (rcode1, mode, ccp_jump))
9986 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9990 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9991 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9994 /* If the target doesn't support combined unordered
9995 compares, decompose into UNORDERED + comparison. */
9996 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9997 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9998 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9999 do_jump (exp, if_false_label, if_true_label);
10005 /* Handle specially __builtin_expect (<test>, 0) and
10006 __builtin_expect (<test>, 1).
10008 We need to do this here, so that <test> is not converted to a SCC
10009 operation on machines that use condition code registers and COMPARE
10010 like the PowerPC, and then the jump is done based on whether the SCC
10011 operation produced a 1 or 0. */
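/* Illustrative example, not in the original source: for
   if (__builtin_expect (x > 0, 0)) we want the jump made directly on
   x > 0; materializing the comparison as a 0/1 SCC value first and
   then jumping on that value would defeat the point of the hint.  */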
10013 /* Check for a built-in function. */
10014 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10016 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10017 tree arglist = TREE_OPERAND (exp, 1);
10019 if (TREE_CODE (fndecl) == FUNCTION_DECL
10020 && DECL_BUILT_IN (fndecl)
10021 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10022 && arglist != NULL_TREE
10023 && TREE_CHAIN (arglist) != NULL_TREE)
10025 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10028 if (seq != NULL_RTX)
10035 /* fall through and generate the normal code. */
10039 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10041 /* This is not needed any more and causes poor code since it causes
10042 comparisons and tests from non-SI objects to have different code sequences. */
10044 /* Copy to register to avoid generating bad insns by cse
10045 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10046 if (!cse_not_expected && GET_CODE (temp) == MEM)
10047 temp = copy_to_reg (temp);
10049 do_pending_stack_adjust ();
10050 /* Do any postincrements in the expression that was tested. */
10053 if (GET_CODE (temp) == CONST_INT
10054 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10055 || GET_CODE (temp) == LABEL_REF)
10057 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10059 emit_jump (target);
10061 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10062 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10063 /* Note swapping the labels gives us not-equal. */
10064 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10065 else if (GET_MODE (temp) != VOIDmode)
10066 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10067 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10068 GET_MODE (temp), NULL_RTX,
10069 if_false_label, if_true_label);
10074 if (drop_through_label)
10076 /* If do_jump produces code that might be jumped around,
10077 do any stack adjusts from that code, before the place
10078 where control merges in. */
10079 do_pending_stack_adjust ();
10080 emit_label (drop_through_label);
10084 /* Given a comparison expression EXP for values too wide to be compared
10085 with one insn, test the comparison and jump to the appropriate label.
10086 The code of EXP is ignored; we always test GT if SWAP is 0,
10087 and LT if SWAP is 1. */
10090 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10093 rtx if_false_label, if_true_label;
10095 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10096 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10097 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10098 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10100 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
10103 /* Compare OP0 with OP1, word at a time, in mode MODE.
10104 UNSIGNEDP says to do unsigned comparison.
10105 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
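/* Illustrative example, not in the original source: for DImode
   operands on a 32-bit target the high-order words are compared
   first; only when they are equal do the low-order words, compared
   unsigned, decide the result.  */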
10108 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10109 enum machine_mode mode;
10112 rtx if_false_label, if_true_label;
10114 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10115 rtx drop_through_label = 0;
10118 if (! if_true_label || ! if_false_label)
10119 drop_through_label = gen_label_rtx ();
10120 if (! if_true_label)
10121 if_true_label = drop_through_label;
10122 if (! if_false_label)
10123 if_false_label = drop_through_label;
10125 /* Compare a word at a time, high order first. */
10126 for (i = 0; i < nwords; i++)
10128 rtx op0_word, op1_word;
10130 if (WORDS_BIG_ENDIAN)
10132 op0_word = operand_subword_force (op0, i, mode);
10133 op1_word = operand_subword_force (op1, i, mode);
10137 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10138 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10141 /* All but the high-order word must be compared as unsigned. */
10142 do_compare_rtx_and_jump (op0_word, op1_word, GT,
10143 (unsignedp || i > 0), word_mode, NULL_RTX,
10144 NULL_RTX, if_true_label);
10146 /* Consider lower words only if these are equal. */
10147 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
10148 NULL_RTX, NULL_RTX, if_false_label);
10151 if (if_false_label)
10152 emit_jump (if_false_label);
10153 if (drop_through_label)
10154 emit_label (drop_through_label);
10157 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10158 with one insn, test the comparison and jump to the appropriate label. */
10161 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10163 rtx if_false_label, if_true_label;
10165 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10166 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10167 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10168 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10170 rtx drop_through_label = 0;
10172 if (! if_false_label)
10173 drop_through_label = if_false_label = gen_label_rtx ();
10175 for (i = 0; i < nwords; i++)
10176 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10177 operand_subword_force (op1, i, mode),
10178 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10179 word_mode, NULL_RTX, if_false_label, NULL_RTX);
10182 emit_jump (if_true_label);
10183 if (drop_through_label)
10184 emit_label (drop_through_label);
10187 /* Jump according to whether OP0 is 0.
10188 We assume that OP0 has an integer mode that is too wide
10189 for the available compare insns. */
10192 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10194 rtx if_false_label, if_true_label;
10196 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10199 rtx drop_through_label = 0;
10201 /* The fastest way of doing this comparison on almost any machine is to
10202 "or" all the words and compare the result. If all have to be loaded
10203 from memory and this is a very wide item, it's possible this may
10204 be slower, but that's highly unlikely. */
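
  /* As a sketch, for a DImode OP0 on a 32-bit target the code below
     computes roughly

	part = OP0.word[0] | OP0.word[1];
	if (part == 0) goto if_false_label; else goto if_true_label;

     so a single comparison against zero replaces one comparison per
     word; if the expand_binop call fails we fall back to the
     word-by-word compares further down.  */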
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  /* If we managed to "or" the words together, one compare suffices.  */
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
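
/* For example, a caller might do

	rtx cond = compare_from_rtx (x, y, GT, 0, SImode, NULL_RTX);

   On a cc0 machine COND would be (gt (cc0) (const_int 0)); elsewhere
   it would be (gt x y).  Either form is suitable as the condition of
   a subsequent conditional-branch pattern.  (X and Y here are
   hypothetical SImode rtx operands, used purely for illustration.)  */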
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
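
  /* A concrete example of the conversion described above: comparing a
     QImode value against (const_int -1) as unsigned requires rewriting
     the constant, since -1 is represented as 255 in the 8-bit mode:

	(eq:QI x (const_int -1))  becomes  (eq:QI x (const_int 255))

     that is, op1 is masked with GET_MODE_MASK (QImode).  */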
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Reversing is not safe for IEEE floating-point modes, where
     e.g. ! (a < b) is not equivalent to a >= b once NaNs are involved.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
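
/* For instance, for the C expression A < B a caller passes
   SIGNED_CODE == LT and UNSIGNED_CODE == LTU; the signedness of the
   comparison type then selects between them, so unsigned operands
   get an LTU test and signed operands an LT test.  */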
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
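
/* The set/jump/set sequence mentioned above looks roughly like this
   (a sketch; INVERT flips the two constants):

	target = 1;
	if (<condition>) goto label;
	target = 0;
     label:

   which produces the flag value without an scc instruction.  */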
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
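
  /* A worked example of this transformation: for (x & 8) != 0, BITNUM
     is 3, so we emit (x >> 3) & 1; for (x & 8) == 0 we emit
     ((x >> 3) ^ 1) & 1, the XOR being done before the AND so that the
     AND stays last and can combine with later operations.  When the
     tested bit is the sign bit, i.e. BITNUM == TYPE_PRECISION (type) - 1,
     the logical right shift already leaves only that bit and the final
     AND is omitted.  */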
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
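
/* These stubs let the code below be written unconditionally: on a
   target without a casesi pattern, HAVE_casesi is 0, so the
   "if (! HAVE_casesi) return 0;" test in try_casesi makes the
   references to gen_casesi and CODE_FOR_casesi unreachable, and the
   stub definitions merely keep them compilable.  */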
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
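
/* For example, with the default threshold a switch statement whose
   number of case values is below the threshold (four when a casesi
   pattern exists, five otherwise) is normally expanded as a chain of
   compares and branches, while larger switches get a dispatch table;
   the exact decision is made by the switch-expansion code in stmt.c.  */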
/* Return the value of CASE_VALUES_THRESHOLD for this compilation.  */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
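
  /* A small worked example: for case values 3..7, INDEX arrives here
     as i - 3 and RANGE is 4.  If i is 2, i - 3 wraps around to a huge
     unsigned value, and if i is 8, i - 3 is 5; both compare GTU
     against 4 and branch to DEFAULT_LABEL, so the single unsigned
     comparison below checks both bounds at once.  */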
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
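
  /* Concretely, for a jump table with 4-byte entries the code below
     builds the address

	(plus:P (mult:P index (const_int 4))
		(label_ref:P table_label))

     that is, table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
     from which the target address of the jump is then loaded.  */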
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
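
/* For example, if the target has no V2SImode move pattern but does
   have SImode moves, V2SImode is still reported as valid here; a
   V2SImode move is then emulated as a pair of SImode moves.  */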
int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

#include "gt-expr.h"