1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 #include "hard-reg-set.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
49 #ifndef ACCUMULATE_OUTGOING_ARGS
50 #define ACCUMULATE_OUTGOING_ARGS 0
53 /* Supply a default definition for PUSH_ARGS. */
56 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first */
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
80 #define STACK_PUSH_CODE PRE_INC
84 /* Assume that case vectors are not pc-relative. */
85 #ifndef CASE_VECTOR_PC_RELATIVE
86 #define CASE_VECTOR_PC_RELATIVE 0
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
97 /* Nonzero to generate code for all the subroutines within an
98 expression before generating the upper levels of the expression.
99 Nowadays this is never zero. */
100 int do_preexpand_calls = 1;
102 /* Don't check memory usage, since code is being emitted to check a memory
103 usage. Used when current_function_check_memory_usage is true, to avoid
104 infinite recursion. */
105 static int in_check_memory_usage;
107 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
108 static tree placeholder_list = 0;
110 /* This structure is used by move_by_pieces to describe the move to
112 struct move_by_pieces
123 int explicit_inc_from;
131 /* This structure is used by clear_by_pieces to describe the clear to
134 struct clear_by_pieces
146 extern struct obstack permanent_obstack;
148 static rtx get_push_address PARAMS ((int));
150 static rtx enqueue_insn PARAMS ((rtx, rtx));
151 static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
155 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
157 struct clear_by_pieces *));
158 static rtx get_subtarget PARAMS ((rtx));
159 static int is_zeros_p PARAMS ((tree));
160 static int mostly_zeros_p PARAMS ((tree));
161 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
162 HOST_WIDE_INT, enum machine_mode,
163 tree, tree, unsigned int, int));
164 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
166 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
167 HOST_WIDE_INT, enum machine_mode,
168 tree, enum machine_mode, int,
169 unsigned int, HOST_WIDE_INT, int));
170 static enum memory_use_mode
171 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
172 static tree save_noncopied_parts PARAMS ((tree, tree));
173 static tree init_noncopied_parts PARAMS ((tree, tree));
174 static int safe_from_p PARAMS ((rtx, tree, int));
175 static int fixed_type_p PARAMS ((tree));
176 static rtx var_rtx PARAMS ((tree));
177 static int readonly_fields_p PARAMS ((tree));
178 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
179 static rtx expand_increment PARAMS ((tree, int, int));
180 static void preexpand_calls PARAMS ((tree));
181 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
182 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
183 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
185 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
187 /* Record for each mode whether we can move a register directly to or
188 from an object of that mode in memory. If we can't, we won't try
189 to use that mode directly when accessing a field of that mode. */
191 static char direct_load[NUM_MACHINE_MODES];
192 static char direct_store[NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
201 /* If we are optimizing for space (-Os), cut down the default move ratio */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
213 /* This array records the insn_code of insns to perform block moves. */
214 enum insn_code movstr_optab[NUM_MACHINE_MODES];
216 /* This array records the insn_code of insns to perform block clears. */
217 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
219 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
221 #ifndef SLOW_UNALIGNED_ACCESS
222 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
225 /* This is run once per compilation to set up which modes can be used
226 directly in memory and to initialize the block move optab. */
232 enum machine_mode mode;
239 /* Since we are on the permanent obstack, we must be sure we save this
240 spot AFTER we call start_sequence, since it will reuse the rtl it
242 free_point = (char *) oballoc (0);
244 /* Try indexing by frame ptr and try by stack ptr.
245 It is known that on the Convex the stack ptr isn't a valid index.
246 With luck, one or the other is valid on any machine. */
247 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
248 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
250 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
251 pat = PATTERN (insn);
253 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
254 mode = (enum machine_mode) ((int) mode + 1))
259 direct_load[(int) mode] = direct_store[(int) mode] = 0;
260 PUT_MODE (mem, mode);
261 PUT_MODE (mem1, mode);
263 /* See if there is some register that can be used in this mode and
264 directly loaded or stored from memory. */
266 if (mode != VOIDmode && mode != BLKmode)
267 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
268 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
271 if (! HARD_REGNO_MODE_OK (regno, mode))
274 reg = gen_rtx_REG (mode, regno);
277 SET_DEST (pat) = reg;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_load[(int) mode] = 1;
281 SET_SRC (pat) = mem1;
282 SET_DEST (pat) = reg;
283 if (recog (pat, insn, &num_clobbers) >= 0)
284 direct_load[(int) mode] = 1;
287 SET_DEST (pat) = mem;
288 if (recog (pat, insn, &num_clobbers) >= 0)
289 direct_store[(int) mode] = 1;
292 SET_DEST (pat) = mem1;
293 if (recog (pat, insn, &num_clobbers) >= 0)
294 direct_store[(int) mode] = 1;
302 /* This is run at the start of compiling a function. */
307 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
310 pending_stack_adjust = 0;
311 stack_pointer_delta = 0;
312 inhibit_defer_pop = 0;
314 apply_args_value = 0;
320 struct expr_status *p;
325 ggc_mark_rtx (p->x_saveregs_value);
326 ggc_mark_rtx (p->x_apply_args_value);
327 ggc_mark_rtx (p->x_forced_labels);
338 /* Small sanity check that the queue is empty at the end of a function. */
341 finish_expr_for_function ()
347 /* Manage the queue of increment instructions to be output
348 for POSTINCREMENT_EXPR expressions, etc. */
350 /* Queue up to increment (or change) VAR later. BODY says how:
351 BODY should be the same thing you would pass to emit_insn
352 to increment right away. It will go to emit_insn later on.
354 The value is a QUEUED expression to be used in place of VAR
355 where you want to guarantee the pre-incrementation value of VAR. */
358 enqueue_insn (var, body)
361 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
362 body, pending_chain);
363 return pending_chain;
366 /* Use protect_from_queue to convert a QUEUED expression
367 into something that you can put immediately into an instruction.
368 If the queued incrementation has not happened yet,
369 protect_from_queue returns the variable itself.
370 If the incrementation has happened, protect_from_queue returns a temp
371 that contains a copy of the old value of the variable.
373 Any time an rtx which might possibly be a QUEUED is to be put
374 into an instruction, it must be passed through protect_from_queue first.
375 QUEUED expressions are not meaningful in instructions.
377 Do not pass a value through protect_from_queue and then hold
378 on to it for a while before putting it in an instruction!
379 If the queue is flushed in between, incorrect code will result. */
382 protect_from_queue (x, modify)
386 register RTX_CODE code = GET_CODE (x);
388 #if 0 /* A QUEUED can hang around after the queue is forced out. */
389 /* Shortcut for most common case. */
390 if (pending_chain == 0)
396 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
397 use of autoincrement. Make a copy of the contents of the memory
398 location rather than a copy of the address, but not if the value is
399 of mode BLKmode. Don't modify X in place since it might be
401 if (code == MEM && GET_MODE (x) != BLKmode
402 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
404 register rtx y = XEXP (x, 0);
405 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
407 MEM_COPY_ATTRIBUTES (new, x);
411 register rtx temp = gen_reg_rtx (GET_MODE (new));
412 emit_insn_before (gen_move_insn (temp, new),
418 /* Otherwise, recursively protect the subexpressions of all
419 the kinds of rtx's that can contain a QUEUED. */
422 rtx tem = protect_from_queue (XEXP (x, 0), 0);
423 if (tem != XEXP (x, 0))
429 else if (code == PLUS || code == MULT)
431 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
432 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
433 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
442 /* If the increment has not happened, use the variable itself. */
443 if (QUEUED_INSN (x) == 0)
444 return QUEUED_VAR (x);
445 /* If the increment has happened and a pre-increment copy exists,
447 if (QUEUED_COPY (x) != 0)
448 return QUEUED_COPY (x);
449 /* The increment has happened but we haven't set up a pre-increment copy.
450 Set one up now, and use it. */
451 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
452 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
454 return QUEUED_COPY (x);
457 /* Return nonzero if X contains a QUEUED expression:
458 if it contains anything that will be altered by a queued increment.
459 We handle only combinations of MEM, PLUS, MINUS and MULT operators
460 since memory addresses generally contain only those. */
466 register enum rtx_code code = GET_CODE (x);
472 return queued_subexp_p (XEXP (x, 0));
476 return (queued_subexp_p (XEXP (x, 0))
477 || queued_subexp_p (XEXP (x, 1)));
483 /* Perform all the pending incrementations. */
489 while ((p = pending_chain))
491 rtx body = QUEUED_BODY (p);
493 if (GET_CODE (body) == SEQUENCE)
495 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
496 emit_insn (QUEUED_BODY (p));
499 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
500 pending_chain = QUEUED_NEXT (p);
504 /* Copy data from FROM to TO, where the machine modes are not the same.
505 Both modes may be integer, or both may be floating.
506 UNSIGNEDP should be nonzero if FROM is an unsigned type.
507 This causes zero-extension instead of sign-extension. */
510 convert_move (to, from, unsignedp)
511 register rtx to, from;
514 enum machine_mode to_mode = GET_MODE (to);
515 enum machine_mode from_mode = GET_MODE (from);
516 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
517 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 /* rtx code for making an equivalent value. */
522 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
524 to = protect_from_queue (to, 1);
525 from = protect_from_queue (from, 0);
527 if (to_real != from_real)
530 /* If FROM is a SUBREG that indicates that we have already done at least
531 the required extension, strip it. We don't handle such SUBREGs as
534 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
535 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
536 >= GET_MODE_SIZE (to_mode))
537 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
538 from = gen_lowpart (to_mode, from), from_mode = to_mode;
540 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
543 if (to_mode == from_mode
544 || (from_mode == VOIDmode && CONSTANT_P (from)))
546 emit_move_insn (to, from);
554 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
556 /* Try converting directly if the insn is supported. */
557 if ((code = can_extend_p (to_mode, from_mode, 0))
560 emit_unop_insn (code, to, from, UNKNOWN);
565 #ifdef HAVE_trunchfqf2
566 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
568 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
572 #ifdef HAVE_trunctqfqf2
573 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_truncsfqf2
580 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncdfqf2
587 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_truncxfqf2
594 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_trunctfqf2
601 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
608 #ifdef HAVE_trunctqfhf2
609 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
611 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
615 #ifdef HAVE_truncsfhf2
616 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
618 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
622 #ifdef HAVE_truncdfhf2
623 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
629 #ifdef HAVE_truncxfhf2
630 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
636 #ifdef HAVE_trunctfhf2
637 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
644 #ifdef HAVE_truncsftqf2
645 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
647 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
651 #ifdef HAVE_truncdftqf2
652 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
654 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
658 #ifdef HAVE_truncxftqf2
659 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
665 #ifdef HAVE_trunctftqf2
666 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
673 #ifdef HAVE_truncdfsf2
674 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
676 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
680 #ifdef HAVE_truncxfsf2
681 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
683 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
687 #ifdef HAVE_trunctfsf2
688 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
694 #ifdef HAVE_truncxfdf2
695 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
697 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
701 #ifdef HAVE_trunctfdf2
702 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
704 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
716 libcall = extendsfdf2_libfunc;
720 libcall = extendsfxf2_libfunc;
724 libcall = extendsftf2_libfunc;
736 libcall = truncdfsf2_libfunc;
740 libcall = extenddfxf2_libfunc;
744 libcall = extenddftf2_libfunc;
756 libcall = truncxfsf2_libfunc;
760 libcall = truncxfdf2_libfunc;
772 libcall = trunctfsf2_libfunc;
776 libcall = trunctfdf2_libfunc;
788 if (libcall == (rtx) 0)
789 /* This conversion is not implemented yet. */
792 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
794 emit_move_insn (to, value);
798 /* Now both modes are integers. */
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
838 /* No special multiword conversion insn; do it by hand. */
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
851 lowpart_mode = from_mode;
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
858 /* Compute the value to put in each remaining word. */
860 fill_value = const0_rtx;
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
897 insns = get_insns ();
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
920 /* Handle pointer conversion */ /* SPEE 900220 */
921 if (to_mode == PQImode)
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
932 #endif /* HAVE_truncqipqi2 */
936 if (from_mode == PQImode)
938 if (to_mode != QImode)
940 from = convert_to_mode (QImode, from, unsignedp);
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
951 #endif /* HAVE_extendpqiqi2 */
956 if (to_mode == PSImode)
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
967 #endif /* HAVE_truncsipsi2 */
971 if (from_mode == PSImode)
973 if (to_mode != SImode)
975 from = convert_to_mode (SImode, from, unsignedp);
980 #ifdef HAVE_extendpsisi2
981 if (HAVE_extendpsisi2)
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
986 #endif /* HAVE_extendpsisi2 */
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1002 #endif /* HAVE_truncdipdi2 */
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1021 #endif /* HAVE_extendpdidi2 */
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1055 emit_unop_insn (code, to, from, equiv_code);
1060 enum machine_mode intermediate;
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1090 emit_move_insn (to, tmp);
1095 /* Support special truncate insns for certain modes. */
1097 if (from_mode == DImode && to_mode == SImode)
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 if (from_mode == DImode && to_mode == HImode)
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 if (from_mode == DImode && to_mode == QImode)
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 if (from_mode == SImode && to_mode == HImode)
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 if (from_mode == SImode && to_mode == QImode)
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 if (from_mode == HImode && to_mode == QImode)
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 if (from_mode == TImode && to_mode == DImode)
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 if (from_mode == TImode && to_mode == SImode)
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 if (from_mode == TImode && to_mode == HImode)
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 if (from_mode == TImode && to_mode == QImode)
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1237 /* Mode combination is not recognized. */
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1292 if (mode == oldmode)
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1305 HOST_WIDE_INT val = INTVAL (x);
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1310 int width = GET_MODE_BITSIZE (oldmode);
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1352 return GEN_INT (val);
1355 return gen_lowpart (mode, x);
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1364 /* This macro is used to determine what the largest unit size that
1365 move_by_pieces can use is. */
1367 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1368 move efficiently, as opposed to MOVE_MAX which is the maximum
1369 number of bytes we can move with a single instruction. */
1371 #ifndef MOVE_MAX_PIECES
1372 #define MOVE_MAX_PIECES MOVE_MAX
1375 /* Generate several move instructions to copy LEN bytes
1376 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1377 The caller must pass FROM and TO
1378 through protect_from_queue before calling.
1379 ALIGN is maximum alignment we can assume. */
1382 move_by_pieces (to, from, len, align)
1387 struct move_by_pieces data;
1388 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1389 unsigned int max_size = MOVE_MAX_PIECES + 1;
1390 enum machine_mode mode = VOIDmode, tmode;
1391 enum insn_code icode;
1394 data.to_addr = to_addr;
1395 data.from_addr = from_addr;
1399 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1400 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1402 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1403 || GET_CODE (from_addr) == POST_INC
1404 || GET_CODE (from_addr) == POST_DEC);
1406 data.explicit_inc_from = 0;
1407 data.explicit_inc_to = 0;
1409 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 if (data.reverse) data.offset = len;
1413 data.to_struct = MEM_IN_STRUCT_P (to);
1414 data.from_struct = MEM_IN_STRUCT_P (from);
1415 data.to_readonly = RTX_UNCHANGING_P (to);
1416 data.from_readonly = RTX_UNCHANGING_P (from);
1418 /* If copying requires more than two move insns,
1419 copy addresses to registers (to make displacements shorter)
1420 and use post-increment if available. */
1421 if (!(data.autinc_from && data.autinc_to)
1422 && move_by_pieces_ninsns (len, align) > 2)
1424 /* Find the mode of the largest move... */
1425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1427 if (GET_MODE_SIZE (tmode) < max_size)
1430 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1432 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1433 data.autinc_from = 1;
1434 data.explicit_inc_from = -1;
1436 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1438 data.from_addr = copy_addr_to_reg (from_addr);
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = 1;
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1446 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1448 data.explicit_inc_to = -1;
1450 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1452 data.to_addr = copy_addr_to_reg (to_addr);
1454 data.explicit_inc_to = 1;
1456 if (!data.autinc_to && CONSTANT_P (to_addr))
1457 data.to_addr = copy_addr_to_reg (to_addr);
1460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1462 align = MOVE_MAX * BITS_PER_UNIT;
1464 /* First move what we can in the largest integer mode, then go to
1465 successively smaller modes. */
1467 while (max_size > 1)
1469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1471 if (GET_MODE_SIZE (tmode) < max_size)
1474 if (mode == VOIDmode)
1477 icode = mov_optab->handlers[(int) mode].insn_code;
1478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1479 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1481 max_size = GET_MODE_SIZE (mode);
1484 /* The code above should have handled everything. */
1489 /* Return number of insns required to move L bytes by pieces.
1490 ALIGN (in bytes) is maximum alignment we can assume. */
1493 move_by_pieces_ninsns (l, align)
1497 register int n_insns = 0;
1498 unsigned int max_size = MOVE_MAX + 1;
1500 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1501 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1502 align = MOVE_MAX * BITS_PER_UNIT;
1504 while (max_size > 1)
1506 enum machine_mode mode = VOIDmode, tmode;
1507 enum insn_code icode;
1509 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1510 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1511 if (GET_MODE_SIZE (tmode) < max_size)
1514 if (mode == VOIDmode)
1517 icode = mov_optab->handlers[(int) mode].insn_code;
1518 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1519 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1521 max_size = GET_MODE_SIZE (mode);
1527 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1528 with move instructions for mode MODE. GENFUN is the gen_... function
1529 to make a move insn for that mode. DATA has all the other info. */
1532 move_by_pieces_1 (genfun, mode, data)
1533 rtx (*genfun) PARAMS ((rtx, ...));
1534 enum machine_mode mode;
1535 struct move_by_pieces *data;
1537 register int size = GET_MODE_SIZE (mode);
1538 register rtx to1, from1;
1540 while (data->len >= size)
1542 if (data->reverse) data->offset -= size;
1544 to1 = (data->autinc_to
1545 ? gen_rtx_MEM (mode, data->to_addr)
1546 : copy_rtx (change_address (data->to, mode,
1547 plus_constant (data->to_addr,
1549 MEM_IN_STRUCT_P (to1) = data->to_struct;
1550 RTX_UNCHANGING_P (to1) = data->to_readonly;
1553 = (data->autinc_from
1554 ? gen_rtx_MEM (mode, data->from_addr)
1555 : copy_rtx (change_address (data->from, mode,
1556 plus_constant (data->from_addr,
1558 MEM_IN_STRUCT_P (from1) = data->from_struct;
1559 RTX_UNCHANGING_P (from1) = data->from_readonly;
1561 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1562 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1563 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1564 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1566 emit_insn ((*genfun) (to1, from1));
1567 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1568 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1569 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1570 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1572 if (! data->reverse) data->offset += size;
1578 /* Emit code to move a block Y to a block X.
1579 This may be done with string-move instructions,
1580 with multiple scalar move instructions, or with a library call.
1582 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1584 SIZE is an rtx that says how long they are.
1585 ALIGN is the maximum alignment we can assume they have.
1587 Return the address of the new block, if memcpy is called and returns it,
1591 emit_block_move (x, y, size, align)
1597 #ifdef TARGET_MEM_FUNCTIONS
1599 tree call_expr, arg_list;
1602 if (GET_MODE (x) != BLKmode)
1605 if (GET_MODE (y) != BLKmode)
1608 x = protect_from_queue (x, 1);
1609 y = protect_from_queue (y, 0);
1610 size = protect_from_queue (size, 0);
1612 if (GET_CODE (x) != MEM)
1614 if (GET_CODE (y) != MEM)
1619 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1620 move_by_pieces (x, y, INTVAL (size), align);
1623 /* Try the most limited insn first, because there's no point
1624 including more than one in the machine description unless
1625 the more limited one has some advantage. */
1627 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1628 enum machine_mode mode;
1630 /* Since this is a move insn, we don't care about volatility. */
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1636 enum insn_code code = movstr_optab[(int) mode];
1637 insn_operand_predicate_fn pred;
1639 if (code != CODE_FOR_nothing
1640 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1641 here because if SIZE is less than the mode mask, as it is
1642 returned by the macro, it will definitely be less than the
1643 actual mode mask. */
1644 && ((GET_CODE (size) == CONST_INT
1645 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1646 <= (GET_MODE_MASK (mode) >> 1)))
1647 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1648 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1649 || (*pred) (x, BLKmode))
1650 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1651 || (*pred) (y, BLKmode))
1652 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1653 || (*pred) (opalign, VOIDmode)))
1656 rtx last = get_last_insn ();
1659 op2 = convert_to_mode (mode, size, 1);
1660 pred = insn_data[(int) code].operand[2].predicate;
1661 if (pred != 0 && ! (*pred) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1672 delete_insns_since (last);
1678 /* X, Y, or SIZE may have been passed through protect_from_queue.
1680 It is unsafe to save the value generated by protect_from_queue
1681 and reuse it later. Consider what happens if emit_queue is
1682 called before the return value from protect_from_queue is used.
1684 Expansion of the CALL_EXPR below will call emit_queue before
1685 we are finished emitting RTL for argument setup. So if we are
1686 not careful we could get the wrong value for an argument.
1688 To avoid this problem we go ahead and emit code to copy X, Y &
1689 SIZE into new pseudos. We can then place those new pseudos
1690 into an RTL_EXPR and use them later, even after a call to
1693 Note this is not strictly needed for library calls since they
1694 do not call emit_queue before loading their arguments. However,
1695 we may need to have library calls call emit_queue in the future
1696 since failing to do so could cause problems for targets which
1697 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1698 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1699 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1704 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1705 TREE_UNSIGNED (integer_type_node));
1706 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1709 #ifdef TARGET_MEM_FUNCTIONS
1710 /* It is incorrect to use the libcall calling conventions to call
1711 memcpy in this context.
1713 This could be a user call to memcpy and the user may wish to
1714 examine the return value from memcpy.
1716 For targets where libcalls and normal calls have different conventions
1717 for returning pointers, we could end up generating incorrect code.
1719 So instead of using a libcall sequence we build up a suitable
1720 CALL_EXPR and expand the call in the normal fashion. */
1721 if (fn == NULL_TREE)
1725 /* This was copied from except.c, I don't know if all this is
1726 necessary in this context or not. */
1727 fn = get_identifier ("memcpy");
1728 push_obstacks_nochange ();
1729 end_temporary_allocation ();
1730 fntype = build_pointer_type (void_type_node);
1731 fntype = build_function_type (fntype, NULL_TREE);
1732 fn = build_decl (FUNCTION_DECL, fn, fntype);
1733 ggc_add_tree_root (&fn, 1);
1734 DECL_EXTERNAL (fn) = 1;
1735 TREE_PUBLIC (fn) = 1;
1736 DECL_ARTIFICIAL (fn) = 1;
1737 make_decl_rtl (fn, NULL_PTR, 1);
1738 assemble_external (fn);
1742 /* We need to make an argument list for the function call.
1744 memcpy has three arguments, the first two are void * addresses and
1745 the last is a size_t byte count for the copy. */
1747 = build_tree_list (NULL_TREE,
1748 make_tree (build_pointer_type (void_type_node), x));
1749 TREE_CHAIN (arg_list)
1750 = build_tree_list (NULL_TREE,
1751 make_tree (build_pointer_type (void_type_node), y));
1752 TREE_CHAIN (TREE_CHAIN (arg_list))
1753 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1754 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1756 /* Now we have to build up the CALL_EXPR itself. */
1757 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1758 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1759 call_expr, arg_list, NULL_TREE);
1760 TREE_SIDE_EFFECTS (call_expr) = 1;
1762 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1764 emit_library_call (bcopy_libfunc, 0,
1765 VOIDmode, 3, y, Pmode, x, Pmode,
1766 convert_to_mode (TYPE_MODE (integer_type_node), size,
1767 TREE_UNSIGNED (integer_type_node)),
1768 TYPE_MODE (integer_type_node));
1775 /* Copy all or part of a value X into registers starting at REGNO.
1776 The number of registers to be filled is NREGS. */
1779 move_block_to_reg (regno, x, nregs, mode)
1783 enum machine_mode mode;
1786 #ifdef HAVE_load_multiple
1794 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1795 x = validize_mem (force_const_mem (mode, x));
1797 /* See if the machine can do this with a load multiple insn. */
1798 #ifdef HAVE_load_multiple
1799 if (HAVE_load_multiple)
1801 last = get_last_insn ();
1802 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1810 delete_insns_since (last);
1814 for (i = 0; i < nregs; i++)
1815 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1816 operand_subword_force (x, i, mode));
1819 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1820 The number of registers to be filled is NREGS. SIZE indicates the number
1821 of bytes in the object X. */
1825 move_block_from_reg (regno, x, nregs, size)
1832 #ifdef HAVE_store_multiple
1836 enum machine_mode mode;
1838 /* If SIZE is that of a mode no bigger than a word, just use that
1839 mode's store operation. */
1840 if (size <= UNITS_PER_WORD
1841 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1843 emit_move_insn (change_address (x, mode, NULL),
1844 gen_rtx_REG (mode, regno));
1848 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1849 to the left before storing to memory. Note that the previous test
1850 doesn't handle all cases (e.g. SIZE == 3). */
1851 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1853 rtx tem = operand_subword (x, 0, 1, BLKmode);
1859 shift = expand_shift (LSHIFT_EXPR, word_mode,
1860 gen_rtx_REG (word_mode, regno),
1861 build_int_2 ((UNITS_PER_WORD - size)
1862 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1863 emit_move_insn (tem, shift);
1867 /* See if the machine can do this with a store multiple insn. */
1868 #ifdef HAVE_store_multiple
1869 if (HAVE_store_multiple)
1871 last = get_last_insn ();
1872 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1880 delete_insns_since (last);
1884 for (i = 0; i < nregs; i++)
1886 rtx tem = operand_subword (x, i, 1, BLKmode);
1891 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1895 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1896 registers represented by a PARALLEL. SSIZE represents the total size of
1897 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1899 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1900 the balance will be in what would be the low-order memory addresses, i.e.
1901 left justified for big endian, right justified for little endian. This
1902 happens to be true for the targets currently using this support. If this
1903 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1907 emit_group_load (dst, orig_src, ssize, align)
1915 if (GET_CODE (dst) != PARALLEL)
1918 /* Check for a NULL entry, used to indicate that the parameter goes
1919 both on the stack and in registers. */
1920 if (XEXP (XVECEXP (dst, 0, 0), 0))
1925 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1927 /* If we won't be loading directly from memory, protect the real source
1928 from strange tricks we might play. */
1930 if (GET_CODE (src) != MEM)
1932 if (GET_CODE (src) == VOIDmode)
1933 src = gen_reg_rtx (GET_MODE (dst));
1935 src = gen_reg_rtx (GET_MODE (orig_src));
1936 emit_move_insn (src, orig_src);
1939 /* Process the pieces. */
1940 for (i = start; i < XVECLEN (dst, 0); i++)
1942 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1943 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1944 unsigned int bytelen = GET_MODE_SIZE (mode);
1947 /* Handle trailing fragments that run over the size of the struct. */
1948 if (ssize >= 0 && bytepos + bytelen > ssize)
1950 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1951 bytelen = ssize - bytepos;
1956 /* Optimize the access just a bit. */
1957 if (GET_CODE (src) == MEM
1958 && align >= GET_MODE_ALIGNMENT (mode)
1959 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1960 && bytelen == GET_MODE_SIZE (mode))
1962 tmps[i] = gen_reg_rtx (mode);
1963 emit_move_insn (tmps[i],
1964 change_address (src, mode,
1965 plus_constant (XEXP (src, 0),
1968 else if (GET_CODE (src) == CONCAT)
1971 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1972 tmps[i] = XEXP (src, 0);
1973 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1974 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1975 tmps[i] = XEXP (src, 1);
1980 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1981 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1982 mode, mode, align, ssize);
1984 if (BYTES_BIG_ENDIAN && shift)
1985 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1986 tmps[i], 0, OPTAB_WIDEN);
1991 /* Copy the extracted pieces into the proper (probable) hard regs. */
1992 for (i = start; i < XVECLEN (dst, 0); i++)
1993 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1996 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1997 registers represented by a PARALLEL. SSIZE represents the total size of
1998 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2001 emit_group_store (orig_dst, src, ssize, align)
2009 if (GET_CODE (src) != PARALLEL)
2012 /* Check for a NULL entry, used to indicate that the parameter goes
2013 both on the stack and in registers. */
2014 if (XEXP (XVECEXP (src, 0, 0), 0))
2019 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2021 /* Copy the (probable) hard regs into pseudos. */
2022 for (i = start; i < XVECLEN (src, 0); i++)
2024 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2025 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2026 emit_move_insn (tmps[i], reg);
2030 /* If we won't be storing directly into memory, protect the real destination
2031 from strange tricks we might play. */
2033 if (GET_CODE (dst) == PARALLEL)
2037 /* We can get a PARALLEL dst if there is a conditional expression in
2038 a return statement. In that case, the dst and src are the same,
2039 so no action is necessary. */
2040 if (rtx_equal_p (dst, src))
2043 /* It is unclear if we can ever reach here, but we may as well handle
2044 it. Allocate a temporary, and split this into a store/load to/from
2047 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2048 emit_group_store (temp, src, ssize, align);
2049 emit_group_load (dst, temp, ssize, align);
2052 else if (GET_CODE (dst) != MEM)
2054 dst = gen_reg_rtx (GET_MODE (orig_dst));
2055 /* Make life a bit easier for combine. */
2056 emit_move_insn (dst, const0_rtx);
2058 else if (! MEM_IN_STRUCT_P (dst))
2060 /* store_bit_field requires that memory operations have
2061 mem_in_struct_p set; we might not. */
2063 dst = copy_rtx (orig_dst);
2064 MEM_SET_IN_STRUCT_P (dst, 1);
2067 /* Process the pieces. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2070 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2071 enum machine_mode mode = GET_MODE (tmps[i]);
2072 unsigned int bytelen = GET_MODE_SIZE (mode);
2074 /* Handle trailing fragments that run over the size of the struct. */
2075 if (ssize >= 0 && bytepos + bytelen > ssize)
2077 if (BYTES_BIG_ENDIAN)
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2081 tmps[i], 0, OPTAB_WIDEN);
2083 bytelen = ssize - bytepos;
2086 /* Optimize the access just a bit. */
2087 if (GET_CODE (dst) == MEM
2088 && align >= GET_MODE_ALIGNMENT (mode)
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (change_address (dst, mode,
2092 plus_constant (XEXP (dst, 0),
2096 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2097 mode, tmps[i], align, ssize);
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (GET_CODE (dst) == REG)
2104 emit_move_insn (orig_dst, dst);
2107 /* Generate code to copy a BLKmode object of TYPE out of a
2108 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2109 is null, a stack temporary is created. TGTBLK is returned.
2111 The primary purpose of this routine is to handle functions
2112 that return BLKmode structures in registers. Some machines
2113 (the PA for example) want to return all small structures
2114 in registers regardless of the structure's alignment. */
2117 copy_blkmode_from_reg (tgtblk, srcreg, type)
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2129 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2130 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2131 preserve_temp_slots (tgtblk);
2134 /* This code assumes srcreg is at least a full word. If it isn't,
2135 copy it into a new pseudo which is a full word. */
2136 if (GET_MODE (srcreg) != BLKmode
2137 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2138 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2140 /* Structures whose size is not a multiple of a word are aligned
2141 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2142 machine, this means we must skip the empty high order bytes when
2143 calculating the bit offset. */
2144 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2145 big_endian_correction
2146 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2148 /* Copy the structure BITSIZE bites at a time.
2150 We could probably emit more efficient code for machines which do not use
2151 strict alignment, but it doesn't seem worth the effort at the current
2153 for (bitpos = 0, xbitpos = big_endian_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2157 /* We need a new source operand each time xbitpos is on a
2158 word boundary and when xbitpos == big_endian_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == big_endian_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2164 /* We need a new destination operand each time bitpos is on
2166 if (bitpos % BITS_PER_WORD == 0)
2167 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2169 /* Use xbitpos for the source extraction (right justified) and
2170 xbitpos for the destination store (left justified). */
2171 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2172 extract_bit_field (src, bitsize,
2173 xbitpos % BITS_PER_WORD, 1,
2174 NULL_RTX, word_mode, word_mode,
2175 bitsize, BITS_PER_WORD),
2176 bitsize, BITS_PER_WORD);
2182 /* Add a USE expression for REG to the (possibly empty) list pointed
2183 to by CALL_FUSAGE. REG must denote a hard register. */
2186 use_reg (call_fusage, reg)
2187 rtx *call_fusage, reg;
2189 if (GET_CODE (reg) != REG
2190 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2194 = gen_rtx_EXPR_LIST (VOIDmode,
2195 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2198 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2199 starting at REGNO. All of these registers must be hard registers. */
2202 use_regs (call_fusage, regno, nregs)
2209 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2212 for (i = 0; i < nregs; i++)
2213 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2216 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2217 PARALLEL REGS. This is for calls that pass values in multiple
2218 non-contiguous locations. The Irix 6 ABI has examples of this. */
2221 use_group_regs (call_fusage, regs)
2227 for (i = 0; i < XVECLEN (regs, 0); i++)
2229 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2231 /* A NULL entry means the parameter goes both on the stack and in
2232 registers. This can also be a MEM for targets that pass values
2233 partially on the stack and partially in registers. */
2234 if (reg != 0 && GET_CODE (reg) == REG)
2235 use_reg (call_fusage, reg);
2239 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2240 rtx with BLKmode). The caller must pass TO through protect_from_queue
2241 before calling. ALIGN is maximum alignment we can assume. */
2244 clear_by_pieces (to, len, align)
2249 struct clear_by_pieces data;
2250 rtx to_addr = XEXP (to, 0);
2251 unsigned int max_size = MOVE_MAX_PIECES + 1;
2252 enum machine_mode mode = VOIDmode, tmode;
2253 enum insn_code icode;
2256 data.to_addr = to_addr;
2259 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2260 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2262 data.explicit_inc_to = 0;
2264 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2265 if (data.reverse) data.offset = len;
2268 data.to_struct = MEM_IN_STRUCT_P (to);
2270 /* If copying requires more than two move insns,
2271 copy addresses to registers (to make displacements shorter)
2272 and use post-increment if available. */
2274 && move_by_pieces_ninsns (len, align) > 2)
2276 /* Determine the main mode we'll be using */
2277 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2278 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2279 if (GET_MODE_SIZE (tmode) < max_size)
2282 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2284 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2286 data.explicit_inc_to = -1;
2288 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2290 data.to_addr = copy_addr_to_reg (to_addr);
2292 data.explicit_inc_to = 1;
2294 if (!data.autinc_to && CONSTANT_P (to_addr))
2295 data.to_addr = copy_addr_to_reg (to_addr);
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2302 /* First move what we can in the largest integer mode, then go to
2303 successively smaller modes. */
2305 while (max_size > 1)
2307 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2308 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2309 if (GET_MODE_SIZE (tmode) < max_size)
2312 if (mode == VOIDmode)
2315 icode = mov_optab->handlers[(int) mode].insn_code;
2316 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2317 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2319 max_size = GET_MODE_SIZE (mode);
2322 /* The code above should have handled everything. */
2327 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2328 with move instructions for mode MODE. GENFUN is the gen_... function
2329 to make a move insn for that mode. DATA has all the other info. */
2332 clear_by_pieces_1 (genfun, mode, data)
2333 rtx (*genfun) PARAMS ((rtx, ...));
2334 enum machine_mode mode;
2335 struct clear_by_pieces *data;
2337 register int size = GET_MODE_SIZE (mode);
2340 while (data->len >= size)
2342 if (data->reverse) data->offset -= size;
2344 to1 = (data->autinc_to
2345 ? gen_rtx_MEM (mode, data->to_addr)
2346 : copy_rtx (change_address (data->to, mode,
2347 plus_constant (data->to_addr,
2349 MEM_IN_STRUCT_P (to1) = data->to_struct;
2351 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2352 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2354 emit_insn ((*genfun) (to1, const0_rtx));
2355 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2356 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2358 if (! data->reverse) data->offset += size;
2364 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2365 its length in bytes and ALIGN is the maximum alignment we can is has.
2367 If we call a function that returns the length of the block, return it. */
2370 clear_storage (object, size, align)
2375 #ifdef TARGET_MEM_FUNCTIONS
2377 tree call_expr, arg_list;
2381 if (GET_MODE (object) == BLKmode)
2383 object = protect_from_queue (object, 1);
2384 size = protect_from_queue (size, 0);
2386 if (GET_CODE (size) == CONST_INT
2387 && MOVE_BY_PIECES_P (INTVAL (size), align))
2388 clear_by_pieces (object, INTVAL (size), align);
2391 /* Try the most limited insn first, because there's no point
2392 including more than one in the machine description unless
2393 the more limited one has some advantage. */
2395 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2396 enum machine_mode mode;
2398 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2399 mode = GET_MODE_WIDER_MODE (mode))
2401 enum insn_code code = clrstr_optab[(int) mode];
2402 insn_operand_predicate_fn pred;
2404 if (code != CODE_FOR_nothing
2405 /* We don't need MODE to be narrower than
2406 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2407 the mode mask, as it is returned by the macro, it will
2408 definitely be less than the actual mode mask. */
2409 && ((GET_CODE (size) == CONST_INT
2410 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2411 <= (GET_MODE_MASK (mode) >> 1)))
2412 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2413 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2414 || (*pred) (object, BLKmode))
2415 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2416 || (*pred) (opalign, VOIDmode)))
2419 rtx last = get_last_insn ();
2422 op1 = convert_to_mode (mode, size, 1);
2423 pred = insn_data[(int) code].operand[1].predicate;
2424 if (pred != 0 && ! (*pred) (op1, mode))
2425 op1 = copy_to_mode_reg (mode, op1);
2427 pat = GEN_FCN ((int) code) (object, op1, opalign);
2434 delete_insns_since (last);
2438 /* OBJECT or SIZE may have been passed through protect_from_queue.
2440 It is unsafe to save the value generated by protect_from_queue
2441 and reuse it later. Consider what happens if emit_queue is
2442 called before the return value from protect_from_queue is used.
2444 Expansion of the CALL_EXPR below will call emit_queue before
2445 we are finished emitting RTL for argument setup. So if we are
2446 not careful we could get the wrong value for an argument.
2448 To avoid this problem we go ahead and emit code to copy OBJECT
2449 and SIZE into new pseudos. We can then place those new pseudos
2450 into an RTL_EXPR and use them later, even after a call to
2453 Note this is not strictly needed for library calls since they
2454 do not call emit_queue before loading their arguments. However,
2455 we may need to have library calls call emit_queue in the future
2456 since failing to do so could cause problems for targets which
2457 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2458 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2460 #ifdef TARGET_MEM_FUNCTIONS
2461 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2463 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2464 TREE_UNSIGNED (integer_type_node));
2465 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2469 #ifdef TARGET_MEM_FUNCTIONS
2470 /* It is incorrect to use the libcall calling conventions to call
2471 memset in this context.
2473 This could be a user call to memset and the user may wish to
2474 examine the return value from memset.
2476 For targets where libcalls and normal calls have different
2477 conventions for returning pointers, we could end up generating
2480 So instead of using a libcall sequence we build up a suitable
2481 CALL_EXPR and expand the call in the normal fashion. */
2482 if (fn == NULL_TREE)
2486 /* This was copied from except.c, I don't know if all this is
2487 necessary in this context or not. */
2488 fn = get_identifier ("memset");
2489 push_obstacks_nochange ();
2490 end_temporary_allocation ();
2491 fntype = build_pointer_type (void_type_node);
2492 fntype = build_function_type (fntype, NULL_TREE);
2493 fn = build_decl (FUNCTION_DECL, fn, fntype);
2494 ggc_add_tree_root (&fn, 1);
2495 DECL_EXTERNAL (fn) = 1;
2496 TREE_PUBLIC (fn) = 1;
2497 DECL_ARTIFICIAL (fn) = 1;
2498 make_decl_rtl (fn, NULL_PTR, 1);
2499 assemble_external (fn);
2503 /* We need to make an argument list for the function call.
2505 memset has three arguments, the first is a void * addresses, the
2506 second a integer with the initialization value, the last is a
2507 size_t byte count for the copy. */
2509 = build_tree_list (NULL_TREE,
2510 make_tree (build_pointer_type (void_type_node),
2512 TREE_CHAIN (arg_list)
2513 = build_tree_list (NULL_TREE,
2514 make_tree (integer_type_node, const0_rtx));
2515 TREE_CHAIN (TREE_CHAIN (arg_list))
2516 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2517 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2519 /* Now we have to build up the CALL_EXPR itself. */
2520 call_expr = build1 (ADDR_EXPR,
2521 build_pointer_type (TREE_TYPE (fn)), fn);
2522 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2523 call_expr, arg_list, NULL_TREE);
2524 TREE_SIDE_EFFECTS (call_expr) = 1;
2526 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528 emit_library_call (bzero_libfunc, 0,
2529 VOIDmode, 2, object, Pmode, size,
2530 TYPE_MODE (integer_type_node));
2535 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2540 /* Generate code to copy Y into X.
2541 Both Y and X must have the same mode, except that
2542 Y can be a constant with VOIDmode.
2543 This mode cannot be BLKmode; use emit_block_move for that.
2545 Return the last instruction emitted. */
2548 emit_move_insn (x, y)
2551 enum machine_mode mode = GET_MODE (x);
2553 x = protect_from_queue (x, 1);
2554 y = protect_from_queue (y, 0);
2556 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2559 /* Never force constant_p_rtx to memory. */
2560 if (GET_CODE (y) == CONSTANT_P_RTX)
2562 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2563 y = force_const_mem (mode, y);
2565 /* If X or Y are memory references, verify that their addresses are valid
2567 if (GET_CODE (x) == MEM
2568 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2569 && ! push_operand (x, GET_MODE (x)))
2571 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2572 x = change_address (x, VOIDmode, XEXP (x, 0));
2574 if (GET_CODE (y) == MEM
2575 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2577 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2578 y = change_address (y, VOIDmode, XEXP (y, 0));
2580 if (mode == BLKmode)
2583 return emit_move_insn_1 (x, y);
2586 /* Low level part of emit_move_insn.
2587 Called just like emit_move_insn, but assumes X and Y
2588 are basically valid. */
2591 emit_move_insn_1 (x, y)
2594 enum machine_mode mode = GET_MODE (x);
2595 enum machine_mode submode;
2596 enum mode_class class = GET_MODE_CLASS (mode);
2599 if (mode >= MAX_MACHINE_MODE)
2602 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2604 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2606 /* Expand complex moves by moving real part and imag part, if possible. */
2607 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2608 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2610 (class == MODE_COMPLEX_INT
2611 ? MODE_INT : MODE_FLOAT),
2613 && (mov_optab->handlers[(int) submode].insn_code
2614 != CODE_FOR_nothing))
2616 /* Don't split destination if it is a stack push. */
2617 int stack = push_operand (x, GET_MODE (x));
2619 /* If this is a stack, push the highpart first, so it
2620 will be in the argument order.
2622 In that case, change_address is used only to convert
2623 the mode, not to change the address. */
2626 /* Note that the real part always precedes the imag part in memory
2627 regardless of machine's endianness. */
2628 #ifdef STACK_GROWS_DOWNWARD
2629 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2630 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2631 gen_imagpart (submode, y)));
2632 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2633 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2634 gen_realpart (submode, y)));
2636 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2637 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2638 gen_realpart (submode, y)));
2639 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2640 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2641 gen_imagpart (submode, y)));
2646 rtx realpart_x, realpart_y;
2647 rtx imagpart_x, imagpart_y;
2649 /* If this is a complex value with each part being smaller than a
2650 word, the usual calling sequence will likely pack the pieces into
2651 a single register. Unfortunately, SUBREG of hard registers only
2652 deals in terms of words, so we have a problem converting input
2653 arguments to the CONCAT of two registers that is used elsewhere
2654 for complex values. If this is before reload, we can copy it into
2655 memory and reload. FIXME, we should see about using extract and
2656 insert on integer registers, but complex short and complex char
2657 variables should be rarely used. */
2658 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2659 && (reload_in_progress | reload_completed) == 0)
2661 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2662 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2664 if (packed_dest_p || packed_src_p)
2666 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2667 ? MODE_FLOAT : MODE_INT);
2669 enum machine_mode reg_mode =
2670 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2672 if (reg_mode != BLKmode)
2674 rtx mem = assign_stack_temp (reg_mode,
2675 GET_MODE_SIZE (mode), 0);
2677 rtx cmem = change_address (mem, mode, NULL_RTX);
2679 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2684 emit_move_insn_1 (cmem, y);
2685 return emit_move_insn_1 (sreg, mem);
2689 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2690 emit_move_insn_1 (mem, sreg);
2691 return emit_move_insn_1 (x, cmem);
2697 realpart_x = gen_realpart (submode, x);
2698 realpart_y = gen_realpart (submode, y);
2699 imagpart_x = gen_imagpart (submode, x);
2700 imagpart_y = gen_imagpart (submode, y);
2702 /* Show the output dies here. This is necessary for SUBREGs
2703 of pseudos since we cannot track their lifetimes correctly;
2704 hard regs shouldn't appear here except as return values.
2705 We never want to emit such a clobber after reload. */
2707 && ! (reload_in_progress || reload_completed)
2708 && (GET_CODE (realpart_x) == SUBREG
2709 || GET_CODE (imagpart_x) == SUBREG))
2711 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2714 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2715 (realpart_x, realpart_y));
2716 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2717 (imagpart_x, imagpart_y));
2720 return get_last_insn ();
2723 /* This will handle any multi-word mode that lacks a move_insn pattern.
2724 However, you will get better code if you define such patterns,
2725 even if they must turn into multiple assembler instructions. */
2726 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2732 #ifdef PUSH_ROUNDING
2734 /* If X is a push on the stack, do the push now and replace
2735 X with a reference to the stack pointer. */
2736 if (push_operand (x, GET_MODE (x)))
2738 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2739 x = change_address (x, VOIDmode, stack_pointer_rtx);
2743 /* If we are in reload, see if either operand is a MEM whose address
2744 is scheduled for replacement. */
2745 if (reload_in_progress && GET_CODE (x) == MEM
2746 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2748 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2750 MEM_COPY_ATTRIBUTES (new, x);
2753 if (reload_in_progress && GET_CODE (y) == MEM
2754 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2756 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2758 MEM_COPY_ATTRIBUTES (new, y);
2766 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2769 rtx xpart = operand_subword (x, i, 1, mode);
2770 rtx ypart = operand_subword (y, i, 1, mode);
2772 /* If we can't get a part of Y, put Y into memory if it is a
2773 constant. Otherwise, force it into a register. If we still
2774 can't get a part of Y, abort. */
2775 if (ypart == 0 && CONSTANT_P (y))
2777 y = force_const_mem (mode, y);
2778 ypart = operand_subword (y, i, 1, mode);
2780 else if (ypart == 0)
2781 ypart = operand_subword_force (y, i, mode);
2783 if (xpart == 0 || ypart == 0)
2786 need_clobber |= (GET_CODE (xpart) == SUBREG);
2788 last_insn = emit_move_insn (xpart, ypart);
2791 seq = gen_sequence ();
2794 /* Show the output dies here. This is necessary for SUBREGs
2795 of pseudos since we cannot track their lifetimes correctly;
2796 hard regs shouldn't appear here except as return values.
2797 We never want to emit such a clobber after reload. */
2799 && ! (reload_in_progress || reload_completed)
2800 && need_clobber != 0)
2802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2813 /* Pushing data onto the stack. */
2815 /* Push a block of length SIZE (perhaps variable)
2816 and return an rtx to address the beginning of the block.
2817 Note that it is not possible for the value returned to be a QUEUED.
2818 The value may be virtual_outgoing_args_rtx.
2820 EXTRA is the number of bytes of padding to push in addition to SIZE.
2821 BELOW nonzero means this padding comes at low addresses;
2822 otherwise, the padding comes at high addresses. */
2825 push_block (size, extra, below)
2831 size = convert_modes (Pmode, ptr_mode, size, 1);
2832 if (CONSTANT_P (size))
2833 anti_adjust_stack (plus_constant (size, extra));
2834 else if (GET_CODE (size) == REG && extra == 0)
2835 anti_adjust_stack (size);
2838 temp = copy_to_mode_reg (Pmode, size);
2840 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2841 temp, 0, OPTAB_LIB_WIDEN);
2842 anti_adjust_stack (temp);
2845 #ifndef STACK_GROWS_DOWNWARD
2846 #ifdef ARGS_GROW_DOWNWARD
2847 if (!ACCUMULATE_OUTGOING_ARGS)
2855 /* Return the lowest stack address when STACK or ARGS grow downward and
2856 we are not aaccumulating outgoing arguments (the c4x port uses such
2858 temp = virtual_outgoing_args_rtx;
2859 if (extra != 0 && below)
2860 temp = plus_constant (temp, extra);
2864 if (GET_CODE (size) == CONST_INT)
2865 temp = plus_constant (virtual_outgoing_args_rtx,
2866 - INTVAL (size) - (below ? 0 : extra));
2867 else if (extra != 0 && !below)
2868 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2869 negate_rtx (Pmode, plus_constant (size, extra)));
2871 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2872 negate_rtx (Pmode, size));
2875 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2881 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2884 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2885 block of SIZE bytes. */
2888 get_push_address (size)
2893 if (STACK_PUSH_CODE == POST_DEC)
2894 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2895 else if (STACK_PUSH_CODE == POST_INC)
2896 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2898 temp = stack_pointer_rtx;
2900 return copy_to_reg (temp);
2903 /* Generate code to push X onto the stack, assuming it has mode MODE and
2905 MODE is redundant except when X is a CONST_INT (since they don't
2907 SIZE is an rtx for the size of data to be copied (in bytes),
2908 needed only if X is BLKmode.
2910 ALIGN is maximum alignment we can assume.
2912 If PARTIAL and REG are both nonzero, then copy that many of the first
2913 words of X into registers starting with REG, and push the rest of X.
2914 The amount of space pushed is decreased by PARTIAL words,
2915 rounded *down* to a multiple of PARM_BOUNDARY.
2916 REG must be a hard register in this case.
2917 If REG is zero but PARTIAL is not, take any all others actions for an
2918 argument partially in registers, but do not actually load any
2921 EXTRA is the amount in bytes of extra space to leave next to this arg.
2922 This is ignored if an argument block has already been allocated.
2924 On a machine that lacks real push insns, ARGS_ADDR is the address of
2925 the bottom of the argument block for this call. We use indexing off there
2926 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2927 argument block has not been preallocated.
2929 ARGS_SO_FAR is the size of args previously pushed for this call.
2931 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2932 for arguments passed in registers. If nonzero, it will be the number
2933 of bytes required. */
2936 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2937 args_addr, args_so_far, reg_parm_stack_space,
2940 enum machine_mode mode;
2949 int reg_parm_stack_space;
2953 enum direction stack_direction
2954 #ifdef STACK_GROWS_DOWNWARD
2960 /* Decide where to pad the argument: `downward' for below,
2961 `upward' for above, or `none' for don't pad it.
2962 Default is below for small data on big-endian machines; else above. */
2963 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2965 /* Invert direction if stack is post-update. */
2966 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2967 if (where_pad != none)
2968 where_pad = (where_pad == downward ? upward : downward);
2970 xinner = x = protect_from_queue (x, 0);
2972 if (mode == BLKmode)
2974 /* Copy a block into the stack, entirely or partially. */
2977 int used = partial * UNITS_PER_WORD;
2978 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2986 /* USED is now the # of bytes we need not copy to the stack
2987 because registers will take care of them. */
2990 xinner = change_address (xinner, BLKmode,
2991 plus_constant (XEXP (xinner, 0), used));
2993 /* If the partial register-part of the arg counts in its stack size,
2994 skip the part of stack space corresponding to the registers.
2995 Otherwise, start copying to the beginning of the stack space,
2996 by setting SKIP to 0. */
2997 skip = (reg_parm_stack_space == 0) ? 0 : used;
2999 #ifdef PUSH_ROUNDING
3000 /* Do it with several push insns if that doesn't take lots of insns
3001 and if there is no difficulty with push insns that skip bytes
3002 on the stack for alignment purposes. */
3005 && GET_CODE (size) == CONST_INT
3007 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3008 /* Here we avoid the case of a structure whose weak alignment
3009 forces many pushes of a small amount of data,
3010 and such small pushes do rounding that causes trouble. */
3011 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3012 || align >= BIGGEST_ALIGNMENT
3013 || PUSH_ROUNDING (align) == align)
3014 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3016 /* Push padding now if padding above and stack grows down,
3017 or if padding below and stack grows up.
3018 But if space already allocated, this has already been done. */
3019 if (extra && args_addr == 0
3020 && where_pad != none && where_pad != stack_direction)
3021 anti_adjust_stack (GEN_INT (extra));
3023 stack_pointer_delta += INTVAL (size) - used;
3024 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3025 INTVAL (size) - used, align);
3027 if (current_function_check_memory_usage && ! in_check_memory_usage)
3031 in_check_memory_usage = 1;
3032 temp = get_push_address (INTVAL(size) - used);
3033 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3034 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3036 XEXP (xinner, 0), Pmode,
3037 GEN_INT (INTVAL(size) - used),
3038 TYPE_MODE (sizetype));
3040 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3042 GEN_INT (INTVAL(size) - used),
3043 TYPE_MODE (sizetype),
3044 GEN_INT (MEMORY_USE_RW),
3045 TYPE_MODE (integer_type_node));
3046 in_check_memory_usage = 0;
3050 #endif /* PUSH_ROUNDING */
3052 /* Otherwise make space on the stack and copy the data
3053 to the address of that space. */
3055 /* Deduct words put into registers from the size we must copy. */
3058 if (GET_CODE (size) == CONST_INT)
3059 size = GEN_INT (INTVAL (size) - used);
3061 size = expand_binop (GET_MODE (size), sub_optab, size,
3062 GEN_INT (used), NULL_RTX, 0,
3066 /* Get the address of the stack space.
3067 In this case, we do not deal with EXTRA separately.
3068 A single stack adjust will do. */
3071 temp = push_block (size, extra, where_pad == downward);
3074 else if (GET_CODE (args_so_far) == CONST_INT)
3075 temp = memory_address (BLKmode,
3076 plus_constant (args_addr,
3077 skip + INTVAL (args_so_far)));
3079 temp = memory_address (BLKmode,
3080 plus_constant (gen_rtx_PLUS (Pmode,
3084 if (current_function_check_memory_usage && ! in_check_memory_usage)
3088 in_check_memory_usage = 1;
3089 target = copy_to_reg (temp);
3090 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3091 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3093 XEXP (xinner, 0), Pmode,
3094 size, TYPE_MODE (sizetype));
3096 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3098 size, TYPE_MODE (sizetype),
3099 GEN_INT (MEMORY_USE_RW),
3100 TYPE_MODE (integer_type_node));
3101 in_check_memory_usage = 0;
3104 /* TEMP is the address of the block. Copy the data there. */
3105 if (GET_CODE (size) == CONST_INT
3106 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3108 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3109 INTVAL (size), align);
3114 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3115 enum machine_mode mode;
3116 rtx target = gen_rtx_MEM (BLKmode, temp);
3118 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3120 mode = GET_MODE_WIDER_MODE (mode))
3122 enum insn_code code = movstr_optab[(int) mode];
3123 insn_operand_predicate_fn pred;
3125 if (code != CODE_FOR_nothing
3126 && ((GET_CODE (size) == CONST_INT
3127 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3128 <= (GET_MODE_MASK (mode) >> 1)))
3129 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3130 && (!(pred = insn_data[(int) code].operand[0].predicate)
3131 || ((*pred) (target, BLKmode)))
3132 && (!(pred = insn_data[(int) code].operand[1].predicate)
3133 || ((*pred) (xinner, BLKmode)))
3134 && (!(pred = insn_data[(int) code].operand[3].predicate)
3135 || ((*pred) (opalign, VOIDmode))))
3137 rtx op2 = convert_to_mode (mode, size, 1);
3138 rtx last = get_last_insn ();
3141 pred = insn_data[(int) code].operand[2].predicate;
3142 if (pred != 0 && ! (*pred) (op2, mode))
3143 op2 = copy_to_mode_reg (mode, op2);
3145 pat = GEN_FCN ((int) code) (target, xinner,
3153 delete_insns_since (last);
3158 if (!ACCUMULATE_OUTGOING_ARGS)
3160 /* If the source is referenced relative to the stack pointer,
3161 copy it to another register to stabilize it. We do not need
3162 to do this if we know that we won't be changing sp. */
3164 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3165 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3166 temp = copy_to_reg (temp);
3169 /* Make inhibit_defer_pop nonzero around the library call
3170 to force it to pop the bcopy-arguments right away. */
3172 #ifdef TARGET_MEM_FUNCTIONS
3173 emit_library_call (memcpy_libfunc, 0,
3174 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3175 convert_to_mode (TYPE_MODE (sizetype),
3176 size, TREE_UNSIGNED (sizetype)),
3177 TYPE_MODE (sizetype));
3179 emit_library_call (bcopy_libfunc, 0,
3180 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3181 convert_to_mode (TYPE_MODE (integer_type_node),
3183 TREE_UNSIGNED (integer_type_node)),
3184 TYPE_MODE (integer_type_node));
3189 else if (partial > 0)
3191 /* Scalar partly in registers. */
3193 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3196 /* # words of start of argument
3197 that we must make space for but need not store. */
3198 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3199 int args_offset = INTVAL (args_so_far);
3202 /* Push padding now if padding above and stack grows down,
3203 or if padding below and stack grows up.
3204 But if space already allocated, this has already been done. */
3205 if (extra && args_addr == 0
3206 && where_pad != none && where_pad != stack_direction)
3207 anti_adjust_stack (GEN_INT (extra));
3209 /* If we make space by pushing it, we might as well push
3210 the real data. Otherwise, we can leave OFFSET nonzero
3211 and leave the space uninitialized. */
3215 /* Now NOT_STACK gets the number of words that we don't need to
3216 allocate on the stack. */
3217 not_stack = partial - offset;
3219 /* If the partial register-part of the arg counts in its stack size,
3220 skip the part of stack space corresponding to the registers.
3221 Otherwise, start copying to the beginning of the stack space,
3222 by setting SKIP to 0. */
3223 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3225 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3226 x = validize_mem (force_const_mem (mode, x));
3228 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3229 SUBREGs of such registers are not allowed. */
3230 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3231 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3232 x = copy_to_reg (x);
3234 /* Loop over all the words allocated on the stack for this arg. */
3235 /* We can do it by words, because any scalar bigger than a word
3236 has a size a multiple of a word. */
3237 #ifndef PUSH_ARGS_REVERSED
3238 for (i = not_stack; i < size; i++)
3240 for (i = size - 1; i >= not_stack; i--)
3242 if (i >= not_stack + offset)
3243 emit_push_insn (operand_subword_force (x, i, mode),
3244 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3246 GEN_INT (args_offset + ((i - not_stack + skip)
3248 reg_parm_stack_space, alignment_pad);
3253 rtx target = NULL_RTX;
3255 /* Push padding now if padding above and stack grows down,
3256 or if padding below and stack grows up.
3257 But if space already allocated, this has already been done. */
3258 if (extra && args_addr == 0
3259 && where_pad != none && where_pad != stack_direction)
3260 anti_adjust_stack (GEN_INT (extra));
3262 #ifdef PUSH_ROUNDING
3263 if (args_addr == 0 && PUSH_ARGS)
3265 addr = gen_push_operand ();
3266 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3271 if (GET_CODE (args_so_far) == CONST_INT)
3273 = memory_address (mode,
3274 plus_constant (args_addr,
3275 INTVAL (args_so_far)));
3277 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3282 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3284 if (current_function_check_memory_usage && ! in_check_memory_usage)
3286 in_check_memory_usage = 1;
3288 target = get_push_address (GET_MODE_SIZE (mode));
3290 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3291 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3294 GEN_INT (GET_MODE_SIZE (mode)),
3295 TYPE_MODE (sizetype));
3297 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3299 GEN_INT (GET_MODE_SIZE (mode)),
3300 TYPE_MODE (sizetype),
3301 GEN_INT (MEMORY_USE_RW),
3302 TYPE_MODE (integer_type_node));
3303 in_check_memory_usage = 0;
3308 /* If part should go in registers, copy that part
3309 into the appropriate registers. Do this now, at the end,
3310 since mem-to-mem copies above may do function calls. */
3311 if (partial > 0 && reg != 0)
3313 /* Handle calls that pass values in multiple non-contiguous locations.
3314 The Irix 6 ABI has examples of this. */
3315 if (GET_CODE (reg) == PARALLEL)
3316 emit_group_load (reg, x, -1, align); /* ??? size? */
3318 move_block_to_reg (REGNO (reg), x, partial, mode);
3321 if (extra && args_addr == 0 && where_pad == stack_direction)
3322 anti_adjust_stack (GEN_INT (extra));
3325 anti_adjust_stack (alignment_pad);
3328 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3336 /* Only registers can be subtargets. */
3337 || GET_CODE (x) != REG
3338 /* If the register is readonly, it can't be set more than once. */
3339 || RTX_UNCHANGING_P (x)
3340 /* Don't use hard regs to avoid extending their life. */
3341 || REGNO (x) < FIRST_PSEUDO_REGISTER
3342 /* Avoid subtargets inside loops,
3343 since they hide some invariant expressions. */
3344 || preserve_subexpressions_p ())
3348 /* Expand an assignment that stores the value of FROM into TO.
3349 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3350 (This may contain a QUEUED rtx;
3351 if the value is constant, this rtx is a constant.)
3352 Otherwise, the returned value is NULL_RTX.
3354 SUGGEST_REG is no longer actually used.
3355 It used to mean, copy the value through a register
3356 and return that register, if that is possible.
3357 We now use WANT_VALUE to decide whether to do this. */
3360 expand_assignment (to, from, want_value, suggest_reg)
3363 int suggest_reg ATTRIBUTE_UNUSED;
3365 register rtx to_rtx = 0;
3368 /* Don't crash if the lhs of the assignment was erroneous. */
3370 if (TREE_CODE (to) == ERROR_MARK)
3372 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3373 return want_value ? result : NULL_RTX;
3376 /* Assignment of a structure component needs special treatment
3377 if the structure component's rtx is not simply a MEM.
3378 Assignment of an array element at a constant index, and assignment of
3379 an array element in an unaligned packed structure field, has the same
3382 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3383 || TREE_CODE (to) == ARRAY_REF)
3385 enum machine_mode mode1;
3386 HOST_WIDE_INT bitsize, bitpos;
3391 unsigned int alignment;
3394 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3395 &unsignedp, &volatilep, &alignment);
3397 /* If we are going to use store_bit_field and extract_bit_field,
3398 make sure to_rtx will be safe for multiple use. */
3400 if (mode1 == VOIDmode && want_value)
3401 tem = stabilize_reference (tem);
3403 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3406 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3408 if (GET_CODE (to_rtx) != MEM)
3411 if (GET_MODE (offset_rtx) != ptr_mode)
3413 #ifdef POINTERS_EXTEND_UNSIGNED
3414 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3416 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3420 /* A constant address in TO_RTX can have VOIDmode, we must not try
3421 to call force_reg for that case. Avoid that case. */
3422 if (GET_CODE (to_rtx) == MEM
3423 && GET_MODE (to_rtx) == BLKmode
3424 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3426 && (bitpos % bitsize) == 0
3427 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3428 && alignment == GET_MODE_ALIGNMENT (mode1))
3430 rtx temp = change_address (to_rtx, mode1,
3431 plus_constant (XEXP (to_rtx, 0),
3434 if (GET_CODE (XEXP (temp, 0)) == REG)
3437 to_rtx = change_address (to_rtx, mode1,
3438 force_reg (GET_MODE (XEXP (temp, 0)),
3443 to_rtx = change_address (to_rtx, VOIDmode,
3444 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3445 force_reg (ptr_mode,
3451 if (GET_CODE (to_rtx) == MEM)
3453 /* When the offset is zero, to_rtx is the address of the
3454 structure we are storing into, and hence may be shared.
3455 We must make a new MEM before setting the volatile bit. */
3457 to_rtx = copy_rtx (to_rtx);
3459 MEM_VOLATILE_P (to_rtx) = 1;
3461 #if 0 /* This was turned off because, when a field is volatile
3462 in an object which is not volatile, the object may be in a register,
3463 and then we would abort over here. */
3469 if (TREE_CODE (to) == COMPONENT_REF
3470 && TREE_READONLY (TREE_OPERAND (to, 1)))
3473 to_rtx = copy_rtx (to_rtx);
3475 RTX_UNCHANGING_P (to_rtx) = 1;
3478 /* Check the access. */
3479 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3484 enum machine_mode best_mode;
3486 best_mode = get_best_mode (bitsize, bitpos,
3487 TYPE_ALIGN (TREE_TYPE (tem)),
3489 if (best_mode == VOIDmode)
3492 best_mode_size = GET_MODE_BITSIZE (best_mode);
3493 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3494 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3495 size *= GET_MODE_SIZE (best_mode);
3497 /* Check the access right of the pointer. */
3499 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3501 GEN_INT (size), TYPE_MODE (sizetype),
3502 GEN_INT (MEMORY_USE_WO),
3503 TYPE_MODE (integer_type_node));
3506 /* If this is a varying-length object, we must get the address of
3507 the source and do an explicit block move. */
3510 unsigned int from_align;
3511 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3513 = change_address (to_rtx, VOIDmode,
3514 plus_constant (XEXP (to_rtx, 0),
3515 bitpos / BITS_PER_UNIT));
3517 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3518 MIN (alignment, from_align));
3525 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3527 /* Spurious cast for HPUX compiler. */
3528 ? ((enum machine_mode)
3529 TYPE_MODE (TREE_TYPE (to)))
3533 int_size_in_bytes (TREE_TYPE (tem)),
3534 get_alias_set (to));
3536 preserve_temp_slots (result);
3540 /* If the value is meaningful, convert RESULT to the proper mode.
3541 Otherwise, return nothing. */
3542 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3543 TYPE_MODE (TREE_TYPE (from)),
3545 TREE_UNSIGNED (TREE_TYPE (to)))
3550 /* If the rhs is a function call and its value is not an aggregate,
3551 call the function before we start to compute the lhs.
3552 This is needed for correct code for cases such as
3553 val = setjmp (buf) on machines where reference to val
3554 requires loading up part of an address in a separate insn.
3556 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3557 since it might be a promoted variable where the zero- or sign- extension
3558 needs to be done. Handling this in the normal way is safe because no
3559 computation is done before the call. */
3560 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3562 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3563 && GET_CODE (DECL_RTL (to)) == REG))
3568 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3570 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3572 /* Handle calls that return values in multiple non-contiguous locations.
3573 The Irix 6 ABI has examples of this. */
3574 if (GET_CODE (to_rtx) == PARALLEL)
3575 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3576 TYPE_ALIGN (TREE_TYPE (from)));
3577 else if (GET_MODE (to_rtx) == BLKmode)
3578 emit_block_move (to_rtx, value, expr_size (from),
3579 TYPE_ALIGN (TREE_TYPE (from)));
3582 #ifdef POINTERS_EXTEND_UNSIGNED
3583 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3584 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3585 value = convert_memory_address (GET_MODE (to_rtx), value);
3587 emit_move_insn (to_rtx, value);
3589 preserve_temp_slots (to_rtx);
3592 return want_value ? to_rtx : NULL_RTX;
3595 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3596 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3600 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3601 if (GET_CODE (to_rtx) == MEM)
3602 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3605 /* Don't move directly into a return register. */
3606 if (TREE_CODE (to) == RESULT_DECL
3607 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3612 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3614 if (GET_CODE (to_rtx) == PARALLEL)
3615 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3616 TYPE_ALIGN (TREE_TYPE (from)));
3618 emit_move_insn (to_rtx, temp);
3620 preserve_temp_slots (to_rtx);
3623 return want_value ? to_rtx : NULL_RTX;
3626 /* In case we are returning the contents of an object which overlaps
3627 the place the value is being stored, use a safe function when copying
3628 a value through a pointer into a structure value return block. */
3629 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3630 && current_function_returns_struct
3631 && !current_function_returns_pcc_struct)
3636 size = expr_size (from);
3637 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3638 EXPAND_MEMORY_USE_DONT);
3640 /* Copy the rights of the bitmap. */
3641 if (current_function_check_memory_usage)
3642 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3643 XEXP (to_rtx, 0), Pmode,
3644 XEXP (from_rtx, 0), Pmode,
3645 convert_to_mode (TYPE_MODE (sizetype),
3646 size, TREE_UNSIGNED (sizetype)),
3647 TYPE_MODE (sizetype));
3649 #ifdef TARGET_MEM_FUNCTIONS
3650 emit_library_call (memcpy_libfunc, 0,
3651 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3652 XEXP (from_rtx, 0), Pmode,
3653 convert_to_mode (TYPE_MODE (sizetype),
3654 size, TREE_UNSIGNED (sizetype)),
3655 TYPE_MODE (sizetype));
3657 emit_library_call (bcopy_libfunc, 0,
3658 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3659 XEXP (to_rtx, 0), Pmode,
3660 convert_to_mode (TYPE_MODE (integer_type_node),
3661 size, TREE_UNSIGNED (integer_type_node)),
3662 TYPE_MODE (integer_type_node));
3665 preserve_temp_slots (to_rtx);
3668 return want_value ? to_rtx : NULL_RTX;
3671 /* Compute FROM and store the value in the rtx we got. */
3674 result = store_expr (from, to_rtx, want_value);
3675 preserve_temp_slots (result);
3678 return want_value ? result : NULL_RTX;
3681 /* Generate code for computing expression EXP,
3682 and storing the value into TARGET.
3683 TARGET may contain a QUEUED rtx.
3685 If WANT_VALUE is nonzero, return a copy of the value
3686 not in TARGET, so that we can be sure to use the proper
3687 value in a containing expression even if TARGET has something
3688 else stored in it. If possible, we copy the value through a pseudo
3689 and return that pseudo. Or, if the value is constant, we try to
3690 return the constant. In some cases, we return a pseudo
3691 copied *from* TARGET.
3693 If the mode is BLKmode then we may return TARGET itself.
3694 It turns out that in BLKmode it doesn't cause a problem.
3695 because C has no operators that could combine two different
3696 assignments into the same BLKmode object with different values
3697 with no sequence point. Will other languages need this to
3700 If WANT_VALUE is 0, we return NULL, to make sure
3701 to catch quickly any cases where the caller uses the value
3702 and fails to set WANT_VALUE. */
3705 store_expr (exp, target, want_value)
3707 register rtx target;
3711 int dont_return_target = 0;
3713 if (TREE_CODE (exp) == COMPOUND_EXPR)
3715 /* Perform first part of compound expression, then assign from second
3717 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3719 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3721 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3723 /* For conditional expression, get safe form of the target. Then
3724 test the condition, doing the appropriate assignment on either
3725 side. This avoids the creation of unnecessary temporaries.
3726 For non-BLKmode, it is more efficient not to do this. */
3728 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3731 target = protect_from_queue (target, 1);
3733 do_pending_stack_adjust ();
3735 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3736 start_cleanup_deferral ();
3737 store_expr (TREE_OPERAND (exp, 1), target, 0);
3738 end_cleanup_deferral ();
3740 emit_jump_insn (gen_jump (lab2));
3743 start_cleanup_deferral ();
3744 store_expr (TREE_OPERAND (exp, 2), target, 0);
3745 end_cleanup_deferral ();
3750 return want_value ? target : NULL_RTX;
3752 else if (queued_subexp_p (target))
3753 /* If target contains a postincrement, let's not risk
3754 using it as the place to generate the rhs. */
3756 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3758 /* Expand EXP into a new pseudo. */
3759 temp = gen_reg_rtx (GET_MODE (target));
3760 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3763 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3765 /* If target is volatile, ANSI requires accessing the value
3766 *from* the target, if it is accessed. So make that happen.
3767 In no case return the target itself. */
3768 if (! MEM_VOLATILE_P (target) && want_value)
3769 dont_return_target = 1;
3771 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3772 && GET_MODE (target) != BLKmode)
3773 /* If target is in memory and caller wants value in a register instead,
3774 arrange that. Pass TARGET as target for expand_expr so that,
3775 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3776 We know expand_expr will not use the target in that case.
3777 Don't do this if TARGET is volatile because we are supposed
3778 to write it and then read it. */
3780 temp = expand_expr (exp, target, GET_MODE (target), 0);
3781 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3782 temp = copy_to_reg (temp);
3783 dont_return_target = 1;
3785 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3786 /* If this is an scalar in a register that is stored in a wider mode
3787 than the declared mode, compute the result into its declared mode
3788 and then convert to the wider mode. Our value is the computed
3791 /* If we don't want a value, we can do the conversion inside EXP,
3792 which will often result in some optimizations. Do the conversion
3793 in two steps: first change the signedness, if needed, then
3794 the extend. But don't do this if the type of EXP is a subtype
3795 of something else since then the conversion might involve
3796 more than just converting modes. */
3797 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3798 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3800 if (TREE_UNSIGNED (TREE_TYPE (exp))
3801 != SUBREG_PROMOTED_UNSIGNED_P (target))
3804 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3808 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3809 SUBREG_PROMOTED_UNSIGNED_P (target)),
3813 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3815 /* If TEMP is a volatile MEM and we want a result value, make
3816 the access now so it gets done only once. Likewise if
3817 it contains TARGET. */
3818 if (GET_CODE (temp) == MEM && want_value
3819 && (MEM_VOLATILE_P (temp)
3820 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3821 temp = copy_to_reg (temp);
3823 /* If TEMP is a VOIDmode constant, use convert_modes to make
3824 sure that we properly convert it. */
3825 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3826 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3827 TYPE_MODE (TREE_TYPE (exp)), temp,
3828 SUBREG_PROMOTED_UNSIGNED_P (target));
3830 convert_move (SUBREG_REG (target), temp,
3831 SUBREG_PROMOTED_UNSIGNED_P (target));
3833 /* If we promoted a constant, change the mode back down to match
3834 target. Otherwise, the caller might get confused by a result whose
3835 mode is larger than expected. */
3837 if (want_value && GET_MODE (temp) != GET_MODE (target)
3838 && GET_MODE (temp) != VOIDmode)
3840 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3841 SUBREG_PROMOTED_VAR_P (temp) = 1;
3842 SUBREG_PROMOTED_UNSIGNED_P (temp)
3843 = SUBREG_PROMOTED_UNSIGNED_P (target);
3846 return want_value ? temp : NULL_RTX;
3850 temp = expand_expr (exp, target, GET_MODE (target), 0);
3851 /* Return TARGET if it's a specified hardware register.
3852 If TARGET is a volatile mem ref, either return TARGET
3853 or return a reg copied *from* TARGET; ANSI requires this.
3855 Otherwise, if TEMP is not TARGET, return TEMP
3856 if it is constant (for efficiency),
3857 or if we really want the correct value. */
3858 if (!(target && GET_CODE (target) == REG
3859 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3860 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3861 && ! rtx_equal_p (temp, target)
3862 && (CONSTANT_P (temp) || want_value))
3863 dont_return_target = 1;
3866 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3867 the same as that of TARGET, adjust the constant. This is needed, for
3868 example, in case it is a CONST_DOUBLE and we want only a word-sized
3870 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3871 && TREE_CODE (exp) != ERROR_MARK
3872 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3873 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3874 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3876 if (current_function_check_memory_usage
3877 && GET_CODE (target) == MEM
3878 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3880 if (GET_CODE (temp) == MEM)
3881 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3882 XEXP (target, 0), Pmode,
3883 XEXP (temp, 0), Pmode,
3884 expr_size (exp), TYPE_MODE (sizetype));
3886 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3887 XEXP (target, 0), Pmode,
3888 expr_size (exp), TYPE_MODE (sizetype),
3889 GEN_INT (MEMORY_USE_WO),
3890 TYPE_MODE (integer_type_node));
3893 /* If value was not generated in the target, store it there.
3894 Convert the value to TARGET's type first if nec. */
3895 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3896 one or both of them are volatile memory refs, we have to distinguish
3898 - expand_expr has used TARGET. In this case, we must not generate
3899 another copy. This can be detected by TARGET being equal according
3901 - expand_expr has not used TARGET - that means that the source just
3902 happens to have the same RTX form. Since temp will have been created
3903 by expand_expr, it will compare unequal according to == .
3904 We must generate a copy in this case, to reach the correct number
3905 of volatile memory references. */
3907 if ((! rtx_equal_p (temp, target)
3908 || (temp != target && (side_effects_p (temp)
3909 || side_effects_p (target))))
3910 && TREE_CODE (exp) != ERROR_MARK)
3912 target = protect_from_queue (target, 1);
3913 if (GET_MODE (temp) != GET_MODE (target)
3914 && GET_MODE (temp) != VOIDmode)
3916 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3917 if (dont_return_target)
3919 /* In this case, we will return TEMP,
3920 so make sure it has the proper mode.
3921 But don't forget to store the value into TARGET. */
3922 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3923 emit_move_insn (target, temp);
3926 convert_move (target, temp, unsignedp);
3929 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3931 /* Handle copying a string constant into an array.
3932 The string constant may be shorter than the array.
3933 So copy just the string's actual length, and clear the rest. */
3937 /* Get the size of the data type of the string,
3938 which is actually the size of the target. */
3939 size = expr_size (exp);
3940 if (GET_CODE (size) == CONST_INT
3941 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3942 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3945 /* Compute the size of the data to copy from the string. */
3947 = size_binop (MIN_EXPR,
3948 make_tree (sizetype, size),
3949 size_int (TREE_STRING_LENGTH (exp)));
3950 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3951 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3955 /* Copy that much. */
3956 emit_block_move (target, temp, copy_size_rtx,
3957 TYPE_ALIGN (TREE_TYPE (exp)));
3959 /* Figure out how much is left in TARGET that we have to clear.
3960 Do all calculations in ptr_mode. */
3962 addr = XEXP (target, 0);
3963 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3965 if (GET_CODE (copy_size_rtx) == CONST_INT)
3967 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3968 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3969 align = MIN (align, (BITS_PER_UNIT
3970 * (INTVAL (copy_size_rtx)
3971 & - INTVAL (copy_size_rtx))));
3975 addr = force_reg (ptr_mode, addr);
3976 addr = expand_binop (ptr_mode, add_optab, addr,
3977 copy_size_rtx, NULL_RTX, 0,
3980 size = expand_binop (ptr_mode, sub_optab, size,
3981 copy_size_rtx, NULL_RTX, 0,
3984 align = BITS_PER_UNIT;
3985 label = gen_label_rtx ();
3986 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3987 GET_MODE (size), 0, 0, label);
3989 align = MIN (align, expr_align (copy_size));
3991 if (size != const0_rtx)
3993 /* Be sure we can write on ADDR. */
3994 if (current_function_check_memory_usage)
3995 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3997 size, TYPE_MODE (sizetype),
3998 GEN_INT (MEMORY_USE_WO),
3999 TYPE_MODE (integer_type_node));
4000 clear_storage (gen_rtx_MEM (BLKmode, addr), size, align);
4007 /* Handle calls that return values in multiple non-contiguous locations.
4008 The Irix 6 ABI has examples of this. */
4009 else if (GET_CODE (target) == PARALLEL)
4010 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4011 TYPE_ALIGN (TREE_TYPE (exp)));
4012 else if (GET_MODE (temp) == BLKmode)
4013 emit_block_move (target, temp, expr_size (exp),
4014 TYPE_ALIGN (TREE_TYPE (exp)));
4016 emit_move_insn (target, temp);
4019 /* If we don't want a value, return NULL_RTX. */
4023 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4024 ??? The latter test doesn't seem to make sense. */
4025 else if (dont_return_target && GET_CODE (temp) != MEM)
4028 /* Return TARGET itself if it is a hard register. */
4029 else if (want_value && GET_MODE (target) != BLKmode
4030 && ! (GET_CODE (target) == REG
4031 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4032 return copy_to_reg (target);
4038 /* Return 1 if EXP just contains zeros. */
4046 switch (TREE_CODE (exp))
4050 case NON_LVALUE_EXPR:
4051 return is_zeros_p (TREE_OPERAND (exp, 0));
4054 return integer_zerop (exp);
4058 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4061 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4064 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4065 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4066 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4067 if (! is_zeros_p (TREE_VALUE (elt)))
4077 /* Return 1 if EXP contains mostly (3/4) zeros. */
4080 mostly_zeros_p (exp)
4083 if (TREE_CODE (exp) == CONSTRUCTOR)
4085 int elts = 0, zeros = 0;
4086 tree elt = CONSTRUCTOR_ELTS (exp);
4087 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4089 /* If there are no ranges of true bits, it is all zero. */
4090 return elt == NULL_TREE;
4092 for (; elt; elt = TREE_CHAIN (elt))
4094 /* We do not handle the case where the index is a RANGE_EXPR,
4095 so the statistic will be somewhat inaccurate.
4096 We do make a more accurate count in store_constructor itself,
4097 so since this function is only used for nested array elements,
4098 this should be close enough. */
4099 if (mostly_zeros_p (TREE_VALUE (elt)))
4104 return 4 * zeros >= 3 * elts;
4107 return is_zeros_p (exp);
4110 /* Helper function for store_constructor.
4111 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4112 TYPE is the type of the CONSTRUCTOR, not the element type.
4113 ALIGN and CLEARED are as for store_constructor.
4115 This provides a recursive shortcut back to store_constructor when it isn't
4116 necessary to go through store_field. This is so that we can pass through
4117 the cleared field to let store_constructor know that we may not have to
4118 clear a substructure if the outer structure has already been cleared. */
4121 store_constructor_field (target, bitsize, bitpos,
4122 mode, exp, type, align, cleared)
4124 unsigned HOST_WIDE_INT bitsize;
4125 HOST_WIDE_INT bitpos;
4126 enum machine_mode mode;
4131 if (TREE_CODE (exp) == CONSTRUCTOR
4132 && bitpos % BITS_PER_UNIT == 0
4133 /* If we have a non-zero bitpos for a register target, then we just
4134 let store_field do the bitfield handling. This is unlikely to
4135 generate unnecessary clear instructions anyways. */
4136 && (bitpos == 0 || GET_CODE (target) == MEM))
4140 = change_address (target,
4141 GET_MODE (target) == BLKmode
4143 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4144 ? BLKmode : VOIDmode,
4145 plus_constant (XEXP (target, 0),
4146 bitpos / BITS_PER_UNIT));
4147 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4150 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4151 int_size_in_bytes (type), 0);
4154 /* Store the value of constructor EXP into the rtx TARGET.
4155 TARGET is either a REG or a MEM.
4156 ALIGN is the maximum known alignment for TARGET.
4157 CLEARED is true if TARGET is known to have been zero'd.
4158 SIZE is the number of bytes of TARGET we are allowed to modify: this
4159 may not be the same as the size of EXP if we are assigning to a field
4160 which has been packed to exclude padding bits. */
4163 store_constructor (exp, target, align, cleared, size)
4170 tree type = TREE_TYPE (exp);
4171 #ifdef WORD_REGISTER_OPERATIONS
4172 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4175 /* We know our target cannot conflict, since safe_from_p has been called. */
4177 /* Don't try copying piece by piece into a hard register
4178 since that is vulnerable to being clobbered by EXP.
4179 Instead, construct in a pseudo register and then copy it all. */
4180 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4182 rtx temp = gen_reg_rtx (GET_MODE (target));
4183 store_constructor (exp, temp, align, cleared, size);
4184 emit_move_insn (target, temp);
4189 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4190 || TREE_CODE (type) == QUAL_UNION_TYPE)
4194 /* Inform later passes that the whole union value is dead. */
4195 if ((TREE_CODE (type) == UNION_TYPE
4196 || TREE_CODE (type) == QUAL_UNION_TYPE)
4199 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4201 /* If the constructor is empty, clear the union. */
4202 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4203 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4206 /* If we are building a static constructor into a register,
4207 set the initial value as zero so we can fold the value into
4208 a constant. But if more than one register is involved,
4209 this probably loses. */
4210 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4211 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4214 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4219 /* If the constructor has fewer fields than the structure
4220 or if we are initializing the structure to mostly zeros,
4221 clear the whole structure first. */
4223 && ((list_length (CONSTRUCTOR_ELTS (exp))
4224 != fields_length (type))
4225 || mostly_zeros_p (exp)))
4228 clear_storage (target, GEN_INT (size), align);
4233 /* Inform later passes that the old value is dead. */
4234 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4236 /* Store each element of the constructor into
4237 the corresponding field of TARGET. */
4239 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4241 register tree field = TREE_PURPOSE (elt);
4242 #ifdef WORD_REGISTER_OPERATIONS
4243 tree value = TREE_VALUE (elt);
4245 register enum machine_mode mode;
4246 HOST_WIDE_INT bitsize;
4247 HOST_WIDE_INT bitpos = 0;
4250 rtx to_rtx = target;
4252 /* Just ignore missing fields.
4253 We cleared the whole structure, above,
4254 if any fields are missing. */
4258 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4261 if (host_integerp (DECL_SIZE (field), 1))
4262 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4266 unsignedp = TREE_UNSIGNED (field);
4267 mode = DECL_MODE (field);
4268 if (DECL_BIT_FIELD (field))
4271 offset = DECL_FIELD_OFFSET (field);
4272 if (host_integerp (offset, 0)
4273 && host_integerp (bit_position (field), 0))
4275 bitpos = int_bit_position (field);
4279 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4285 if (contains_placeholder_p (offset))
4286 offset = build (WITH_RECORD_EXPR, sizetype,
4287 offset, make_tree (TREE_TYPE (exp), target));
4289 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4290 if (GET_CODE (to_rtx) != MEM)
4293 if (GET_MODE (offset_rtx) != ptr_mode)
4295 #ifdef POINTERS_EXTEND_UNSIGNED
4296 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4298 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4303 = change_address (to_rtx, VOIDmode,
4304 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4305 force_reg (ptr_mode,
4307 align = DECL_OFFSET_ALIGN (field);
4310 if (TREE_READONLY (field))
4312 if (GET_CODE (to_rtx) == MEM)
4313 to_rtx = copy_rtx (to_rtx);
4315 RTX_UNCHANGING_P (to_rtx) = 1;
4318 #ifdef WORD_REGISTER_OPERATIONS
4319 /* If this initializes a field that is smaller than a word, at the
4320 start of a word, try to widen it to a full word.
4321 This special case allows us to output C++ member function
4322 initializations in a form that the optimizers can understand. */
4323 if (GET_CODE (target) == REG
4324 && bitsize < BITS_PER_WORD
4325 && bitpos % BITS_PER_WORD == 0
4326 && GET_MODE_CLASS (mode) == MODE_INT
4327 && TREE_CODE (value) == INTEGER_CST
4329 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4331 tree type = TREE_TYPE (value);
4332 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4334 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4335 value = convert (type, value);
4337 if (BYTES_BIG_ENDIAN)
4339 = fold (build (LSHIFT_EXPR, type, value,
4340 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4341 bitsize = BITS_PER_WORD;
4345 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4346 TREE_VALUE (elt), type, align, cleared);
4349 else if (TREE_CODE (type) == ARRAY_TYPE)
4354 tree domain = TYPE_DOMAIN (type);
4355 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4356 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4357 tree elttype = TREE_TYPE (type);
4359 /* If the constructor has fewer elements than the array,
4360 clear the whole array first. Similarly if this is
4361 static constructor of a non-BLKmode object. */
4362 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4366 HOST_WIDE_INT count = 0, zero_count = 0;
4368 /* This loop is a more accurate version of the loop in
4369 mostly_zeros_p (it handles RANGE_EXPR in an index).
4370 It is also needed to check for missing elements. */
4371 for (elt = CONSTRUCTOR_ELTS (exp);
4373 elt = TREE_CHAIN (elt))
4375 tree index = TREE_PURPOSE (elt);
4376 HOST_WIDE_INT this_node_count;
4378 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4380 tree lo_index = TREE_OPERAND (index, 0);
4381 tree hi_index = TREE_OPERAND (index, 1);
4383 if (! host_integerp (lo_index, 1)
4384 || ! host_integerp (hi_index, 1))
4390 this_node_count = (tree_low_cst (hi_index, 1)
4391 - tree_low_cst (lo_index, 1) + 1);
4394 this_node_count = 1;
4395 count += this_node_count;
4396 if (mostly_zeros_p (TREE_VALUE (elt)))
4397 zero_count += this_node_count;
4399 /* Clear the entire array first if there are any missing elements,
4400 or if the incidence of zero elements is >= 75%. */
4401 if (count < maxelt - minelt + 1
4402 || 4 * zero_count >= 3 * count)
4405 if (need_to_clear && size > 0)
4408 clear_storage (target, GEN_INT (size), align);
4412 /* Inform later passes that the old value is dead. */
4413 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4415 /* Store each element of the constructor into
4416 the corresponding element of TARGET, determined
4417 by counting the elements. */
4418 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4420 elt = TREE_CHAIN (elt), i++)
4422 register enum machine_mode mode;
4423 HOST_WIDE_INT bitsize;
4424 HOST_WIDE_INT bitpos;
4426 tree value = TREE_VALUE (elt);
4427 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4428 tree index = TREE_PURPOSE (elt);
4429 rtx xtarget = target;
4431 if (cleared && is_zeros_p (value))
4434 unsignedp = TREE_UNSIGNED (elttype);
4435 mode = TYPE_MODE (elttype);
4436 if (mode == BLKmode)
4437 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4438 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4441 bitsize = GET_MODE_BITSIZE (mode);
4443 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4445 tree lo_index = TREE_OPERAND (index, 0);
4446 tree hi_index = TREE_OPERAND (index, 1);
4447 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4448 struct nesting *loop;
4449 HOST_WIDE_INT lo, hi, count;
4452 /* If the range is constant and "small", unroll the loop. */
4453 if (host_integerp (lo_index, 0)
4454 && host_integerp (hi_index, 0)
4455 && (lo = tree_low_cst (lo_index, 0),
4456 hi = tree_low_cst (hi_index, 0),
4457 count = hi - lo + 1,
4458 (GET_CODE (target) != MEM
4460 || (host_integerp (TYPE_SIZE (elttype), 1)
4461 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4464 lo -= minelt; hi -= minelt;
4465 for (; lo <= hi; lo++)
4467 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4468 store_constructor_field (target, bitsize, bitpos, mode,
4469 value, type, align, cleared);
4474 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4475 loop_top = gen_label_rtx ();
4476 loop_end = gen_label_rtx ();
4478 unsignedp = TREE_UNSIGNED (domain);
4480 index = build_decl (VAR_DECL, NULL_TREE, domain);
4482 DECL_RTL (index) = index_r
4483 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4486 if (TREE_CODE (value) == SAVE_EXPR
4487 && SAVE_EXPR_RTL (value) == 0)
4489 /* Make sure value gets expanded once before the
4491 expand_expr (value, const0_rtx, VOIDmode, 0);
4494 store_expr (lo_index, index_r, 0);
4495 loop = expand_start_loop (0);
4497 /* Assign value to element index. */
4499 = convert (ssizetype,
4500 fold (build (MINUS_EXPR, TREE_TYPE (index),
4501 index, TYPE_MIN_VALUE (domain))));
4502 position = size_binop (MULT_EXPR, position,
4504 TYPE_SIZE_UNIT (elttype)));
4506 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4507 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4508 xtarget = change_address (target, mode, addr);
4509 if (TREE_CODE (value) == CONSTRUCTOR)
4510 store_constructor (value, xtarget, align, cleared,
4511 bitsize / BITS_PER_UNIT);
4513 store_expr (value, xtarget, 0);
4515 expand_exit_loop_if_false (loop,
4516 build (LT_EXPR, integer_type_node,
4519 expand_increment (build (PREINCREMENT_EXPR,
4521 index, integer_one_node), 0, 0);
4523 emit_label (loop_end);
4526 else if ((index != 0 && ! host_integerp (index, 0))
4527 || ! host_integerp (TYPE_SIZE (elttype), 1))
4533 index = ssize_int (1);
4536 index = convert (ssizetype,
4537 fold (build (MINUS_EXPR, index,
4538 TYPE_MIN_VALUE (domain))));
4540 position = size_binop (MULT_EXPR, index,
4542 TYPE_SIZE_UNIT (elttype)));
4543 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4544 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4545 xtarget = change_address (target, mode, addr);
4546 store_expr (value, xtarget, 0);
4551 bitpos = ((tree_low_cst (index, 0) - minelt)
4552 * tree_low_cst (TYPE_SIZE (elttype), 1));
4554 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4556 store_constructor_field (target, bitsize, bitpos, mode, value,
4557 type, align, cleared);
4562 /* Set constructor assignments */
4563 else if (TREE_CODE (type) == SET_TYPE)
4565 tree elt = CONSTRUCTOR_ELTS (exp);
4566 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4567 tree domain = TYPE_DOMAIN (type);
4568 tree domain_min, domain_max, bitlength;
4570 /* The default implementation strategy is to extract the constant
4571 parts of the constructor, use that to initialize the target,
4572 and then "or" in whatever non-constant ranges we need in addition.
4574 If a large set is all zero or all ones, it is
4575 probably better to set it using memset (if available) or bzero.
4576 Also, if a large set has just a single range, it may also be
4577 better to first clear all the first clear the set (using
4578 bzero/memset), and set the bits we want. */
4580 /* Check for all zeros. */
4581 if (elt == NULL_TREE && size > 0)
4584 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4588 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4589 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4590 bitlength = size_binop (PLUS_EXPR,
4591 size_diffop (domain_max, domain_min),
4594 nbits = tree_low_cst (bitlength, 1);
4596 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4597 are "complicated" (more than one range), initialize (the
4598 constant parts) by copying from a constant. */
4599 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4600 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4602 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4603 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4604 char *bit_buffer = (char *) alloca (nbits);
4605 HOST_WIDE_INT word = 0;
4606 unsigned int bit_pos = 0;
4607 unsigned int ibit = 0;
4608 unsigned int offset = 0; /* In bytes from beginning of set. */
4610 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4613 if (bit_buffer[ibit])
4615 if (BYTES_BIG_ENDIAN)
4616 word |= (1 << (set_word_size - 1 - bit_pos));
4618 word |= 1 << bit_pos;
4622 if (bit_pos >= set_word_size || ibit == nbits)
4624 if (word != 0 || ! cleared)
4626 rtx datum = GEN_INT (word);
4629 /* The assumption here is that it is safe to use
4630 XEXP if the set is multi-word, but not if
4631 it's single-word. */
4632 if (GET_CODE (target) == MEM)
4634 to_rtx = plus_constant (XEXP (target, 0), offset);
4635 to_rtx = change_address (target, mode, to_rtx);
4637 else if (offset == 0)
4641 emit_move_insn (to_rtx, datum);
4648 offset += set_word_size / BITS_PER_UNIT;
4653 /* Don't bother clearing storage if the set is all ones. */
4654 if (TREE_CHAIN (elt) != NULL_TREE
4655 || (TREE_PURPOSE (elt) == NULL_TREE
4657 : ( ! host_integerp (TREE_VALUE (elt), 0)
4658 || ! host_integerp (TREE_PURPOSE (elt), 0)
4659 || (tree_low_cst (TREE_VALUE (elt), 0)
4660 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4661 != (HOST_WIDE_INT) nbits))))
4662 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4664 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4666 /* start of range of element or NULL */
4667 tree startbit = TREE_PURPOSE (elt);
4668 /* end of range of element, or element value */
4669 tree endbit = TREE_VALUE (elt);
4670 #ifdef TARGET_MEM_FUNCTIONS
4671 HOST_WIDE_INT startb, endb;
4673 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4675 bitlength_rtx = expand_expr (bitlength,
4676 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4678 /* handle non-range tuple element like [ expr ] */
4679 if (startbit == NULL_TREE)
4681 startbit = save_expr (endbit);
4685 startbit = convert (sizetype, startbit);
4686 endbit = convert (sizetype, endbit);
4687 if (! integer_zerop (domain_min))
4689 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4690 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4692 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4693 EXPAND_CONST_ADDRESS);
4694 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4695 EXPAND_CONST_ADDRESS);
4699 targetx = assign_stack_temp (GET_MODE (target),
4700 GET_MODE_SIZE (GET_MODE (target)),
4702 emit_move_insn (targetx, target);
4705 else if (GET_CODE (target) == MEM)
4710 #ifdef TARGET_MEM_FUNCTIONS
4711 /* Optimization: If startbit and endbit are
4712 constants divisible by BITS_PER_UNIT,
4713 call memset instead. */
4714 if (TREE_CODE (startbit) == INTEGER_CST
4715 && TREE_CODE (endbit) == INTEGER_CST
4716 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4717 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4719 emit_library_call (memset_libfunc, 0,
4721 plus_constant (XEXP (targetx, 0),
4722 startb / BITS_PER_UNIT),
4724 constm1_rtx, TYPE_MODE (integer_type_node),
4725 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4726 TYPE_MODE (sizetype));
4730 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4731 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4732 bitlength_rtx, TYPE_MODE (sizetype),
4733 startbit_rtx, TYPE_MODE (sizetype),
4734 endbit_rtx, TYPE_MODE (sizetype));
4737 emit_move_insn (target, targetx);
4745 /* Store the value of EXP (an expression tree)
4746 into a subfield of TARGET which has mode MODE and occupies
4747 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4748 If MODE is VOIDmode, it means that we are storing into a bit-field.
4750 If VALUE_MODE is VOIDmode, return nothing in particular.
4751 UNSIGNEDP is not used in this case.
4753 Otherwise, return an rtx for the value stored. This rtx
4754 has mode VALUE_MODE if that is convenient to do.
4755 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4757 ALIGN is the alignment that TARGET is known to have.
4758 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4760 ALIAS_SET is the alias set for the destination. This value will
4761 (in general) be different from that for TARGET, since TARGET is a
4762 reference to the containing structure. */
4765 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4766 unsignedp, align, total_size, alias_set)
4768 HOST_WIDE_INT bitsize;
4769 HOST_WIDE_INT bitpos;
4770 enum machine_mode mode;
4772 enum machine_mode value_mode;
4775 HOST_WIDE_INT total_size;
4778 HOST_WIDE_INT width_mask = 0;
4780 if (TREE_CODE (exp) == ERROR_MARK)
4783 if (bitsize < HOST_BITS_PER_WIDE_INT)
4784 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4786 /* If we are storing into an unaligned field of an aligned union that is
4787 in a register, we may have the mode of TARGET being an integer mode but
4788 MODE == BLKmode. In that case, get an aligned object whose size and
4789 alignment are the same as TARGET and store TARGET into it (we can avoid
4790 the store if the field being stored is the entire width of TARGET). Then
4791 call ourselves recursively to store the field into a BLKmode version of
4792 that object. Finally, load from the object into TARGET. This is not
4793 very efficient in general, but should only be slightly more expensive
4794 than the otherwise-required unaligned accesses. Perhaps this can be
4795 cleaned up later. */
4798 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4800 rtx object = assign_stack_temp (GET_MODE (target),
4801 GET_MODE_SIZE (GET_MODE (target)), 0);
4802 rtx blk_object = copy_rtx (object);
4804 MEM_SET_IN_STRUCT_P (object, 1);
4805 MEM_SET_IN_STRUCT_P (blk_object, 1);
4806 PUT_MODE (blk_object, BLKmode);
4808 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4809 emit_move_insn (object, target);
4811 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4812 align, total_size, alias_set);
4814 /* Even though we aren't returning target, we need to
4815 give it the updated value. */
4816 emit_move_insn (target, object);
4821 if (GET_CODE (target) == CONCAT)
4823 /* We're storing into a struct containing a single __complex. */
4827 return store_expr (exp, target, 0);
4830 /* If the structure is in a register or if the component
4831 is a bit field, we cannot use addressing to access it.
4832 Use bit-field techniques or SUBREG to store in it. */
4834 if (mode == VOIDmode
4835 || (mode != BLKmode && ! direct_store[(int) mode]
4836 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4837 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4838 || GET_CODE (target) == REG
4839 || GET_CODE (target) == SUBREG
4840 /* If the field isn't aligned enough to store as an ordinary memref,
4841 store it as a bit field. */
4842 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4843 && (align < GET_MODE_ALIGNMENT (mode)
4844 || bitpos % GET_MODE_ALIGNMENT (mode)))
4845 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4846 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4847 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4848 /* If the RHS and field are a constant size and the size of the
4849 RHS isn't the same size as the bitfield, we must use bitfield
4852 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4853 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4855 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4857 /* If BITSIZE is narrower than the size of the type of EXP
4858 we will be narrowing TEMP. Normally, what's wanted are the
4859 low-order bits. However, if EXP's type is a record and this is
4860 big-endian machine, we want the upper BITSIZE bits. */
4861 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4862 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4863 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4864 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4865 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4869 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4871 if (mode != VOIDmode && mode != BLKmode
4872 && mode != TYPE_MODE (TREE_TYPE (exp)))
4873 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4875 /* If the modes of TARGET and TEMP are both BLKmode, both
4876 must be in memory and BITPOS must be aligned on a byte
4877 boundary. If so, we simply do a block copy. */
4878 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4880 unsigned int exp_align = expr_align (exp);
4882 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4883 || bitpos % BITS_PER_UNIT != 0)
4886 target = change_address (target, VOIDmode,
4887 plus_constant (XEXP (target, 0),
4888 bitpos / BITS_PER_UNIT));
4890 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4891 align = MIN (exp_align, align);
4893 /* Find an alignment that is consistent with the bit position. */
4894 while ((bitpos % align) != 0)
4897 emit_block_move (target, temp,
4898 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4902 return value_mode == VOIDmode ? const0_rtx : target;
4905 /* Store the value in the bitfield. */
4906 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4907 if (value_mode != VOIDmode)
4909 /* The caller wants an rtx for the value. */
4910 /* If possible, avoid refetching from the bitfield itself. */
4912 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4915 enum machine_mode tmode;
4918 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4919 tmode = GET_MODE (temp);
4920 if (tmode == VOIDmode)
4922 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4923 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4924 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4926 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4927 NULL_RTX, value_mode, 0, align,
4934 rtx addr = XEXP (target, 0);
4937 /* If a value is wanted, it must be the lhs;
4938 so make the address stable for multiple use. */
4940 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4941 && ! CONSTANT_ADDRESS_P (addr)
4942 /* A frame-pointer reference is already stable. */
4943 && ! (GET_CODE (addr) == PLUS
4944 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4945 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4946 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4947 addr = copy_to_reg (addr);
4949 /* Now build a reference to just the desired component. */
4951 to_rtx = copy_rtx (change_address (target, mode,
4952 plus_constant (addr,
4954 / BITS_PER_UNIT))));
4955 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4956 MEM_ALIAS_SET (to_rtx) = alias_set;
4958 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4962 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4963 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4964 ARRAY_REFs and find the ultimate containing object, which we return.
4966 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4967 bit position, and *PUNSIGNEDP to the signedness of the field.
4968 If the position of the field is variable, we store a tree
4969 giving the variable offset (in units) in *POFFSET.
4970 This offset is in addition to the bit position.
4971 If the position is not variable, we store 0 in *POFFSET.
4972 We set *PALIGNMENT to the alignment of the address that will be
4973 computed. This is the alignment of the thing we return if *POFFSET
4974 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4976 If any of the extraction expressions is volatile,
4977 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4979 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4980 is a mode that can be used to access the field. In that case, *PBITSIZE
4983 If the field describes a variable-sized object, *PMODE is set to
4984 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4985 this case, but the address of the object can be found. */
4988 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4989 punsignedp, pvolatilep, palignment)
4991 HOST_WIDE_INT *pbitsize;
4992 HOST_WIDE_INT *pbitpos;
4994 enum machine_mode *pmode;
4997 unsigned int *palignment;
5000 enum machine_mode mode = VOIDmode;
5001 tree offset = size_zero_node;
5002 tree bit_offset = bitsize_zero_node;
5003 unsigned int alignment = BIGGEST_ALIGNMENT;
5006 /* First get the mode, signedness, and size. We do this from just the
5007 outermost expression. */
5008 if (TREE_CODE (exp) == COMPONENT_REF)
5010 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5011 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5012 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5014 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5016 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5018 size_tree = TREE_OPERAND (exp, 1);
5019 *punsignedp = TREE_UNSIGNED (exp);
5023 mode = TYPE_MODE (TREE_TYPE (exp));
5024 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5026 if (mode == BLKmode)
5027 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5029 *pbitsize = GET_MODE_BITSIZE (mode);
5034 if (! host_integerp (size_tree, 1))
5035 mode = BLKmode, *pbitsize = -1;
5037 *pbitsize = tree_low_cst (size_tree, 1);
5040 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5041 and find the ultimate containing object. */
5044 if (TREE_CODE (exp) == BIT_FIELD_REF)
5045 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5046 else if (TREE_CODE (exp) == COMPONENT_REF)
5048 tree field = TREE_OPERAND (exp, 1);
5049 tree this_offset = DECL_FIELD_OFFSET (field);
5051 /* If this field hasn't been filled in yet, don't go
5052 past it. This should only happen when folding expressions
5053 made during type construction. */
5054 if (this_offset == 0)
5056 else if (! TREE_CONSTANT (this_offset)
5057 && contains_placeholder_p (this_offset))
5058 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5060 offset = size_binop (PLUS_EXPR, offset, this_offset);
5061 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5062 DECL_FIELD_BIT_OFFSET (field));
5064 if (! host_integerp (offset, 0))
5065 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5068 else if (TREE_CODE (exp) == ARRAY_REF)
5070 tree index = TREE_OPERAND (exp, 1);
5071 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5072 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5073 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5075 /* We assume all arrays have sizes that are a multiple of a byte.
5076 First subtract the lower bound, if any, in the type of the
5077 index, then convert to sizetype and multiply by the size of the
5079 if (low_bound != 0 && ! integer_zerop (low_bound))
5080 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5083 /* If the index has a self-referential type, pass it to a
5084 WITH_RECORD_EXPR; if the component size is, pass our
5085 component to one. */
5086 if (! TREE_CONSTANT (index)
5087 && contains_placeholder_p (index))
5088 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5089 if (! TREE_CONSTANT (unit_size)
5090 && contains_placeholder_p (unit_size))
5091 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5092 TREE_OPERAND (exp, 0));
5094 offset = size_binop (PLUS_EXPR, offset,
5095 size_binop (MULT_EXPR,
5096 convert (sizetype, index),
5100 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5101 && ! ((TREE_CODE (exp) == NOP_EXPR
5102 || TREE_CODE (exp) == CONVERT_EXPR)
5103 && (TYPE_MODE (TREE_TYPE (exp))
5104 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5107 /* If any reference in the chain is volatile, the effect is volatile. */
5108 if (TREE_THIS_VOLATILE (exp))
5111 /* If the offset is non-constant already, then we can't assume any
5112 alignment more than the alignment here. */
5113 if (! TREE_CONSTANT (offset))
5114 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5116 exp = TREE_OPERAND (exp, 0);
5120 alignment = MIN (alignment, DECL_ALIGN (exp));
5121 else if (TREE_TYPE (exp) != 0)
5122 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5124 /* If OFFSET is constant, see if we can return the whole thing as a
5125 constant bit position. Otherwise, split it up. */
5126 if (host_integerp (offset, 0)
5127 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5129 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5130 && host_integerp (tem, 0))
5131 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5133 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5136 *palignment = alignment;
5140 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5142 static enum memory_use_mode
5143 get_memory_usage_from_modifier (modifier)
5144 enum expand_modifier modifier;
5150 return MEMORY_USE_RO;
5152 case EXPAND_MEMORY_USE_WO:
5153 return MEMORY_USE_WO;
5155 case EXPAND_MEMORY_USE_RW:
5156 return MEMORY_USE_RW;
5158 case EXPAND_MEMORY_USE_DONT:
5159 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5160 MEMORY_USE_DONT, because they are modifiers to a call of
5161 expand_expr in the ADDR_EXPR case of expand_expr. */
5162 case EXPAND_CONST_ADDRESS:
5163 case EXPAND_INITIALIZER:
5164 return MEMORY_USE_DONT;
5165 case EXPAND_MEMORY_USE_BAD:
5171 /* Given an rtx VALUE that may contain additions and multiplications,
5172 return an equivalent value that just refers to a register or memory.
5173 This is done by generating instructions to perform the arithmetic
5174 and returning a pseudo-register containing the value.
5176 The returned value may be a REG, SUBREG, MEM or constant. */
5179 force_operand (value, target)
5182 register optab binoptab = 0;
5183 /* Use a temporary to force order of execution of calls to
5187 /* Use subtarget as the target for operand 0 of a binary operation. */
5188 register rtx subtarget = get_subtarget (target);
5190 /* Check for a PIC address load. */
5192 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5193 && XEXP (value, 0) == pic_offset_table_rtx
5194 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5195 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5196 || GET_CODE (XEXP (value, 1)) == CONST))
5199 subtarget = gen_reg_rtx (GET_MODE (value));
5200 emit_move_insn (subtarget, value);
5204 if (GET_CODE (value) == PLUS)
5205 binoptab = add_optab;
5206 else if (GET_CODE (value) == MINUS)
5207 binoptab = sub_optab;
5208 else if (GET_CODE (value) == MULT)
5210 op2 = XEXP (value, 1);
5211 if (!CONSTANT_P (op2)
5212 && !(GET_CODE (op2) == REG && op2 != subtarget))
5214 tmp = force_operand (XEXP (value, 0), subtarget);
5215 return expand_mult (GET_MODE (value), tmp,
5216 force_operand (op2, NULL_RTX),
5222 op2 = XEXP (value, 1);
5223 if (!CONSTANT_P (op2)
5224 && !(GET_CODE (op2) == REG && op2 != subtarget))
5226 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5228 binoptab = add_optab;
5229 op2 = negate_rtx (GET_MODE (value), op2);
5232 /* Check for an addition with OP2 a constant integer and our first
5233 operand a PLUS of a virtual register and something else. In that
5234 case, we want to emit the sum of the virtual register and the
5235 constant first and then add the other value. This allows virtual
5236 register instantiation to simply modify the constant rather than
5237 creating another one around this addition. */
5238 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5239 && GET_CODE (XEXP (value, 0)) == PLUS
5240 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5241 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5242 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5244 rtx temp = expand_binop (GET_MODE (value), binoptab,
5245 XEXP (XEXP (value, 0), 0), op2,
5246 subtarget, 0, OPTAB_LIB_WIDEN);
5247 return expand_binop (GET_MODE (value), binoptab, temp,
5248 force_operand (XEXP (XEXP (value, 0), 1), 0),
5249 target, 0, OPTAB_LIB_WIDEN);
5252 tmp = force_operand (XEXP (value, 0), subtarget);
5253 return expand_binop (GET_MODE (value), binoptab, tmp,
5254 force_operand (op2, NULL_RTX),
5255 target, 0, OPTAB_LIB_WIDEN);
5256 /* We give UNSIGNEDP = 0 to expand_binop
5257 because the only operations we are expanding here are signed ones. */
5262 /* Subroutine of expand_expr:
5263 save the non-copied parts (LIST) of an expr (LHS), and return a list
5264 which can restore these values to their previous values,
5265 should something modify their storage. */
5268 save_noncopied_parts (lhs, list)
5275 for (tail = list; tail; tail = TREE_CHAIN (tail))
5276 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5277 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5280 tree part = TREE_VALUE (tail);
5281 tree part_type = TREE_TYPE (part);
5282 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5283 rtx target = assign_temp (part_type, 0, 1, 1);
5284 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5285 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5286 parts = tree_cons (to_be_saved,
5287 build (RTL_EXPR, part_type, NULL_TREE,
5290 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5295 /* Subroutine of expand_expr:
5296 record the non-copied parts (LIST) of an expr (LHS), and return a list
5297 which specifies the initial values of these parts. */
5300 init_noncopied_parts (lhs, list)
5307 for (tail = list; tail; tail = TREE_CHAIN (tail))
5308 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5309 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5310 else if (TREE_PURPOSE (tail))
5312 tree part = TREE_VALUE (tail);
5313 tree part_type = TREE_TYPE (part);
5314 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5315 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5320 /* Subroutine of expand_expr: return nonzero iff there is no way that
5321 EXP can reference X, which is being modified. TOP_P is nonzero if this
5322 call is going to be used to determine whether we need a temporary
5323 for EXP, as opposed to a recursive call to this function.
5325 It is always safe for this routine to return zero since it merely
5326 searches for optimization opportunities. */
5329 safe_from_p (x, exp, top_p)
5336 static int save_expr_count;
5337 static int save_expr_size = 0;
5338 static tree *save_expr_rewritten;
5339 static tree save_expr_trees[256];
5342 /* If EXP has varying size, we MUST use a target since we currently
5343 have no way of allocating temporaries of variable size
5344 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5345 So we assume here that something at a higher level has prevented a
5346 clash. This is somewhat bogus, but the best we can do. Only
5347 do this when X is BLKmode and when we are at the top level. */
5348 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5349 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5350 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5351 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5352 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5354 && GET_MODE (x) == BLKmode))
5357 if (top_p && save_expr_size == 0)
5361 save_expr_count = 0;
5362 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5363 save_expr_rewritten = &save_expr_trees[0];
5365 rtn = safe_from_p (x, exp, 1);
5367 for (i = 0; i < save_expr_count; ++i)
5369 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5371 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5379 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5380 find the underlying pseudo. */
5381 if (GET_CODE (x) == SUBREG)
5384 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5388 /* If X is a location in the outgoing argument area, it is always safe. */
5389 if (GET_CODE (x) == MEM
5390 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5391 || (GET_CODE (XEXP (x, 0)) == PLUS
5392 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5395 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5398 exp_rtl = DECL_RTL (exp);
5405 if (TREE_CODE (exp) == TREE_LIST)
5406 return ((TREE_VALUE (exp) == 0
5407 || safe_from_p (x, TREE_VALUE (exp), 0))
5408 && (TREE_CHAIN (exp) == 0
5409 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5410 else if (TREE_CODE (exp) == ERROR_MARK)
5411 return 1; /* An already-visited SAVE_EXPR? */
5416 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5420 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5421 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5425 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5426 the expression. If it is set, we conflict iff we are that rtx or
5427 both are in memory. Otherwise, we check all operands of the
5428 expression recursively. */
5430 switch (TREE_CODE (exp))
5433 return (staticp (TREE_OPERAND (exp, 0))
5434 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5435 || TREE_STATIC (exp));
5438 if (GET_CODE (x) == MEM)
5443 exp_rtl = CALL_EXPR_RTL (exp);
5446 /* Assume that the call will clobber all hard registers and
5448 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5449 || GET_CODE (x) == MEM)
5456 /* If a sequence exists, we would have to scan every instruction
5457 in the sequence to see if it was safe. This is probably not
5459 if (RTL_EXPR_SEQUENCE (exp))
5462 exp_rtl = RTL_EXPR_RTL (exp);
5465 case WITH_CLEANUP_EXPR:
5466 exp_rtl = RTL_EXPR_RTL (exp);
5469 case CLEANUP_POINT_EXPR:
5470 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5473 exp_rtl = SAVE_EXPR_RTL (exp);
5477 /* This SAVE_EXPR might appear many times in the top-level
5478 safe_from_p() expression, and if it has a complex
5479 subexpression, examining it multiple times could result
5480 in a combinatorial explosion. E.g. on an Alpha
5481 running at least 200MHz, a Fortran test case compiled with
5482 optimization took about 28 minutes to compile -- even though
5483 it was only a few lines long, and the complicated line causing
5484 so much time to be spent in the earlier version of safe_from_p()
5485 had only 293 or so unique nodes.
5487 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5488 where it is so we can turn it back in the top-level safe_from_p()
5491 /* For now, don't bother re-sizing the array. */
5492 if (save_expr_count >= save_expr_size)
5494 save_expr_rewritten[save_expr_count++] = exp;
5496 nops = tree_code_length[(int) SAVE_EXPR];
5497 for (i = 0; i < nops; i++)
5499 tree operand = TREE_OPERAND (exp, i);
5500 if (operand == NULL_TREE)
5502 TREE_SET_CODE (exp, ERROR_MARK);
5503 if (!safe_from_p (x, operand, 0))
5505 TREE_SET_CODE (exp, SAVE_EXPR);
5507 TREE_SET_CODE (exp, ERROR_MARK);
5511 /* The only operand we look at is operand 1. The rest aren't
5512 part of the expression. */
5513 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5515 case METHOD_CALL_EXPR:
5516 /* This takes a rtx argument, but shouldn't appear here. */
5523 /* If we have an rtx, we do not need to scan our operands. */
5527 nops = tree_code_length[(int) TREE_CODE (exp)];
5528 for (i = 0; i < nops; i++)
5529 if (TREE_OPERAND (exp, i) != 0
5530 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5534 /* If we have an rtl, find any enclosed object. Then see if we conflict
5538 if (GET_CODE (exp_rtl) == SUBREG)
5540 exp_rtl = SUBREG_REG (exp_rtl);
5541 if (GET_CODE (exp_rtl) == REG
5542 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5546 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5547 are memory and EXP is not readonly. */
5548 return ! (rtx_equal_p (x, exp_rtl)
5549 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5550 && ! TREE_READONLY (exp)));
5553 /* If we reach here, it is safe. */
5557 /* Subroutine of expand_expr: return nonzero iff EXP is an
5558 expression whose type is statically determinable. */
5564 if (TREE_CODE (exp) == PARM_DECL
5565 || TREE_CODE (exp) == VAR_DECL
5566 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5567 || TREE_CODE (exp) == COMPONENT_REF
5568 || TREE_CODE (exp) == ARRAY_REF)
5573 /* Subroutine of expand_expr: return rtx if EXP is a
5574 variable or parameter; else return 0. */
5581 switch (TREE_CODE (exp))
5585 return DECL_RTL (exp);
5591 #ifdef MAX_INTEGER_COMPUTATION_MODE
5593 check_max_integer_computation_mode (exp)
5596 enum tree_code code;
5597 enum machine_mode mode;
5599 /* Strip any NOPs that don't change the mode. */
5601 code = TREE_CODE (exp);
5603 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5604 if (code == NOP_EXPR
5605 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5608 /* First check the type of the overall operation. We need only look at
5609 unary, binary and relational operations. */
5610 if (TREE_CODE_CLASS (code) == '1'
5611 || TREE_CODE_CLASS (code) == '2'
5612 || TREE_CODE_CLASS (code) == '<')
5614 mode = TYPE_MODE (TREE_TYPE (exp));
5615 if (GET_MODE_CLASS (mode) == MODE_INT
5616 && mode > MAX_INTEGER_COMPUTATION_MODE)
5617 fatal ("unsupported wide integer operation");
5620 /* Check operand of a unary op. */
5621 if (TREE_CODE_CLASS (code) == '1')
5623 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5624 if (GET_MODE_CLASS (mode) == MODE_INT
5625 && mode > MAX_INTEGER_COMPUTATION_MODE)
5626 fatal ("unsupported wide integer operation");
5629 /* Check operands of a binary/comparison op. */
5630 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5632 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5633 if (GET_MODE_CLASS (mode) == MODE_INT
5634 && mode > MAX_INTEGER_COMPUTATION_MODE)
5635 fatal ("unsupported wide integer operation");
5637 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5638 if (GET_MODE_CLASS (mode) == MODE_INT
5639 && mode > MAX_INTEGER_COMPUTATION_MODE)
5640 fatal ("unsupported wide integer operation");
5646 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5647 has any readonly fields. If any of the fields have types that
5648 contain readonly fields, return true as well. */
5651 readonly_fields_p (type)
5656 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5657 if (TREE_CODE (field) == FIELD_DECL
5658 && (TREE_READONLY (field)
5659 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5660 && readonly_fields_p (TREE_TYPE (field)))))
5666 /* expand_expr: generate code for computing expression EXP.
5667 An rtx for the computed value is returned. The value is never null.
5668 In the case of a void EXP, const0_rtx is returned.
5670 The value may be stored in TARGET if TARGET is nonzero.
5671 TARGET is just a suggestion; callers must assume that
5672 the rtx returned may not be the same as TARGET.
5674 If TARGET is CONST0_RTX, it means that the value will be ignored.
5676 If TMODE is not VOIDmode, it suggests generating the
5677 result in mode TMODE. But this is done only when convenient.
5678 Otherwise, TMODE is ignored and the value generated in its natural mode.
5679 TMODE is just a suggestion; callers must assume that
5680 the rtx returned may not have mode TMODE.
5682 Note that TARGET may have neither TMODE nor MODE. In that case, it
5683 probably will not be used.
5685 If MODIFIER is EXPAND_SUM then when EXP is an addition
5686 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5687 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5688 products as above, or REG or MEM, or constant.
5689 Ordinarily in such cases we would output mul or add instructions
5690 and then return a pseudo reg containing the sum.
5692 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5693 it also marks a label as absolutely required (it can't be dead).
5694 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5695 This is used for outputting expressions used in initializers.
5697 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5698 with a constant address even if that address is not normally legitimate.
5699 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5702 expand_expr (exp, target, tmode, modifier)
5705 enum machine_mode tmode;
5706 enum expand_modifier modifier;
5708 register rtx op0, op1, temp;
5709 tree type = TREE_TYPE (exp);
5710 int unsignedp = TREE_UNSIGNED (type);
5711 register enum machine_mode mode;
5712 register enum tree_code code = TREE_CODE (exp);
5714 rtx subtarget, original_target;
5717 /* Used by check-memory-usage to make modifier read only. */
5718 enum expand_modifier ro_modifier;
5720 /* Handle ERROR_MARK before anybody tries to access its type. */
5721 if (TREE_CODE (exp) == ERROR_MARK)
5723 op0 = CONST0_RTX (tmode);
5729 mode = TYPE_MODE (type);
5730 /* Use subtarget as the target for operand 0 of a binary operation. */
5731 subtarget = get_subtarget (target);
5732 original_target = target;
5733 ignore = (target == const0_rtx
5734 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5735 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5736 || code == COND_EXPR)
5737 && TREE_CODE (type) == VOID_TYPE));
5739 /* Make a read-only version of the modifier. */
5740 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5741 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5742 ro_modifier = modifier;
5744 ro_modifier = EXPAND_NORMAL;
5746 /* If we are going to ignore this result, we need only do something
5747 if there is a side-effect somewhere in the expression. If there
5748 is, short-circuit the most common cases here. Note that we must
5749 not call expand_expr with anything but const0_rtx in case this
5750 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5754 if (! TREE_SIDE_EFFECTS (exp))
5757 /* Ensure we reference a volatile object even if value is ignored, but
5758 don't do this if all we are doing is taking its address. */
5759 if (TREE_THIS_VOLATILE (exp)
5760 && TREE_CODE (exp) != FUNCTION_DECL
5761 && mode != VOIDmode && mode != BLKmode
5762 && modifier != EXPAND_CONST_ADDRESS)
5764 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5765 if (GET_CODE (temp) == MEM)
5766 temp = copy_to_reg (temp);
5770 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5771 || code == INDIRECT_REF || code == BUFFER_REF)
5772 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5773 VOIDmode, ro_modifier);
5774 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5775 || code == ARRAY_REF)
5777 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5778 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5781 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5782 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5783 /* If the second operand has no side effects, just evaluate
5785 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5786 VOIDmode, ro_modifier);
5787 else if (code == BIT_FIELD_REF)
5789 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5790 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5791 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5798 #ifdef MAX_INTEGER_COMPUTATION_MODE
5799 /* Only check stuff here if the mode we want is different from the mode
5800 of the expression; if it's the same, check_max_integer_computiation_mode
5801 will handle it. Do we really need to check this stuff at all? */
5804 && GET_MODE (target) != mode
5805 && TREE_CODE (exp) != INTEGER_CST
5806 && TREE_CODE (exp) != PARM_DECL
5807 && TREE_CODE (exp) != ARRAY_REF
5808 && TREE_CODE (exp) != COMPONENT_REF
5809 && TREE_CODE (exp) != BIT_FIELD_REF
5810 && TREE_CODE (exp) != INDIRECT_REF
5811 && TREE_CODE (exp) != CALL_EXPR
5812 && TREE_CODE (exp) != VAR_DECL
5813 && TREE_CODE (exp) != RTL_EXPR)
5815 enum machine_mode mode = GET_MODE (target);
5817 if (GET_MODE_CLASS (mode) == MODE_INT
5818 && mode > MAX_INTEGER_COMPUTATION_MODE)
5819 fatal ("unsupported wide integer operation");
5823 && TREE_CODE (exp) != INTEGER_CST
5824 && TREE_CODE (exp) != PARM_DECL
5825 && TREE_CODE (exp) != ARRAY_REF
5826 && TREE_CODE (exp) != COMPONENT_REF
5827 && TREE_CODE (exp) != BIT_FIELD_REF
5828 && TREE_CODE (exp) != INDIRECT_REF
5829 && TREE_CODE (exp) != VAR_DECL
5830 && TREE_CODE (exp) != CALL_EXPR
5831 && TREE_CODE (exp) != RTL_EXPR
5832 && GET_MODE_CLASS (tmode) == MODE_INT
5833 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5834 fatal ("unsupported wide integer operation");
5836 check_max_integer_computation_mode (exp);
5839 /* If will do cse, generate all results into pseudo registers
5840 since 1) that allows cse to find more things
5841 and 2) otherwise cse could produce an insn the machine
5844 if (! cse_not_expected && mode != BLKmode && target
5845 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5852 tree function = decl_function_context (exp);
5853 /* Handle using a label in a containing function. */
5854 if (function != current_function_decl
5855 && function != inline_function_decl && function != 0)
5857 struct function *p = find_function_data (function);
5858 /* Allocate in the memory associated with the function
5859 that the label is in. */
5860 push_obstacks (p->function_obstack,
5861 p->function_maybepermanent_obstack);
5863 p->expr->x_forced_labels
5864 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5865 p->expr->x_forced_labels);
5870 if (modifier == EXPAND_INITIALIZER)
5871 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5876 temp = gen_rtx_MEM (FUNCTION_MODE,
5877 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5878 if (function != current_function_decl
5879 && function != inline_function_decl && function != 0)
5880 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5885 if (DECL_RTL (exp) == 0)
5887 error_with_decl (exp, "prior parameter's size depends on `%s'");
5888 return CONST0_RTX (mode);
5891 /* ... fall through ... */
5894 /* If a static var's type was incomplete when the decl was written,
5895 but the type is complete now, lay out the decl now. */
5896 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5897 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5899 push_obstacks_nochange ();
5900 end_temporary_allocation ();
5901 layout_decl (exp, 0);
5902 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5906 /* Although static-storage variables start off initialized, according to
5907 ANSI C, a memcpy could overwrite them with uninitialized values. So
5908 we check them too. This also lets us check for read-only variables
5909 accessed via a non-const declaration, in case it won't be detected
5910 any other way (e.g., in an embedded system or OS kernel without
5913 Aggregates are not checked here; they're handled elsewhere. */
5914 if (cfun && current_function_check_memory_usage
5916 && GET_CODE (DECL_RTL (exp)) == MEM
5917 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5919 enum memory_use_mode memory_usage;
5920 memory_usage = get_memory_usage_from_modifier (modifier);
5922 if (memory_usage != MEMORY_USE_DONT)
5923 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5924 XEXP (DECL_RTL (exp), 0), Pmode,
5925 GEN_INT (int_size_in_bytes (type)),
5926 TYPE_MODE (sizetype),
5927 GEN_INT (memory_usage),
5928 TYPE_MODE (integer_type_node));
5931 /* ... fall through ... */
5935 if (DECL_RTL (exp) == 0)
5938 /* Ensure variable marked as used even if it doesn't go through
5939 a parser. If it hasn't be used yet, write out an external
5941 if (! TREE_USED (exp))
5943 assemble_external (exp);
5944 TREE_USED (exp) = 1;
5947 /* Show we haven't gotten RTL for this yet. */
5950 /* Handle variables inherited from containing functions. */
5951 context = decl_function_context (exp);
5953 /* We treat inline_function_decl as an alias for the current function
5954 because that is the inline function whose vars, types, etc.
5955 are being merged into the current function.
5956 See expand_inline_function. */
5958 if (context != 0 && context != current_function_decl
5959 && context != inline_function_decl
5960 /* If var is static, we don't need a static chain to access it. */
5961 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5962 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5966 /* Mark as non-local and addressable. */
5967 DECL_NONLOCAL (exp) = 1;
5968 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5970 mark_addressable (exp);
5971 if (GET_CODE (DECL_RTL (exp)) != MEM)
5973 addr = XEXP (DECL_RTL (exp), 0);
5974 if (GET_CODE (addr) == MEM)
5975 addr = gen_rtx_MEM (Pmode,
5976 fix_lexical_addr (XEXP (addr, 0), exp));
5978 addr = fix_lexical_addr (addr, exp);
5979 temp = change_address (DECL_RTL (exp), mode, addr);
5982 /* This is the case of an array whose size is to be determined
5983 from its initializer, while the initializer is still being parsed.
5986 else if (GET_CODE (DECL_RTL (exp)) == MEM
5987 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5988 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5989 XEXP (DECL_RTL (exp), 0));
5991 /* If DECL_RTL is memory, we are in the normal case and either
5992 the address is not valid or it is not a register and -fforce-addr
5993 is specified, get the address into a register. */
5995 else if (GET_CODE (DECL_RTL (exp)) == MEM
5996 && modifier != EXPAND_CONST_ADDRESS
5997 && modifier != EXPAND_SUM
5998 && modifier != EXPAND_INITIALIZER
5999 && (! memory_address_p (DECL_MODE (exp),
6000 XEXP (DECL_RTL (exp), 0))
6002 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6003 temp = change_address (DECL_RTL (exp), VOIDmode,
6004 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6006 /* If we got something, return it. But first, set the alignment
6007 the address is a register. */
6010 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6011 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6016 /* If the mode of DECL_RTL does not match that of the decl, it
6017 must be a promoted value. We return a SUBREG of the wanted mode,
6018 but mark it so that we know that it was already extended. */
6020 if (GET_CODE (DECL_RTL (exp)) == REG
6021 && GET_MODE (DECL_RTL (exp)) != mode)
6023 /* Get the signedness used for this variable. Ensure we get the
6024 same mode we got when the variable was declared. */
6025 if (GET_MODE (DECL_RTL (exp))
6026 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6029 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6030 SUBREG_PROMOTED_VAR_P (temp) = 1;
6031 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6035 return DECL_RTL (exp);
6038 return immed_double_const (TREE_INT_CST_LOW (exp),
6039 TREE_INT_CST_HIGH (exp), mode);
6042 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6043 EXPAND_MEMORY_USE_BAD);
6046 /* If optimized, generate immediate CONST_DOUBLE
6047 which will be turned into memory by reload if necessary.
6049 We used to force a register so that loop.c could see it. But
6050 this does not allow gen_* patterns to perform optimizations with
6051 the constants. It also produces two insns in cases like "x = 1.0;".
6052 On most machines, floating-point constants are not permitted in
6053 many insns, so we'd end up copying it to a register in any case.
6055 Now, we do the copying in expand_binop, if appropriate. */
6056 return immed_real_const (exp);
6060 if (! TREE_CST_RTL (exp))
6061 output_constant_def (exp);
6063 /* TREE_CST_RTL probably contains a constant address.
6064 On RISC machines where a constant address isn't valid,
6065 make some insns to get that address into a register. */
6066 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6067 && modifier != EXPAND_CONST_ADDRESS
6068 && modifier != EXPAND_INITIALIZER
6069 && modifier != EXPAND_SUM
6070 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6072 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6073 return change_address (TREE_CST_RTL (exp), VOIDmode,
6074 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6075 return TREE_CST_RTL (exp);
6077 case EXPR_WITH_FILE_LOCATION:
6080 const char *saved_input_filename = input_filename;
6081 int saved_lineno = lineno;
6082 input_filename = EXPR_WFL_FILENAME (exp);
6083 lineno = EXPR_WFL_LINENO (exp);
6084 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6085 emit_line_note (input_filename, lineno);
6086 /* Possibly avoid switching back and force here */
6087 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6088 input_filename = saved_input_filename;
6089 lineno = saved_lineno;
6094 context = decl_function_context (exp);
6096 /* If this SAVE_EXPR was at global context, assume we are an
6097 initialization function and move it into our context. */
6099 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6101 /* We treat inline_function_decl as an alias for the current function
6102 because that is the inline function whose vars, types, etc.
6103 are being merged into the current function.
6104 See expand_inline_function. */
6105 if (context == current_function_decl || context == inline_function_decl)
6108 /* If this is non-local, handle it. */
6111 /* The following call just exists to abort if the context is
6112 not of a containing function. */
6113 find_function_data (context);
6115 temp = SAVE_EXPR_RTL (exp);
6116 if (temp && GET_CODE (temp) == REG)
6118 put_var_into_stack (exp);
6119 temp = SAVE_EXPR_RTL (exp);
6121 if (temp == 0 || GET_CODE (temp) != MEM)
6123 return change_address (temp, mode,
6124 fix_lexical_addr (XEXP (temp, 0), exp));
6126 if (SAVE_EXPR_RTL (exp) == 0)
6128 if (mode == VOIDmode)
6131 temp = assign_temp (type, 3, 0, 0);
6133 SAVE_EXPR_RTL (exp) = temp;
6134 if (!optimize && GET_CODE (temp) == REG)
6135 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6138 /* If the mode of TEMP does not match that of the expression, it
6139 must be a promoted value. We pass store_expr a SUBREG of the
6140 wanted mode but mark it so that we know that it was already
6141 extended. Note that `unsignedp' was modified above in
6144 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6146 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6147 SUBREG_PROMOTED_VAR_P (temp) = 1;
6148 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6151 if (temp == const0_rtx)
6152 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6153 EXPAND_MEMORY_USE_BAD);
6155 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6157 TREE_USED (exp) = 1;
6160 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6161 must be a promoted value. We return a SUBREG of the wanted mode,
6162 but mark it so that we know that it was already extended. */
6164 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6165 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6167 /* Compute the signedness and make the proper SUBREG. */
6168 promote_mode (type, mode, &unsignedp, 0);
6169 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6170 SUBREG_PROMOTED_VAR_P (temp) = 1;
6171 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6175 return SAVE_EXPR_RTL (exp);
6180 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6181 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6185 case PLACEHOLDER_EXPR:
6187 tree placeholder_expr;
6189 /* If there is an object on the head of the placeholder list,
6190 see if some object in it of type TYPE or a pointer to it. For
6191 further information, see tree.def. */
6192 for (placeholder_expr = placeholder_list;
6193 placeholder_expr != 0;
6194 placeholder_expr = TREE_CHAIN (placeholder_expr))
6196 tree need_type = TYPE_MAIN_VARIANT (type);
6198 tree old_list = placeholder_list;
6201 /* Find the outermost reference that is of the type we want.
6202 If none, see if any object has a type that is a pointer to
6203 the type we want. */
6204 for (elt = TREE_PURPOSE (placeholder_expr);
6205 elt != 0 && object == 0;
6207 = ((TREE_CODE (elt) == COMPOUND_EXPR
6208 || TREE_CODE (elt) == COND_EXPR)
6209 ? TREE_OPERAND (elt, 1)
6210 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6211 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6212 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6213 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6214 ? TREE_OPERAND (elt, 0) : 0))
6215 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6218 for (elt = TREE_PURPOSE (placeholder_expr);
6219 elt != 0 && object == 0;
6221 = ((TREE_CODE (elt) == COMPOUND_EXPR
6222 || TREE_CODE (elt) == COND_EXPR)
6223 ? TREE_OPERAND (elt, 1)
6224 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6225 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6226 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6227 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6228 ? TREE_OPERAND (elt, 0) : 0))
6229 if (POINTER_TYPE_P (TREE_TYPE (elt))
6230 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6232 object = build1 (INDIRECT_REF, need_type, elt);
6236 /* Expand this object skipping the list entries before
6237 it was found in case it is also a PLACEHOLDER_EXPR.
6238 In that case, we want to translate it using subsequent
6240 placeholder_list = TREE_CHAIN (placeholder_expr);
6241 temp = expand_expr (object, original_target, tmode,
6243 placeholder_list = old_list;
6249 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6252 case WITH_RECORD_EXPR:
6253 /* Put the object on the placeholder list, expand our first operand,
6254 and pop the list. */
6255 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6257 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6258 tmode, ro_modifier);
6259 placeholder_list = TREE_CHAIN (placeholder_list);
6263 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6264 expand_goto (TREE_OPERAND (exp, 0));
6266 expand_computed_goto (TREE_OPERAND (exp, 0));
6270 expand_exit_loop_if_false (NULL_PTR,
6271 invert_truthvalue (TREE_OPERAND (exp, 0)));
6274 case LABELED_BLOCK_EXPR:
6275 if (LABELED_BLOCK_BODY (exp))
6276 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6277 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6280 case EXIT_BLOCK_EXPR:
6281 if (EXIT_BLOCK_RETURN (exp))
6282 sorry ("returned value in block_exit_expr");
6283 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6288 expand_start_loop (1);
6289 expand_expr_stmt (TREE_OPERAND (exp, 0));
6297 tree vars = TREE_OPERAND (exp, 0);
6298 int vars_need_expansion = 0;
6300 /* Need to open a binding contour here because
6301 if there are any cleanups they must be contained here. */
6302 expand_start_bindings (2);
6304 /* Mark the corresponding BLOCK for output in its proper place. */
6305 if (TREE_OPERAND (exp, 2) != 0
6306 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6307 insert_block (TREE_OPERAND (exp, 2));
6309 /* If VARS have not yet been expanded, expand them now. */
6312 if (DECL_RTL (vars) == 0)
6314 vars_need_expansion = 1;
6317 expand_decl_init (vars);
6318 vars = TREE_CHAIN (vars);
6321 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6323 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6329 if (RTL_EXPR_SEQUENCE (exp))
6331 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6333 emit_insns (RTL_EXPR_SEQUENCE (exp));
6334 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6336 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6337 free_temps_for_rtl_expr (exp);
6338 return RTL_EXPR_RTL (exp);
6341 /* If we don't need the result, just ensure we evaluate any
6346 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6347 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6348 EXPAND_MEMORY_USE_BAD);
6352 /* All elts simple constants => refer to a constant in memory. But
6353 if this is a non-BLKmode mode, let it store a field at a time
6354 since that should make a CONST_INT or CONST_DOUBLE when we
6355 fold. Likewise, if we have a target we can use, it is best to
6356 store directly into the target unless the type is large enough
6357 that memcpy will be used. If we are making an initializer and
6358 all operands are constant, put it in memory as well. */
6359 else if ((TREE_STATIC (exp)
6360 && ((mode == BLKmode
6361 && ! (target != 0 && safe_from_p (target, exp, 1)))
6362 || TREE_ADDRESSABLE (exp)
6363 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6364 && (! MOVE_BY_PIECES_P
6365 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6367 && ! mostly_zeros_p (exp))))
6368 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6370 rtx constructor = output_constant_def (exp);
6372 if (modifier != EXPAND_CONST_ADDRESS
6373 && modifier != EXPAND_INITIALIZER
6374 && modifier != EXPAND_SUM
6375 && (! memory_address_p (GET_MODE (constructor),
6376 XEXP (constructor, 0))
6378 && GET_CODE (XEXP (constructor, 0)) != REG)))
6379 constructor = change_address (constructor, VOIDmode,
6380 XEXP (constructor, 0));
6386 /* Handle calls that pass values in multiple non-contiguous
6387 locations. The Irix 6 ABI has examples of this. */
6388 if (target == 0 || ! safe_from_p (target, exp, 1)
6389 || GET_CODE (target) == PARALLEL)
6391 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6392 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6394 target = assign_temp (type, 0, 1, 1);
6397 if (TREE_READONLY (exp))
6399 if (GET_CODE (target) == MEM)
6400 target = copy_rtx (target);
6402 RTX_UNCHANGING_P (target) = 1;
6405 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6406 int_size_in_bytes (TREE_TYPE (exp)));
6412 tree exp1 = TREE_OPERAND (exp, 0);
6415 tree string = string_constant (exp1, &index);
6417 /* Try to optimize reads from const strings. */
6419 && TREE_CODE (string) == STRING_CST
6420 && TREE_CODE (index) == INTEGER_CST
6421 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6422 && GET_MODE_CLASS (mode) == MODE_INT
6423 && GET_MODE_SIZE (mode) == 1
6424 && modifier != EXPAND_MEMORY_USE_WO)
6426 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6428 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6429 op0 = memory_address (mode, op0);
6431 if (cfun && current_function_check_memory_usage
6432 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6434 enum memory_use_mode memory_usage;
6435 memory_usage = get_memory_usage_from_modifier (modifier);
6437 if (memory_usage != MEMORY_USE_DONT)
6439 in_check_memory_usage = 1;
6440 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6442 GEN_INT (int_size_in_bytes (type)),
6443 TYPE_MODE (sizetype),
6444 GEN_INT (memory_usage),
6445 TYPE_MODE (integer_type_node));
6446 in_check_memory_usage = 0;
6450 temp = gen_rtx_MEM (mode, op0);
6451 /* If address was computed by addition,
6452 mark this as an element of an aggregate. */
6453 if (TREE_CODE (exp1) == PLUS_EXPR
6454 || (TREE_CODE (exp1) == SAVE_EXPR
6455 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6456 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6457 || (TREE_CODE (exp1) == ADDR_EXPR
6458 && (exp2 = TREE_OPERAND (exp1, 0))
6459 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6460 MEM_SET_IN_STRUCT_P (temp, 1);
6462 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6463 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6465 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6466 here, because, in C and C++, the fact that a location is accessed
6467 through a pointer to const does not mean that the value there can
6468 never change. Languages where it can never change should
6469 also set TREE_STATIC. */
6470 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6472 /* If we are writing to this object and its type is a record with
6473 readonly fields, we must mark it as readonly so it will
6474 conflict with readonly references to those fields. */
6475 if (modifier == EXPAND_MEMORY_USE_WO
6476 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6477 RTX_UNCHANGING_P (temp) = 1;
6483 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6487 tree array = TREE_OPERAND (exp, 0);
6488 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6489 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6490 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6493 /* Optimize the special-case of a zero lower bound.
6495 We convert the low_bound to sizetype to avoid some problems
6496 with constant folding. (E.g. suppose the lower bound is 1,
6497 and its mode is QI. Without the conversion, (ARRAY
6498 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6499 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6501 if (! integer_zerop (low_bound))
6502 index = size_diffop (index, convert (sizetype, low_bound));
6504 /* Fold an expression like: "foo"[2].
6505 This is not done in fold so it won't happen inside &.
6506 Don't fold if this is for wide characters since it's too
6507 difficult to do correctly and this is a very rare case. */
6509 if (TREE_CODE (array) == STRING_CST
6510 && TREE_CODE (index) == INTEGER_CST
6511 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6512 && GET_MODE_CLASS (mode) == MODE_INT
6513 && GET_MODE_SIZE (mode) == 1)
6515 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6517 /* If this is a constant index into a constant array,
6518 just get the value from the array. Handle both the cases when
6519 we have an explicit constructor and when our operand is a variable
6520 that was declared const. */
6522 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6523 && TREE_CODE (index) == INTEGER_CST
6524 && 0 > compare_tree_int (index,
6525 list_length (CONSTRUCTOR_ELTS
6526 (TREE_OPERAND (exp, 0)))))
6530 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6531 i = TREE_INT_CST_LOW (index);
6532 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6536 return expand_expr (fold (TREE_VALUE (elem)), target,
6537 tmode, ro_modifier);
6540 else if (optimize >= 1
6541 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6542 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6543 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6545 if (TREE_CODE (index) == INTEGER_CST)
6547 tree init = DECL_INITIAL (array);
6549 if (TREE_CODE (init) == CONSTRUCTOR)
6553 for (elem = CONSTRUCTOR_ELTS (init);
6555 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6556 elem = TREE_CHAIN (elem))
6560 return expand_expr (fold (TREE_VALUE (elem)), target,
6561 tmode, ro_modifier);
6563 else if (TREE_CODE (init) == STRING_CST
6564 && 0 > compare_tree_int (index,
6565 TREE_STRING_LENGTH (init)))
6567 (TREE_STRING_POINTER
6568 (init)[TREE_INT_CST_LOW (index)]));
6573 /* ... fall through ... */
6577 /* If the operand is a CONSTRUCTOR, we can just extract the
6578 appropriate field if it is present. Don't do this if we have
6579 already written the data since we want to refer to that copy
6580 and varasm.c assumes that's what we'll do. */
6581 if (code != ARRAY_REF
6582 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6583 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6587 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6588 elt = TREE_CHAIN (elt))
6589 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6590 /* We can normally use the value of the field in the
6591 CONSTRUCTOR. However, if this is a bitfield in
6592 an integral mode that we can fit in a HOST_WIDE_INT,
6593 we must mask only the number of bits in the bitfield,
6594 since this is done implicitly by the constructor. If
6595 the bitfield does not meet either of those conditions,
6596 we can't do this optimization. */
6597 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6598 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6600 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6601 <= HOST_BITS_PER_WIDE_INT))))
6603 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6604 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6606 HOST_WIDE_INT bitsize
6607 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6609 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6611 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6612 op0 = expand_and (op0, op1, target);
6616 enum machine_mode imode
6617 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6619 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6622 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6624 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6634 enum machine_mode mode1;
6635 HOST_WIDE_INT bitsize, bitpos;
6638 unsigned int alignment;
6639 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6640 &mode1, &unsignedp, &volatilep,
6643 /* If we got back the original object, something is wrong. Perhaps
6644 we are evaluating an expression too early. In any event, don't
6645 infinitely recurse. */
6649 /* If TEM's type is a union of variable size, pass TARGET to the inner
6650 computation, since it will need a temporary and TARGET is known
6651 to have to do. This occurs in unchecked conversion in Ada. */
6653 op0 = expand_expr (tem,
6654 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6655 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6657 ? target : NULL_RTX),
6659 (modifier == EXPAND_INITIALIZER
6660 || modifier == EXPAND_CONST_ADDRESS)
6661 ? modifier : EXPAND_NORMAL);
6663 /* If this is a constant, put it into a register if it is a
6664 legitimate constant and OFFSET is 0 and memory if it isn't. */
6665 if (CONSTANT_P (op0))
6667 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6668 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6670 op0 = force_reg (mode, op0);
6672 op0 = validize_mem (force_const_mem (mode, op0));
6677 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6679 /* If this object is in memory, put it into a register.
6680 This case can't occur in C, but can in Ada if we have
6681 unchecked conversion of an expression from a scalar type to
6682 an array or record type. */
6683 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6684 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6686 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6688 mark_temp_addr_taken (memloc);
6689 emit_move_insn (memloc, op0);
6693 if (GET_CODE (op0) != MEM)
6696 if (GET_MODE (offset_rtx) != ptr_mode)
6698 #ifdef POINTERS_EXTEND_UNSIGNED
6699 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6701 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6705 /* A constant address in OP0 can have VOIDmode, we must not try
6706 to call force_reg for that case. Avoid that case. */
6707 if (GET_CODE (op0) == MEM
6708 && GET_MODE (op0) == BLKmode
6709 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6711 && (bitpos % bitsize) == 0
6712 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6713 && alignment == GET_MODE_ALIGNMENT (mode1))
6715 rtx temp = change_address (op0, mode1,
6716 plus_constant (XEXP (op0, 0),
6719 if (GET_CODE (XEXP (temp, 0)) == REG)
6722 op0 = change_address (op0, mode1,
6723 force_reg (GET_MODE (XEXP (temp, 0)),
6729 op0 = change_address (op0, VOIDmode,
6730 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6731 force_reg (ptr_mode,
6735 /* Don't forget about volatility even if this is a bitfield. */
6736 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6738 op0 = copy_rtx (op0);
6739 MEM_VOLATILE_P (op0) = 1;
6742 /* Check the access. */
6743 if (cfun != 0 && current_function_check_memory_usage
6744 && GET_CODE (op0) == MEM)
6746 enum memory_use_mode memory_usage;
6747 memory_usage = get_memory_usage_from_modifier (modifier);
6749 if (memory_usage != MEMORY_USE_DONT)
6754 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6755 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6757 /* Check the access right of the pointer. */
6758 if (size > BITS_PER_UNIT)
6759 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6761 GEN_INT (size / BITS_PER_UNIT),
6762 TYPE_MODE (sizetype),
6763 GEN_INT (memory_usage),
6764 TYPE_MODE (integer_type_node));
6768 /* In cases where an aligned union has an unaligned object
6769 as a field, we might be extracting a BLKmode value from
6770 an integer-mode (e.g., SImode) object. Handle this case
6771 by doing the extract into an object as wide as the field
6772 (which we know to be the width of a basic mode), then
6773 storing into memory, and changing the mode to BLKmode.
6774 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6775 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6776 if (mode1 == VOIDmode
6777 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6778 || (modifier != EXPAND_CONST_ADDRESS
6779 && modifier != EXPAND_INITIALIZER
6780 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6781 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6782 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6783 /* If the field isn't aligned enough to fetch as a memref,
6784 fetch it as a bit field. */
6785 || (mode1 != BLKmode
6786 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6787 && ((TYPE_ALIGN (TREE_TYPE (tem))
6788 < GET_MODE_ALIGNMENT (mode))
6789 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6790 /* If the type and the field are a constant size and the
6791 size of the type isn't the same size as the bitfield,
6792 we must use bitfield operations. */
6794 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6796 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6798 || (modifier != EXPAND_CONST_ADDRESS
6799 && modifier != EXPAND_INITIALIZER
6801 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6802 && (TYPE_ALIGN (type) > alignment
6803 || bitpos % TYPE_ALIGN (type) != 0)))
6805 enum machine_mode ext_mode = mode;
6807 if (ext_mode == BLKmode
6808 && ! (target != 0 && GET_CODE (op0) == MEM
6809 && GET_CODE (target) == MEM
6810 && bitpos % BITS_PER_UNIT == 0))
6811 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6813 if (ext_mode == BLKmode)
6815 /* In this case, BITPOS must start at a byte boundary and
6816 TARGET, if specified, must be a MEM. */
6817 if (GET_CODE (op0) != MEM
6818 || (target != 0 && GET_CODE (target) != MEM)
6819 || bitpos % BITS_PER_UNIT != 0)
6822 op0 = change_address (op0, VOIDmode,
6823 plus_constant (XEXP (op0, 0),
6824 bitpos / BITS_PER_UNIT));
6826 target = assign_temp (type, 0, 1, 1);
6828 emit_block_move (target, op0,
6829 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6836 op0 = validize_mem (op0);
6838 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6839 mark_reg_pointer (XEXP (op0, 0), alignment);
6841 op0 = extract_bit_field (op0, bitsize, bitpos,
6842 unsignedp, target, ext_mode, ext_mode,
6844 int_size_in_bytes (TREE_TYPE (tem)));
6846 /* If the result is a record type and BITSIZE is narrower than
6847 the mode of OP0, an integral mode, and this is a big endian
6848 machine, we must put the field into the high-order bits. */
6849 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6850 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6851 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6852 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6853 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6857 if (mode == BLKmode)
6859 rtx new = assign_stack_temp (ext_mode,
6860 bitsize / BITS_PER_UNIT, 0);
6862 emit_move_insn (new, op0);
6863 op0 = copy_rtx (new);
6864 PUT_MODE (op0, BLKmode);
6865 MEM_SET_IN_STRUCT_P (op0, 1);
6871 /* If the result is BLKmode, use that to access the object
6873 if (mode == BLKmode)
6876 /* Get a reference to just this component. */
6877 if (modifier == EXPAND_CONST_ADDRESS
6878 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6879 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6880 (bitpos / BITS_PER_UNIT)));
6882 op0 = change_address (op0, mode1,
6883 plus_constant (XEXP (op0, 0),
6884 (bitpos / BITS_PER_UNIT)));
6886 if (GET_CODE (op0) == MEM)
6887 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6889 if (GET_CODE (XEXP (op0, 0)) == REG)
6890 mark_reg_pointer (XEXP (op0, 0), alignment);
6892 MEM_SET_IN_STRUCT_P (op0, 1);
6893 MEM_VOLATILE_P (op0) |= volatilep;
6894 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6895 || modifier == EXPAND_CONST_ADDRESS
6896 || modifier == EXPAND_INITIALIZER)
6898 else if (target == 0)
6899 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6901 convert_move (target, op0, unsignedp);
6905 /* Intended for a reference to a buffer of a file-object in Pascal.
6906 But it's not certain that a special tree code will really be
6907 necessary for these. INDIRECT_REF might work for them. */
6913 /* Pascal set IN expression.
6916 rlo = set_low - (set_low%bits_per_word);
6917 the_word = set [ (index - rlo)/bits_per_word ];
6918 bit_index = index % bits_per_word;
6919 bitmask = 1 << bit_index;
6920 return !!(the_word & bitmask); */
6922 tree set = TREE_OPERAND (exp, 0);
6923 tree index = TREE_OPERAND (exp, 1);
6924 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6925 tree set_type = TREE_TYPE (set);
6926 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6927 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6928 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6929 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6930 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6931 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6932 rtx setaddr = XEXP (setval, 0);
6933 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6935 rtx diff, quo, rem, addr, bit, result;
6937 preexpand_calls (exp);
6939 /* If domain is empty, answer is no. Likewise if index is constant
6940 and out of bounds. */
6941 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6942 && TREE_CODE (set_low_bound) == INTEGER_CST
6943 && tree_int_cst_lt (set_high_bound, set_low_bound))
6944 || (TREE_CODE (index) == INTEGER_CST
6945 && TREE_CODE (set_low_bound) == INTEGER_CST
6946 && tree_int_cst_lt (index, set_low_bound))
6947 || (TREE_CODE (set_high_bound) == INTEGER_CST
6948 && TREE_CODE (index) == INTEGER_CST
6949 && tree_int_cst_lt (set_high_bound, index))))
6953 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6955 /* If we get here, we have to generate the code for both cases
6956 (in range and out of range). */
6958 op0 = gen_label_rtx ();
6959 op1 = gen_label_rtx ();
6961 if (! (GET_CODE (index_val) == CONST_INT
6962 && GET_CODE (lo_r) == CONST_INT))
6964 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6965 GET_MODE (index_val), iunsignedp, 0, op1);
6968 if (! (GET_CODE (index_val) == CONST_INT
6969 && GET_CODE (hi_r) == CONST_INT))
6971 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6972 GET_MODE (index_val), iunsignedp, 0, op1);
6975 /* Calculate the element number of bit zero in the first word
6977 if (GET_CODE (lo_r) == CONST_INT)
6978 rlow = GEN_INT (INTVAL (lo_r)
6979 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6981 rlow = expand_binop (index_mode, and_optab, lo_r,
6982 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6983 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6985 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6986 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6988 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6989 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6990 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6991 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6993 addr = memory_address (byte_mode,
6994 expand_binop (index_mode, add_optab, diff,
6995 setaddr, NULL_RTX, iunsignedp,
6998 /* Extract the bit we want to examine */
6999 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7000 gen_rtx_MEM (byte_mode, addr),
7001 make_tree (TREE_TYPE (index), rem),
7003 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7004 GET_MODE (target) == byte_mode ? target : 0,
7005 1, OPTAB_LIB_WIDEN);
7007 if (result != target)
7008 convert_move (target, result, 1);
7010 /* Output the code to handle the out-of-range case. */
7013 emit_move_insn (target, const0_rtx);
7018 case WITH_CLEANUP_EXPR:
7019 if (RTL_EXPR_RTL (exp) == 0)
7022 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7023 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7025 /* That's it for this cleanup. */
7026 TREE_OPERAND (exp, 2) = 0;
7028 return RTL_EXPR_RTL (exp);
7030 case CLEANUP_POINT_EXPR:
7032 /* Start a new binding layer that will keep track of all cleanup
7033 actions to be performed. */
7034 expand_start_bindings (2);
7036 target_temp_slot_level = temp_slot_level;
7038 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7039 /* If we're going to use this value, load it up now. */
7041 op0 = force_not_mem (op0);
7042 preserve_temp_slots (op0);
7043 expand_end_bindings (NULL_TREE, 0, 0);
7048 /* Check for a built-in function. */
7049 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7050 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7052 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7053 return expand_builtin (exp, target, subtarget, tmode, ignore);
7055 /* If this call was expanded already by preexpand_calls,
7056 just return the result we got. */
7057 if (CALL_EXPR_RTL (exp) != 0)
7058 return CALL_EXPR_RTL (exp);
7060 return expand_call (exp, target, ignore);
7062 case NON_LVALUE_EXPR:
7065 case REFERENCE_EXPR:
7066 if (TREE_CODE (type) == UNION_TYPE)
7068 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7070 /* If both input and output are BLKmode, this conversion
7071 isn't actually doing anything unless we need to make the
7072 alignment stricter. */
7073 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7074 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7075 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7076 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7081 if (mode != BLKmode)
7082 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7084 target = assign_temp (type, 0, 1, 1);
7087 if (GET_CODE (target) == MEM)
7088 /* Store data into beginning of memory target. */
7089 store_expr (TREE_OPERAND (exp, 0),
7090 change_address (target, TYPE_MODE (valtype), 0), 0);
7092 else if (GET_CODE (target) == REG)
7093 /* Store this field into a union of the proper type. */
7094 store_field (target,
7095 MIN ((int_size_in_bytes (TREE_TYPE
7096 (TREE_OPERAND (exp, 0)))
7098 GET_MODE_BITSIZE (mode)),
7099 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7100 VOIDmode, 0, BITS_PER_UNIT,
7101 int_size_in_bytes (type), 0);
7105 /* Return the entire union. */
7109 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7111 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7114 /* If the signedness of the conversion differs and OP0 is
7115 a promoted SUBREG, clear that indication since we now
7116 have to do the proper extension. */
7117 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7118 && GET_CODE (op0) == SUBREG)
7119 SUBREG_PROMOTED_VAR_P (op0) = 0;
7124 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7125 if (GET_MODE (op0) == mode)
7128 /* If OP0 is a constant, just convert it into the proper mode. */
7129 if (CONSTANT_P (op0))
7131 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7132 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7134 if (modifier == EXPAND_INITIALIZER)
7135 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7139 convert_to_mode (mode, op0,
7140 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7142 convert_move (target, op0,
7143 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7147 /* We come here from MINUS_EXPR when the second operand is a
7150 this_optab = add_optab;
7152 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7153 something else, make sure we add the register to the constant and
7154 then to the other thing. This case can occur during strength
7155 reduction and doing it this way will produce better code if the
7156 frame pointer or argument pointer is eliminated.
7158 fold-const.c will ensure that the constant is always in the inner
7159 PLUS_EXPR, so the only case we need to do anything about is if
7160 sp, ap, or fp is our second argument, in which case we must swap
7161 the innermost first argument and our second argument. */
7163 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7164 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7165 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7166 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7167 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7168 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7170 tree t = TREE_OPERAND (exp, 1);
7172 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7173 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7176 /* If the result is to be ptr_mode and we are adding an integer to
7177 something, we might be forming a constant. So try to use
7178 plus_constant. If it produces a sum and we can't accept it,
7179 use force_operand. This allows P = &ARR[const] to generate
7180 efficient code on machines where a SYMBOL_REF is not a valid
7183 If this is an EXPAND_SUM call, always return the sum. */
7184 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7185 || mode == ptr_mode)
7187 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7188 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7189 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7193 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7195 /* Use immed_double_const to ensure that the constant is
7196 truncated according to the mode of OP1, then sign extended
7197 to a HOST_WIDE_INT. Using the constant directly can result
7198 in non-canonical RTL in a 64x32 cross compile. */
7200 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7202 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7203 op1 = plus_constant (op1, INTVAL (constant_part));
7204 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7205 op1 = force_operand (op1, target);
7209 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7210 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7211 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7217 if (! CONSTANT_P (op0))
7219 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7220 VOIDmode, modifier);
7221 /* Don't go to both_summands if modifier
7222 says it's not right to return a PLUS. */
7223 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7227 /* Use immed_double_const to ensure that the constant is
7228 truncated according to the mode of OP1, then sign extended
7229 to a HOST_WIDE_INT. Using the constant directly can result
7230 in non-canonical RTL in a 64x32 cross compile. */
7232 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7234 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7235 op0 = plus_constant (op0, INTVAL (constant_part));
7236 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7237 op0 = force_operand (op0, target);
7242 /* No sense saving up arithmetic to be done
7243 if it's all in the wrong mode to form part of an address.
7244 And force_operand won't know whether to sign-extend or
7246 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7247 || mode != ptr_mode)
7250 preexpand_calls (exp);
7251 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7254 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7255 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7258 /* Make sure any term that's a sum with a constant comes last. */
7259 if (GET_CODE (op0) == PLUS
7260 && CONSTANT_P (XEXP (op0, 1)))
7266 /* If adding to a sum including a constant,
7267 associate it to put the constant outside. */
7268 if (GET_CODE (op1) == PLUS
7269 && CONSTANT_P (XEXP (op1, 1)))
7271 rtx constant_term = const0_rtx;
7273 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7276 /* Ensure that MULT comes first if there is one. */
7277 else if (GET_CODE (op0) == MULT)
7278 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7280 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7282 /* Let's also eliminate constants from op0 if possible. */
7283 op0 = eliminate_constant_term (op0, &constant_term);
7285 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7286 their sum should be a constant. Form it into OP1, since the
7287 result we want will then be OP0 + OP1. */
7289 temp = simplify_binary_operation (PLUS, mode, constant_term,
7294 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7297 /* Put a constant term last and put a multiplication first. */
7298 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7299 temp = op1, op1 = op0, op0 = temp;
7301 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7302 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7305 /* For initializers, we are allowed to return a MINUS of two
7306 symbolic constants. Here we handle all cases when both operands
7308 /* Handle difference of two symbolic constants,
7309 for the sake of an initializer. */
7310 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7311 && really_constant_p (TREE_OPERAND (exp, 0))
7312 && really_constant_p (TREE_OPERAND (exp, 1)))
7314 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7315 VOIDmode, ro_modifier);
7316 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7317 VOIDmode, ro_modifier);
7319 /* If the last operand is a CONST_INT, use plus_constant of
7320 the negated constant. Else make the MINUS. */
7321 if (GET_CODE (op1) == CONST_INT)
7322 return plus_constant (op0, - INTVAL (op1));
7324 return gen_rtx_MINUS (mode, op0, op1);
7326 /* Convert A - const to A + (-const). */
7327 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7329 tree negated = fold (build1 (NEGATE_EXPR, type,
7330 TREE_OPERAND (exp, 1)));
7332 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7333 /* If we can't negate the constant in TYPE, leave it alone and
7334 expand_binop will negate it for us. We used to try to do it
7335 here in the signed version of TYPE, but that doesn't work
7336 on POINTER_TYPEs. */;
7339 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7343 this_optab = sub_optab;
7347 preexpand_calls (exp);
7348 /* If first operand is constant, swap them.
7349 Thus the following special case checks need only
7350 check the second operand. */
7351 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7353 register tree t1 = TREE_OPERAND (exp, 0);
7354 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7355 TREE_OPERAND (exp, 1) = t1;
7358 /* Attempt to return something suitable for generating an
7359 indexed address, for machines that support that. */
7361 if (modifier == EXPAND_SUM && mode == ptr_mode
7362 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7363 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7365 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7368 /* Apply distributive law if OP0 is x+c. */
7369 if (GET_CODE (op0) == PLUS
7370 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7375 (mode, XEXP (op0, 0),
7376 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7377 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7378 * INTVAL (XEXP (op0, 1))));
7380 if (GET_CODE (op0) != REG)
7381 op0 = force_operand (op0, NULL_RTX);
7382 if (GET_CODE (op0) != REG)
7383 op0 = copy_to_mode_reg (mode, op0);
7386 gen_rtx_MULT (mode, op0,
7387 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7390 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7393 /* Check for multiplying things that have been extended
7394 from a narrower type. If this machine supports multiplying
7395 in that narrower type with a result in the desired type,
7396 do it that way, and avoid the explicit type-conversion. */
7397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7398 && TREE_CODE (type) == INTEGER_TYPE
7399 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7400 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7401 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7402 && int_fits_type_p (TREE_OPERAND (exp, 1),
7403 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7404 /* Don't use a widening multiply if a shift will do. */
7405 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7406 > HOST_BITS_PER_WIDE_INT)
7407 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7409 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7410 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7412 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7413 /* If both operands are extended, they must either both
7414 be zero-extended or both be sign-extended. */
7415 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7417 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7419 enum machine_mode innermode
7420 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7421 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7422 ? smul_widen_optab : umul_widen_optab);
7423 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7424 ? umul_widen_optab : smul_widen_optab);
7425 if (mode == GET_MODE_WIDER_MODE (innermode))
7427 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7429 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7430 NULL_RTX, VOIDmode, 0);
7431 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7432 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7435 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7436 NULL_RTX, VOIDmode, 0);
7439 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7440 && innermode == word_mode)
7443 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7444 NULL_RTX, VOIDmode, 0);
7445 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7449 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7450 NULL_RTX, VOIDmode, 0);
7451 temp = expand_binop (mode, other_optab, op0, op1, target,
7452 unsignedp, OPTAB_LIB_WIDEN);
7453 htem = expand_mult_highpart_adjust (innermode,
7454 gen_highpart (innermode, temp),
7456 gen_highpart (innermode, temp),
7458 emit_move_insn (gen_highpart (innermode, temp), htem);
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7465 return expand_mult (mode, op0, op1, target, unsignedp);
7467 case TRUNC_DIV_EXPR:
7468 case FLOOR_DIV_EXPR:
7470 case ROUND_DIV_EXPR:
7471 case EXACT_DIV_EXPR:
7472 preexpand_calls (exp);
7473 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7475 /* Possible optimization: compute the dividend with EXPAND_SUM
7476 then if the divisor is constant can optimize the case
7477 where some terms of the dividend have coeffs divisible by it. */
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7479 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7480 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7483 this_optab = flodiv_optab;
7486 case TRUNC_MOD_EXPR:
7487 case FLOOR_MOD_EXPR:
7489 case ROUND_MOD_EXPR:
7490 preexpand_calls (exp);
7491 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7494 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7495 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7497 case FIX_ROUND_EXPR:
7498 case FIX_FLOOR_EXPR:
7500 abort (); /* Not used for C. */
7502 case FIX_TRUNC_EXPR:
7503 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7505 target = gen_reg_rtx (mode);
7506 expand_fix (target, op0, unsignedp);
7510 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7512 target = gen_reg_rtx (mode);
7513 /* expand_float can't figure out what to do if FROM has VOIDmode.
7514 So give it the correct mode. With -O, cse will optimize this. */
7515 if (GET_MODE (op0) == VOIDmode)
7516 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7518 expand_float (target, op0,
7519 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7523 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7524 temp = expand_unop (mode, neg_optab, op0, target, 0);
7530 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7532 /* Handle complex values specially. */
7533 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7534 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7535 return expand_complex_abs (mode, op0, target, unsignedp);
7537 /* Unsigned abs is simply the operand. Testing here means we don't
7538 risk generating incorrect code below. */
7539 if (TREE_UNSIGNED (type))
7542 return expand_abs (mode, op0, target,
7543 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7547 target = original_target;
7548 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7549 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7550 || GET_MODE (target) != mode
7551 || (GET_CODE (target) == REG
7552 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7553 target = gen_reg_rtx (mode);
7554 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7555 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7557 /* First try to do it with a special MIN or MAX instruction.
7558 If that does not win, use a conditional jump to select the proper
7560 this_optab = (TREE_UNSIGNED (type)
7561 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7562 : (code == MIN_EXPR ? smin_optab : smax_optab));
7564 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7569 /* At this point, a MEM target is no longer useful; we will get better
7572 if (GET_CODE (target) == MEM)
7573 target = gen_reg_rtx (mode);
7576 emit_move_insn (target, op0);
7578 op0 = gen_label_rtx ();
7580 /* If this mode is an integer too wide to compare properly,
7581 compare word by word. Rely on cse to optimize constant cases. */
7582 if (GET_MODE_CLASS (mode) == MODE_INT
7583 && ! can_compare_p (GE, mode, ccp_jump))
7585 if (code == MAX_EXPR)
7586 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7587 target, op1, NULL_RTX, op0);
7589 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7590 op1, target, NULL_RTX, op0);
7594 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7595 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7596 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7599 emit_move_insn (target, op1);
7604 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7605 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7611 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7612 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7617 /* ??? Can optimize bitwise operations with one arg constant.
7618 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7619 and (a bitwise1 b) bitwise2 b (etc)
7620 but that is probably not worth while. */
7622 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7623 boolean values when we want in all cases to compute both of them. In
7624 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7625 as actual zero-or-1 values and then bitwise anding. In cases where
7626 there cannot be any side effects, better code would be made by
7627 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7628 how to recognize those cases. */
7630 case TRUTH_AND_EXPR:
7632 this_optab = and_optab;
7637 this_optab = ior_optab;
7640 case TRUTH_XOR_EXPR:
7642 this_optab = xor_optab;
7649 preexpand_calls (exp);
7650 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7652 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7653 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7656 /* Could determine the answer when only additive constants differ. Also,
7657 the addition of one can be handled by changing the condition. */
7664 case UNORDERED_EXPR:
7671 preexpand_calls (exp);
7672 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7676 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7677 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7679 && GET_CODE (original_target) == REG
7680 && (GET_MODE (original_target)
7681 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7683 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7686 if (temp != original_target)
7687 temp = copy_to_reg (temp);
7689 op1 = gen_label_rtx ();
7690 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7691 GET_MODE (temp), unsignedp, 0, op1);
7692 emit_move_insn (temp, const1_rtx);
7697 /* If no set-flag instruction, must generate a conditional
7698 store into a temporary variable. Drop through
7699 and handle this like && and ||. */
7701 case TRUTH_ANDIF_EXPR:
7702 case TRUTH_ORIF_EXPR:
7704 && (target == 0 || ! safe_from_p (target, exp, 1)
7705 /* Make sure we don't have a hard reg (such as function's return
7706 value) live across basic blocks, if not optimizing. */
7707 || (!optimize && GET_CODE (target) == REG
7708 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7709 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7712 emit_clr_insn (target);
7714 op1 = gen_label_rtx ();
7715 jumpifnot (exp, op1);
7718 emit_0_to_1_insn (target);
7721 return ignore ? const0_rtx : target;
7723 case TRUTH_NOT_EXPR:
7724 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7725 /* The parser is careful to generate TRUTH_NOT_EXPR
7726 only with operands that are always zero or one. */
7727 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7728 target, 1, OPTAB_LIB_WIDEN);
7734 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7736 return expand_expr (TREE_OPERAND (exp, 1),
7737 (ignore ? const0_rtx : target),
7741 /* If we would have a "singleton" (see below) were it not for a
7742 conversion in each arm, bring that conversion back out. */
7743 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7744 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7745 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7746 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7748 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7749 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7751 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7752 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7753 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7754 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7755 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7756 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7757 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7758 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7759 return expand_expr (build1 (NOP_EXPR, type,
7760 build (COND_EXPR, TREE_TYPE (true),
7761 TREE_OPERAND (exp, 0),
7763 target, tmode, modifier);
7767 /* Note that COND_EXPRs whose type is a structure or union
7768 are required to be constructed to contain assignments of
7769 a temporary variable, so that we can evaluate them here
7770 for side effect only. If type is void, we must do likewise. */
7772 /* If an arm of the branch requires a cleanup,
7773 only that cleanup is performed. */
7776 tree binary_op = 0, unary_op = 0;
7778 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7779 convert it to our mode, if necessary. */
7780 if (integer_onep (TREE_OPERAND (exp, 1))
7781 && integer_zerop (TREE_OPERAND (exp, 2))
7782 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7786 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7791 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7792 if (GET_MODE (op0) == mode)
7796 target = gen_reg_rtx (mode);
7797 convert_move (target, op0, unsignedp);
7801 /* Check for X ? A + B : A. If we have this, we can copy A to the
7802 output and conditionally add B. Similarly for unary operations.
7803 Don't do this if X has side-effects because those side effects
7804 might affect A or B and the "?" operation is a sequence point in
7805 ANSI. (operand_equal_p tests for side effects.) */
7807 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7808 && operand_equal_p (TREE_OPERAND (exp, 2),
7809 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7810 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7811 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7812 && operand_equal_p (TREE_OPERAND (exp, 1),
7813 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7814 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7815 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7816 && operand_equal_p (TREE_OPERAND (exp, 2),
7817 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7818 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7819 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7820 && operand_equal_p (TREE_OPERAND (exp, 1),
7821 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7822 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7824 /* If we are not to produce a result, we have no target. Otherwise,
7825 if a target was specified use it; it will not be used as an
7826 intermediate target unless it is safe. If no target, use a
7831 else if (original_target
7832 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7833 || (singleton && GET_CODE (original_target) == REG
7834 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7835 && original_target == var_rtx (singleton)))
7836 && GET_MODE (original_target) == mode
7837 #ifdef HAVE_conditional_move
7838 && (! can_conditionally_move_p (mode)
7839 || GET_CODE (original_target) == REG
7840 || TREE_ADDRESSABLE (type))
7842 && ! (GET_CODE (original_target) == MEM
7843 && MEM_VOLATILE_P (original_target)))
7844 temp = original_target;
7845 else if (TREE_ADDRESSABLE (type))
7848 temp = assign_temp (type, 0, 0, 1);
7850 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7851 do the test of X as a store-flag operation, do this as
7852 A + ((X != 0) << log C). Similarly for other simple binary
7853 operators. Only do for C == 1 if BRANCH_COST is low. */
7854 if (temp && singleton && binary_op
7855 && (TREE_CODE (binary_op) == PLUS_EXPR
7856 || TREE_CODE (binary_op) == MINUS_EXPR
7857 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7858 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7859 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7860 : integer_onep (TREE_OPERAND (binary_op, 1)))
7861 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7864 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7865 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7866 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7869 /* If we had X ? A : A + 1, do this as A + (X == 0).
7871 We have to invert the truth value here and then put it
7872 back later if do_store_flag fails. We cannot simply copy
7873 TREE_OPERAND (exp, 0) to another variable and modify that
7874 because invert_truthvalue can modify the tree pointed to
7876 if (singleton == TREE_OPERAND (exp, 1))
7877 TREE_OPERAND (exp, 0)
7878 = invert_truthvalue (TREE_OPERAND (exp, 0));
7880 result = do_store_flag (TREE_OPERAND (exp, 0),
7881 (safe_from_p (temp, singleton, 1)
7883 mode, BRANCH_COST <= 1);
7885 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7886 result = expand_shift (LSHIFT_EXPR, mode, result,
7887 build_int_2 (tree_log2
7891 (safe_from_p (temp, singleton, 1)
7892 ? temp : NULL_RTX), 0);
7896 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7897 return expand_binop (mode, boptab, op1, result, temp,
7898 unsignedp, OPTAB_LIB_WIDEN);
7900 else if (singleton == TREE_OPERAND (exp, 1))
7901 TREE_OPERAND (exp, 0)
7902 = invert_truthvalue (TREE_OPERAND (exp, 0));
7905 do_pending_stack_adjust ();
7907 op0 = gen_label_rtx ();
7909 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7913 /* If the target conflicts with the other operand of the
7914 binary op, we can't use it. Also, we can't use the target
7915 if it is a hard register, because evaluating the condition
7916 might clobber it. */
7918 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7919 || (GET_CODE (temp) == REG
7920 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7921 temp = gen_reg_rtx (mode);
7922 store_expr (singleton, temp, 0);
7925 expand_expr (singleton,
7926 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7927 if (singleton == TREE_OPERAND (exp, 1))
7928 jumpif (TREE_OPERAND (exp, 0), op0);
7930 jumpifnot (TREE_OPERAND (exp, 0), op0);
7932 start_cleanup_deferral ();
7933 if (binary_op && temp == 0)
7934 /* Just touch the other operand. */
7935 expand_expr (TREE_OPERAND (binary_op, 1),
7936 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7938 store_expr (build (TREE_CODE (binary_op), type,
7939 make_tree (type, temp),
7940 TREE_OPERAND (binary_op, 1)),
7943 store_expr (build1 (TREE_CODE (unary_op), type,
7944 make_tree (type, temp)),
7948 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7949 comparison operator. If we have one of these cases, set the
7950 output to A, branch on A (cse will merge these two references),
7951 then set the output to FOO. */
7953 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7954 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7955 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7956 TREE_OPERAND (exp, 1), 0)
7957 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7958 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7959 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7961 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7962 temp = gen_reg_rtx (mode);
7963 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7964 jumpif (TREE_OPERAND (exp, 0), op0);
7966 start_cleanup_deferral ();
7967 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7971 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7972 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7974 TREE_OPERAND (exp, 2), 0)
7975 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7976 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7977 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7979 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7980 temp = gen_reg_rtx (mode);
7981 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7982 jumpifnot (TREE_OPERAND (exp, 0), op0);
7984 start_cleanup_deferral ();
7985 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7990 op1 = gen_label_rtx ();
7991 jumpifnot (TREE_OPERAND (exp, 0), op0);
7993 start_cleanup_deferral ();
7995 /* One branch of the cond can be void, if it never returns. For
7996 example A ? throw : E */
7998 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7999 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8001 expand_expr (TREE_OPERAND (exp, 1),
8002 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8003 end_cleanup_deferral ();
8005 emit_jump_insn (gen_jump (op1));
8008 start_cleanup_deferral ();
8010 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8011 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8013 expand_expr (TREE_OPERAND (exp, 2),
8014 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8017 end_cleanup_deferral ();
8028 /* Something needs to be initialized, but we didn't know
8029 where that thing was when building the tree. For example,
8030 it could be the return value of a function, or a parameter
8031 to a function which lays down in the stack, or a temporary
8032 variable which must be passed by reference.
8034 We guarantee that the expression will either be constructed
8035 or copied into our original target. */
8037 tree slot = TREE_OPERAND (exp, 0);
8038 tree cleanups = NULL_TREE;
8041 if (TREE_CODE (slot) != VAR_DECL)
8045 target = original_target;
8047 /* Set this here so that if we get a target that refers to a
8048 register variable that's already been used, put_reg_into_stack
8049 knows that it should fix up those uses. */
8050 TREE_USED (slot) = 1;
8054 if (DECL_RTL (slot) != 0)
8056 target = DECL_RTL (slot);
8057 /* If we have already expanded the slot, so don't do
8059 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8064 target = assign_temp (type, 2, 0, 1);
8065 /* All temp slots at this level must not conflict. */
8066 preserve_temp_slots (target);
8067 DECL_RTL (slot) = target;
8068 if (TREE_ADDRESSABLE (slot))
8070 TREE_ADDRESSABLE (slot) = 0;
8071 mark_addressable (slot);
8074 /* Since SLOT is not known to the called function
8075 to belong to its stack frame, we must build an explicit
8076 cleanup. This case occurs when we must build up a reference
8077 to pass the reference as an argument. In this case,
8078 it is very likely that such a reference need not be
8081 if (TREE_OPERAND (exp, 2) == 0)
8082 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8083 cleanups = TREE_OPERAND (exp, 2);
8088 /* This case does occur, when expanding a parameter which
8089 needs to be constructed on the stack. The target
8090 is the actual stack address that we want to initialize.
8091 The function we call will perform the cleanup in this case. */
8093 /* If we have already assigned it space, use that space,
8094 not target that we were passed in, as our target
8095 parameter is only a hint. */
8096 if (DECL_RTL (slot) != 0)
8098 target = DECL_RTL (slot);
8099 /* If we have already expanded the slot, so don't do
8101 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8106 DECL_RTL (slot) = target;
8107 /* If we must have an addressable slot, then make sure that
8108 the RTL that we just stored in slot is OK. */
8109 if (TREE_ADDRESSABLE (slot))
8111 TREE_ADDRESSABLE (slot) = 0;
8112 mark_addressable (slot);
8117 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8118 /* Mark it as expanded. */
8119 TREE_OPERAND (exp, 1) = NULL_TREE;
8121 store_expr (exp1, target, 0);
8123 expand_decl_cleanup (NULL_TREE, cleanups);
8130 tree lhs = TREE_OPERAND (exp, 0);
8131 tree rhs = TREE_OPERAND (exp, 1);
8132 tree noncopied_parts = 0;
8133 tree lhs_type = TREE_TYPE (lhs);
8135 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8136 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8137 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8138 TYPE_NONCOPIED_PARTS (lhs_type));
8139 while (noncopied_parts != 0)
8141 expand_assignment (TREE_VALUE (noncopied_parts),
8142 TREE_PURPOSE (noncopied_parts), 0, 0);
8143 noncopied_parts = TREE_CHAIN (noncopied_parts);
8150 /* If lhs is complex, expand calls in rhs before computing it.
8151 That's so we don't compute a pointer and save it over a call.
8152 If lhs is simple, compute it first so we can give it as a
8153 target if the rhs is just a call. This avoids an extra temp and copy
8154 and that prevents a partial-subsumption which makes bad code.
8155 Actually we could treat component_ref's of vars like vars. */
8157 tree lhs = TREE_OPERAND (exp, 0);
8158 tree rhs = TREE_OPERAND (exp, 1);
8159 tree noncopied_parts = 0;
8160 tree lhs_type = TREE_TYPE (lhs);
8164 if (TREE_CODE (lhs) != VAR_DECL
8165 && TREE_CODE (lhs) != RESULT_DECL
8166 && TREE_CODE (lhs) != PARM_DECL
8167 && ! (TREE_CODE (lhs) == INDIRECT_REF
8168 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8169 preexpand_calls (exp);
8171 /* Check for |= or &= of a bitfield of size one into another bitfield
8172 of size 1. In this case, (unless we need the result of the
8173 assignment) we can do this more efficiently with a
8174 test followed by an assignment, if necessary.
8176 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8177 things change so we do, this code should be enhanced to
8180 && TREE_CODE (lhs) == COMPONENT_REF
8181 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8182 || TREE_CODE (rhs) == BIT_AND_EXPR)
8183 && TREE_OPERAND (rhs, 0) == lhs
8184 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8185 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8186 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8188 rtx label = gen_label_rtx ();
8190 do_jump (TREE_OPERAND (rhs, 1),
8191 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8192 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8193 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8194 (TREE_CODE (rhs) == BIT_IOR_EXPR
8196 : integer_zero_node)),
8198 do_pending_stack_adjust ();
8203 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8204 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8205 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8206 TYPE_NONCOPIED_PARTS (lhs_type));
8208 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8209 while (noncopied_parts != 0)
8211 expand_assignment (TREE_PURPOSE (noncopied_parts),
8212 TREE_VALUE (noncopied_parts), 0, 0);
8213 noncopied_parts = TREE_CHAIN (noncopied_parts);
8219 if (!TREE_OPERAND (exp, 0))
8220 expand_null_return ();
8222 expand_return (TREE_OPERAND (exp, 0));
8225 case PREINCREMENT_EXPR:
8226 case PREDECREMENT_EXPR:
8227 return expand_increment (exp, 0, ignore);
8229 case POSTINCREMENT_EXPR:
8230 case POSTDECREMENT_EXPR:
8231 /* Faster to treat as pre-increment if result is not used. */
8232 return expand_increment (exp, ! ignore, ignore);
8235 /* If nonzero, TEMP will be set to the address of something that might
8236 be a MEM corresponding to a stack slot. */
8239 /* Are we taking the address of a nested function? */
8240 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8241 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8242 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8243 && ! TREE_STATIC (exp))
8245 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8246 op0 = force_operand (op0, target);
8248 /* If we are taking the address of something erroneous, just
8250 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8254 /* We make sure to pass const0_rtx down if we came in with
8255 ignore set, to avoid doing the cleanups twice for something. */
8256 op0 = expand_expr (TREE_OPERAND (exp, 0),
8257 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8258 (modifier == EXPAND_INITIALIZER
8259 ? modifier : EXPAND_CONST_ADDRESS));
8261 /* If we are going to ignore the result, OP0 will have been set
8262 to const0_rtx, so just return it. Don't get confused and
8263 think we are taking the address of the constant. */
8267 op0 = protect_from_queue (op0, 0);
8269 /* We would like the object in memory. If it is a constant, we can
8270 have it be statically allocated into memory. For a non-constant,
8271 we need to allocate some memory and store the value into it. */
8273 if (CONSTANT_P (op0))
8274 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8276 else if (GET_CODE (op0) == MEM)
8278 mark_temp_addr_taken (op0);
8279 temp = XEXP (op0, 0);
8282 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8283 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8285 /* If this object is in a register, it must be not
8287 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8288 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8290 mark_temp_addr_taken (memloc);
8291 emit_move_insn (memloc, op0);
8295 if (GET_CODE (op0) != MEM)
8298 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8300 temp = XEXP (op0, 0);
8301 #ifdef POINTERS_EXTEND_UNSIGNED
8302 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8303 && mode == ptr_mode)
8304 temp = convert_memory_address (ptr_mode, temp);
8309 op0 = force_operand (XEXP (op0, 0), target);
8312 if (flag_force_addr && GET_CODE (op0) != REG)
8313 op0 = force_reg (Pmode, op0);
8315 if (GET_CODE (op0) == REG
8316 && ! REG_USERVAR_P (op0))
8317 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8319 /* If we might have had a temp slot, add an equivalent address
8322 update_temp_slot_address (temp, op0);
8324 #ifdef POINTERS_EXTEND_UNSIGNED
8325 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8326 && mode == ptr_mode)
8327 op0 = convert_memory_address (ptr_mode, op0);
8332 case ENTRY_VALUE_EXPR:
8335 /* COMPLEX type for Extended Pascal & Fortran */
8338 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8341 /* Get the rtx code of the operands. */
8342 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8343 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8346 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8350 /* Move the real (op0) and imaginary (op1) parts to their location. */
8351 emit_move_insn (gen_realpart (mode, target), op0);
8352 emit_move_insn (gen_imagpart (mode, target), op1);
8354 insns = get_insns ();
8357 /* Complex construction should appear as a single unit. */
8358 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8359 each with a separate pseudo as destination.
8360 It's not correct for flow to treat them as a unit. */
8361 if (GET_CODE (target) != CONCAT)
8362 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8371 return gen_realpart (mode, op0);
8374 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8375 return gen_imagpart (mode, op0);
8379 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8386 target = gen_reg_rtx (mode);
8390 /* Store the realpart and the negated imagpart to target. */
8391 emit_move_insn (gen_realpart (partmode, target),
8392 gen_realpart (partmode, op0));
8394 imag_t = gen_imagpart (partmode, target);
8395 temp = expand_unop (partmode, neg_optab,
8396 gen_imagpart (partmode, op0), imag_t, 0);
8398 emit_move_insn (imag_t, temp);
8400 insns = get_insns ();
8403 /* Conjugate should appear as a single unit
8404 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8405 each with a separate pseudo as destination.
8406 It's not correct for flow to treat them as a unit. */
8407 if (GET_CODE (target) != CONCAT)
8408 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8415 case TRY_CATCH_EXPR:
8417 tree handler = TREE_OPERAND (exp, 1);
8419 expand_eh_region_start ();
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8423 expand_eh_region_end (handler);
8428 case TRY_FINALLY_EXPR:
8430 tree try_block = TREE_OPERAND (exp, 0);
8431 tree finally_block = TREE_OPERAND (exp, 1);
8432 rtx finally_label = gen_label_rtx ();
8433 rtx done_label = gen_label_rtx ();
8434 rtx return_link = gen_reg_rtx (Pmode);
8435 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8436 (tree) finally_label, (tree) return_link);
8437 TREE_SIDE_EFFECTS (cleanup) = 1;
8439 /* Start a new binding layer that will keep track of all cleanup
8440 actions to be performed. */
8441 expand_start_bindings (2);
8443 target_temp_slot_level = temp_slot_level;
8445 expand_decl_cleanup (NULL_TREE, cleanup);
8446 op0 = expand_expr (try_block, target, tmode, modifier);
8448 preserve_temp_slots (op0);
8449 expand_end_bindings (NULL_TREE, 0, 0);
8450 emit_jump (done_label);
8451 emit_label (finally_label);
8452 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8453 emit_indirect_jump (return_link);
8454 emit_label (done_label);
8458 case GOTO_SUBROUTINE_EXPR:
8460 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8461 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8462 rtx return_address = gen_label_rtx ();
8463 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8465 emit_label (return_address);
8471 rtx dcc = get_dynamic_cleanup_chain ();
8472 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8478 rtx dhc = get_dynamic_handler_chain ();
8479 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8484 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8487 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8490 /* Here to do an ordinary binary operator, generating an instruction
8491 from the optab already placed in `this_optab'. */
8493 preexpand_calls (exp);
8494 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8496 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8497 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8499 temp = expand_binop (mode, this_optab, op0, op1, target,
8500 unsignedp, OPTAB_LIB_WIDEN);
8506 /* Similar to expand_expr, except that we don't specify a target, target
8507 mode, or modifier and we return the alignment of the inner type. This is
8508 used in cases where it is not necessary to align the result to the
8509 alignment of its type as long as we know the alignment of the result, for
8510 example for comparisons of BLKmode values. */
8513 expand_expr_unaligned (exp, palign)
8515 unsigned int *palign;
8518 tree type = TREE_TYPE (exp);
8519 register enum machine_mode mode = TYPE_MODE (type);
8521 /* Default the alignment we return to that of the type. */
8522 *palign = TYPE_ALIGN (type);
8524 /* The only cases in which we do anything special is if the resulting mode
8526 if (mode != BLKmode)
8527 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8529 switch (TREE_CODE (exp))
8533 case NON_LVALUE_EXPR:
8534 /* Conversions between BLKmode values don't change the underlying
8535 alignment or value. */
8536 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8537 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8541 /* Much of the code for this case is copied directly from expand_expr.
8542 We need to duplicate it here because we will do something different
8543 in the fall-through case, so we need to handle the same exceptions
8546 tree array = TREE_OPERAND (exp, 0);
8547 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8548 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8549 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8552 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8555 /* Optimize the special-case of a zero lower bound.
8557 We convert the low_bound to sizetype to avoid some problems
8558 with constant folding. (E.g. suppose the lower bound is 1,
8559 and its mode is QI. Without the conversion, (ARRAY
8560 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8561 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8563 if (! integer_zerop (low_bound))
8564 index = size_diffop (index, convert (sizetype, low_bound));
8566 /* If this is a constant index into a constant array,
8567 just get the value from the array. Handle both the cases when
8568 we have an explicit constructor and when our operand is a variable
8569 that was declared const. */
8571 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8572 && 0 > compare_tree_int (index,
8573 list_length (CONSTRUCTOR_ELTS
8574 (TREE_OPERAND (exp, 0)))))
8578 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8579 i = TREE_INT_CST_LOW (index);
8580 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8584 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8587 else if (optimize >= 1
8588 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8589 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8590 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8592 if (TREE_CODE (index) == INTEGER_CST)
8594 tree init = DECL_INITIAL (array);
8596 if (TREE_CODE (init) == CONSTRUCTOR)
8600 for (elem = CONSTRUCTOR_ELTS (init);
8601 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8602 elem = TREE_CHAIN (elem))
8606 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8613 /* ... fall through ... */
8617 /* If the operand is a CONSTRUCTOR, we can just extract the
8618 appropriate field if it is present. Don't do this if we have
8619 already written the data since we want to refer to that copy
8620 and varasm.c assumes that's what we'll do. */
8621 if (TREE_CODE (exp) != ARRAY_REF
8622 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8623 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8627 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8628 elt = TREE_CHAIN (elt))
8629 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8630 /* Note that unlike the case in expand_expr, we know this is
8631 BLKmode and hence not an integer. */
8632 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8636 enum machine_mode mode1;
8637 HOST_WIDE_INT bitsize, bitpos;
8640 unsigned int alignment;
8642 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8643 &mode1, &unsignedp, &volatilep,
8646 /* If we got back the original object, something is wrong. Perhaps
8647 we are evaluating an expression too early. In any event, don't
8648 infinitely recurse. */
8652 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8654 /* If this is a constant, put it into a register if it is a
8655 legitimate constant and OFFSET is 0 and memory if it isn't. */
8656 if (CONSTANT_P (op0))
8658 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8660 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8662 op0 = force_reg (inner_mode, op0);
8664 op0 = validize_mem (force_const_mem (inner_mode, op0));
8669 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8671 /* If this object is in a register, put it into memory.
8672 This case can't occur in C, but can in Ada if we have
8673 unchecked conversion of an expression from a scalar type to
8674 an array or record type. */
8675 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8676 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8678 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8680 mark_temp_addr_taken (memloc);
8681 emit_move_insn (memloc, op0);
8685 if (GET_CODE (op0) != MEM)
8688 if (GET_MODE (offset_rtx) != ptr_mode)
8690 #ifdef POINTERS_EXTEND_UNSIGNED
8691 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8693 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8697 op0 = change_address (op0, VOIDmode,
8698 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8699 force_reg (ptr_mode,
8703 /* Don't forget about volatility even if this is a bitfield. */
8704 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8706 op0 = copy_rtx (op0);
8707 MEM_VOLATILE_P (op0) = 1;
8710 /* Check the access. */
8711 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8716 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8717 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8719 /* Check the access right of the pointer. */
8720 if (size > BITS_PER_UNIT)
8721 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8722 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8723 TYPE_MODE (sizetype),
8724 GEN_INT (MEMORY_USE_RO),
8725 TYPE_MODE (integer_type_node));
8728 /* In cases where an aligned union has an unaligned object
8729 as a field, we might be extracting a BLKmode value from
8730 an integer-mode (e.g., SImode) object. Handle this case
8731 by doing the extract into an object as wide as the field
8732 (which we know to be the width of a basic mode), then
8733 storing into memory, and changing the mode to BLKmode.
8734 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8735 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8736 if (mode1 == VOIDmode
8737 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8738 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8739 && (TYPE_ALIGN (type) > alignment
8740 || bitpos % TYPE_ALIGN (type) != 0)))
8742 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8744 if (ext_mode == BLKmode)
8746 /* In this case, BITPOS must start at a byte boundary. */
8747 if (GET_CODE (op0) != MEM
8748 || bitpos % BITS_PER_UNIT != 0)
8751 op0 = change_address (op0, VOIDmode,
8752 plus_constant (XEXP (op0, 0),
8753 bitpos / BITS_PER_UNIT));
8757 rtx new = assign_stack_temp (ext_mode,
8758 bitsize / BITS_PER_UNIT, 0);
8760 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8761 unsignedp, NULL_RTX, ext_mode,
8762 ext_mode, alignment,
8763 int_size_in_bytes (TREE_TYPE (tem)));
8765 /* If the result is a record type and BITSIZE is narrower than
8766 the mode of OP0, an integral mode, and this is a big endian
8767 machine, we must put the field into the high-order bits. */
8768 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8769 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8770 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8771 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8772 size_int (GET_MODE_BITSIZE
8778 emit_move_insn (new, op0);
8779 op0 = copy_rtx (new);
8780 PUT_MODE (op0, BLKmode);
8784 /* Get a reference to just this component. */
8785 op0 = change_address (op0, mode1,
8786 plus_constant (XEXP (op0, 0),
8787 (bitpos / BITS_PER_UNIT)));
8789 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8791 /* Adjust the alignment in case the bit position is not
8792 a multiple of the alignment of the inner object. */
8793 while (bitpos % alignment != 0)
8796 if (GET_CODE (XEXP (op0, 0)) == REG)
8797 mark_reg_pointer (XEXP (op0, 0), alignment);
8799 MEM_IN_STRUCT_P (op0) = 1;
8800 MEM_VOLATILE_P (op0) |= volatilep;
8802 *palign = alignment;
8811 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8814 /* Return the tree node if a ARG corresponds to a string constant or zero
8815 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8816 in bytes within the string that ARG is accessing. The type of the
8817 offset will be `sizetype'. */
8820 string_constant (arg, ptr_offset)
8826 if (TREE_CODE (arg) == ADDR_EXPR
8827 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8829 *ptr_offset = size_zero_node;
8830 return TREE_OPERAND (arg, 0);
8832 else if (TREE_CODE (arg) == PLUS_EXPR)
8834 tree arg0 = TREE_OPERAND (arg, 0);
8835 tree arg1 = TREE_OPERAND (arg, 1);
8840 if (TREE_CODE (arg0) == ADDR_EXPR
8841 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8843 *ptr_offset = convert (sizetype, arg1);
8844 return TREE_OPERAND (arg0, 0);
8846 else if (TREE_CODE (arg1) == ADDR_EXPR
8847 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8849 *ptr_offset = convert (sizetype, arg0);
8850 return TREE_OPERAND (arg1, 0);
8857 /* Expand code for a post- or pre- increment or decrement
8858 and return the RTX for the result.
8859 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8862 expand_increment (exp, post, ignore)
8866 register rtx op0, op1;
8867 register rtx temp, value;
8868 register tree incremented = TREE_OPERAND (exp, 0);
8869 optab this_optab = add_optab;
8871 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8872 int op0_is_copy = 0;
8873 int single_insn = 0;
8874 /* 1 means we can't store into OP0 directly,
8875 because it is a subreg narrower than a word,
8876 and we don't dare clobber the rest of the word. */
8879 /* Stabilize any component ref that might need to be
8880 evaluated more than once below. */
8882 || TREE_CODE (incremented) == BIT_FIELD_REF
8883 || (TREE_CODE (incremented) == COMPONENT_REF
8884 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8885 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8886 incremented = stabilize_reference (incremented);
8887 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8888 ones into save exprs so that they don't accidentally get evaluated
8889 more than once by the code below. */
8890 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8891 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8892 incremented = save_expr (incremented);
8894 /* Compute the operands as RTX.
8895 Note whether OP0 is the actual lvalue or a copy of it:
8896 I believe it is a copy iff it is a register or subreg
8897 and insns were generated in computing it. */
8899 temp = get_last_insn ();
8900 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8902 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8903 in place but instead must do sign- or zero-extension during assignment,
8904 so we copy it into a new register and let the code below use it as
8907 Note that we can safely modify this SUBREG since it is know not to be
8908 shared (it was made by the expand_expr call above). */
8910 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8913 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8917 else if (GET_CODE (op0) == SUBREG
8918 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8920 /* We cannot increment this SUBREG in place. If we are
8921 post-incrementing, get a copy of the old value. Otherwise,
8922 just mark that we cannot increment in place. */
8924 op0 = copy_to_reg (op0);
8929 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8930 && temp != get_last_insn ());
8931 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8932 EXPAND_MEMORY_USE_BAD);
8934 /* Decide whether incrementing or decrementing. */
8935 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8936 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8937 this_optab = sub_optab;
8939 /* Convert decrement by a constant into a negative increment. */
8940 if (this_optab == sub_optab
8941 && GET_CODE (op1) == CONST_INT)
8943 op1 = GEN_INT (- INTVAL (op1));
8944 this_optab = add_optab;
8947 /* For a preincrement, see if we can do this with a single instruction. */
8950 icode = (int) this_optab->handlers[(int) mode].insn_code;
8951 if (icode != (int) CODE_FOR_nothing
8952 /* Make sure that OP0 is valid for operands 0 and 1
8953 of the insn we want to queue. */
8954 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8955 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8956 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8960 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8961 then we cannot just increment OP0. We must therefore contrive to
8962 increment the original value. Then, for postincrement, we can return
8963 OP0 since it is a copy of the old value. For preincrement, expand here
8964 unless we can do it with a single insn.
8966 Likewise if storing directly into OP0 would clobber high bits
8967 we need to preserve (bad_subreg). */
8968 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8970 /* This is the easiest way to increment the value wherever it is.
8971 Problems with multiple evaluation of INCREMENTED are prevented
8972 because either (1) it is a component_ref or preincrement,
8973 in which case it was stabilized above, or (2) it is an array_ref
8974 with constant index in an array in a register, which is
8975 safe to reevaluate. */
8976 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8977 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8978 ? MINUS_EXPR : PLUS_EXPR),
8981 TREE_OPERAND (exp, 1));
8983 while (TREE_CODE (incremented) == NOP_EXPR
8984 || TREE_CODE (incremented) == CONVERT_EXPR)
8986 newexp = convert (TREE_TYPE (incremented), newexp);
8987 incremented = TREE_OPERAND (incremented, 0);
8990 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8991 return post ? op0 : temp;
8996 /* We have a true reference to the value in OP0.
8997 If there is an insn to add or subtract in this mode, queue it.
8998 Queueing the increment insn avoids the register shuffling
8999 that often results if we must increment now and first save
9000 the old value for subsequent use. */
9002 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9003 op0 = stabilize (op0);
9006 icode = (int) this_optab->handlers[(int) mode].insn_code;
9007 if (icode != (int) CODE_FOR_nothing
9008 /* Make sure that OP0 is valid for operands 0 and 1
9009 of the insn we want to queue. */
9010 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9011 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9013 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9014 op1 = force_reg (mode, op1);
9016 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9018 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9020 rtx addr = (general_operand (XEXP (op0, 0), mode)
9021 ? force_reg (Pmode, XEXP (op0, 0))
9022 : copy_to_reg (XEXP (op0, 0)));
9025 op0 = change_address (op0, VOIDmode, addr);
9026 temp = force_reg (GET_MODE (op0), op0);
9027 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9028 op1 = force_reg (mode, op1);
9030 /* The increment queue is LIFO, thus we have to `queue'
9031 the instructions in reverse order. */
9032 enqueue_insn (op0, gen_move_insn (op0, temp));
9033 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9038 /* Preincrement, or we can't increment with one simple insn. */
9040 /* Save a copy of the value before inc or dec, to return it later. */
9041 temp = value = copy_to_reg (op0);
9043 /* Arrange to return the incremented value. */
9044 /* Copy the rtx because expand_binop will protect from the queue,
9045 and the results of that would be invalid for us to return
9046 if our caller does emit_queue before using our result. */
9047 temp = copy_rtx (value = op0);
9049 /* Increment however we can. */
9050 op1 = expand_binop (mode, this_optab, value, op1,
9051 current_function_check_memory_usage ? NULL_RTX : op0,
9052 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9053 /* Make sure the value is stored into OP0. */
9055 emit_move_insn (op0, op1);
9060 /* Expand all function calls contained within EXP, innermost ones first.
9061 But don't look within expressions that have sequence points.
9062 For each CALL_EXPR, record the rtx for its value
9063 in the CALL_EXPR_RTL field. */
9066 preexpand_calls (exp)
9069 register int nops, i;
9070 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9072 if (! do_preexpand_calls)
9075 /* Only expressions and references can contain calls. */
9077 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9080 switch (TREE_CODE (exp))
9083 /* Do nothing if already expanded. */
9084 if (CALL_EXPR_RTL (exp) != 0
9085 /* Do nothing if the call returns a variable-sized object. */
9086 || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
9087 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9088 /* Do nothing to built-in functions. */
9089 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9090 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9092 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9095 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9100 case TRUTH_ANDIF_EXPR:
9101 case TRUTH_ORIF_EXPR:
9102 /* If we find one of these, then we can be sure
9103 the adjust will be done for it (since it makes jumps).
9104 Do it now, so that if this is inside an argument
9105 of a function, we don't get the stack adjustment
9106 after some other args have already been pushed. */
9107 do_pending_stack_adjust ();
9112 case WITH_CLEANUP_EXPR:
9113 case CLEANUP_POINT_EXPR:
9114 case TRY_CATCH_EXPR:
9118 if (SAVE_EXPR_RTL (exp) != 0)
9125 nops = tree_code_length[(int) TREE_CODE (exp)];
9126 for (i = 0; i < nops; i++)
9127 if (TREE_OPERAND (exp, i) != 0)
9129 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9130 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9131 It doesn't happen before the call is made. */
9135 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9136 if (type == 'e' || type == '<' || type == '1' || type == '2'
9138 preexpand_calls (TREE_OPERAND (exp, i));
9143 /* At the start of a function, record that we have no previously-pushed
9144 arguments waiting to be popped. */
9147 init_pending_stack_adjust ()
9149 pending_stack_adjust = 0;
9152 /* When exiting from function, if safe, clear out any pending stack adjust
9153 so the adjustment won't get done.
9155 Note, if the current function calls alloca, then it must have a
9156 frame pointer regardless of the value of flag_omit_frame_pointer. */
9159 clear_pending_stack_adjust ()
9161 #ifdef EXIT_IGNORE_STACK
9163 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9164 && EXIT_IGNORE_STACK
9165 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9166 && ! flag_inline_functions)
9168 stack_pointer_delta -= pending_stack_adjust,
9169 pending_stack_adjust = 0;
9174 /* Pop any previously-pushed arguments that have not been popped yet. */
9177 do_pending_stack_adjust ()
9179 if (inhibit_defer_pop == 0)
9181 if (pending_stack_adjust != 0)
9182 adjust_stack (GEN_INT (pending_stack_adjust));
9183 pending_stack_adjust = 0;
9187 /* Expand conditional expressions. */
9189 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9190 LABEL is an rtx of code CODE_LABEL, in this function and all the
9194 jumpifnot (exp, label)
9198 do_jump (exp, label, NULL_RTX);
9201 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9208 do_jump (exp, NULL_RTX, label);
9211 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9212 the result is zero, or IF_TRUE_LABEL if the result is one.
9213 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9214 meaning fall through in that case.
9216 do_jump always does any pending stack adjust except when it does not
9217 actually perform a jump. An example where there is no jump
9218 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9220 This function is responsible for optimizing cases such as
9221 &&, || and comparison operators in EXP. */
9224 do_jump (exp, if_false_label, if_true_label)
9226 rtx if_false_label, if_true_label;
9228 register enum tree_code code = TREE_CODE (exp);
9229 /* Some cases need to create a label to jump to
9230 in order to properly fall through.
9231 These cases set DROP_THROUGH_LABEL nonzero. */
9232 rtx drop_through_label = 0;
9236 enum machine_mode mode;
9238 #ifdef MAX_INTEGER_COMPUTATION_MODE
9239 check_max_integer_computation_mode (exp);
9250 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9256 /* This is not true with #pragma weak */
9258 /* The address of something can never be zero. */
9260 emit_jump (if_true_label);
9265 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9266 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9267 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9270 /* If we are narrowing the operand, we have to do the compare in the
9272 if ((TYPE_PRECISION (TREE_TYPE (exp))
9273 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9275 case NON_LVALUE_EXPR:
9276 case REFERENCE_EXPR:
9281 /* These cannot change zero->non-zero or vice versa. */
9282 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9285 case WITH_RECORD_EXPR:
9286 /* Put the object on the placeholder list, recurse through our first
9287 operand, and pop the list. */
9288 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9290 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9291 placeholder_list = TREE_CHAIN (placeholder_list);
9295 /* This is never less insns than evaluating the PLUS_EXPR followed by
9296 a test and can be longer if the test is eliminated. */
9298 /* Reduce to minus. */
9299 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9300 TREE_OPERAND (exp, 0),
9301 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9302 TREE_OPERAND (exp, 1))));
9303 /* Process as MINUS. */
9307 /* Non-zero iff operands of minus differ. */
9308 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9309 TREE_OPERAND (exp, 0),
9310 TREE_OPERAND (exp, 1)),
9311 NE, NE, if_false_label, if_true_label);
9315 /* If we are AND'ing with a small constant, do this comparison in the
9316 smallest type that fits. If the machine doesn't have comparisons
9317 that small, it will be converted back to the wider comparison.
9318 This helps if we are testing the sign bit of a narrower object.
9319 combine can't do this for us because it can't know whether a
9320 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9322 if (! SLOW_BYTE_ACCESS
9323 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9324 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9325 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9326 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9327 && (type = type_for_mode (mode, 1)) != 0
9328 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9329 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9330 != CODE_FOR_nothing))
9332 do_jump (convert (type, exp), if_false_label, if_true_label);
9337 case TRUTH_NOT_EXPR:
9338 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9341 case TRUTH_ANDIF_EXPR:
9342 if (if_false_label == 0)
9343 if_false_label = drop_through_label = gen_label_rtx ();
9344 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9345 start_cleanup_deferral ();
9346 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9347 end_cleanup_deferral ();
9350 case TRUTH_ORIF_EXPR:
9351 if (if_true_label == 0)
9352 if_true_label = drop_through_label = gen_label_rtx ();
9353 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9354 start_cleanup_deferral ();
9355 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9356 end_cleanup_deferral ();
9361 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9362 preserve_temp_slots (NULL_RTX);
9366 do_pending_stack_adjust ();
9367 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9374 HOST_WIDE_INT bitsize, bitpos;
9376 enum machine_mode mode;
9380 unsigned int alignment;
9382 /* Get description of this reference. We don't actually care
9383 about the underlying object here. */
9384 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9385 &unsignedp, &volatilep, &alignment);
9387 type = type_for_size (bitsize, unsignedp);
9388 if (! SLOW_BYTE_ACCESS
9389 && type != 0 && bitsize >= 0
9390 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9391 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9392 != CODE_FOR_nothing))
9394 do_jump (convert (type, exp), if_false_label, if_true_label);
9401 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9402 if (integer_onep (TREE_OPERAND (exp, 1))
9403 && integer_zerop (TREE_OPERAND (exp, 2)))
9404 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9406 else if (integer_zerop (TREE_OPERAND (exp, 1))
9407 && integer_onep (TREE_OPERAND (exp, 2)))
9408 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9412 register rtx label1 = gen_label_rtx ();
9413 drop_through_label = gen_label_rtx ();
9415 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9417 start_cleanup_deferral ();
9418 /* Now the THEN-expression. */
9419 do_jump (TREE_OPERAND (exp, 1),
9420 if_false_label ? if_false_label : drop_through_label,
9421 if_true_label ? if_true_label : drop_through_label);
9422 /* In case the do_jump just above never jumps. */
9423 do_pending_stack_adjust ();
9424 emit_label (label1);
9426 /* Now the ELSE-expression. */
9427 do_jump (TREE_OPERAND (exp, 2),
9428 if_false_label ? if_false_label : drop_through_label,
9429 if_true_label ? if_true_label : drop_through_label);
9430 end_cleanup_deferral ();
9436 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9438 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9439 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9441 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9442 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9445 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9446 fold (build (EQ_EXPR, TREE_TYPE (exp),
9447 fold (build1 (REALPART_EXPR,
9448 TREE_TYPE (inner_type),
9450 fold (build1 (REALPART_EXPR,
9451 TREE_TYPE (inner_type),
9453 fold (build (EQ_EXPR, TREE_TYPE (exp),
9454 fold (build1 (IMAGPART_EXPR,
9455 TREE_TYPE (inner_type),
9457 fold (build1 (IMAGPART_EXPR,
9458 TREE_TYPE (inner_type),
9460 if_false_label, if_true_label);
9463 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9464 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9466 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9467 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9468 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9470 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9476 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9478 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9479 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9481 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9482 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9485 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9486 fold (build (NE_EXPR, TREE_TYPE (exp),
9487 fold (build1 (REALPART_EXPR,
9488 TREE_TYPE (inner_type),
9490 fold (build1 (REALPART_EXPR,
9491 TREE_TYPE (inner_type),
9493 fold (build (NE_EXPR, TREE_TYPE (exp),
9494 fold (build1 (IMAGPART_EXPR,
9495 TREE_TYPE (inner_type),
9497 fold (build1 (IMAGPART_EXPR,
9498 TREE_TYPE (inner_type),
9500 if_false_label, if_true_label);
9503 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9504 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9506 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9507 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9508 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9510 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9516 if (GET_MODE_CLASS (mode) == MODE_INT
9517 && ! can_compare_p (LT, mode, ccp_jump))
9518 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9520 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9524 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9525 if (GET_MODE_CLASS (mode) == MODE_INT
9526 && ! can_compare_p (LE, mode, ccp_jump))
9527 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9529 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9533 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9534 if (GET_MODE_CLASS (mode) == MODE_INT
9535 && ! can_compare_p (GT, mode, ccp_jump))
9536 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9538 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9542 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9543 if (GET_MODE_CLASS (mode) == MODE_INT
9544 && ! can_compare_p (GE, mode, ccp_jump))
9545 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9547 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9550 case UNORDERED_EXPR:
9553 enum rtx_code cmp, rcmp;
9556 if (code == UNORDERED_EXPR)
9557 cmp = UNORDERED, rcmp = ORDERED;
9559 cmp = ORDERED, rcmp = UNORDERED;
9560 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9563 if (! can_compare_p (cmp, mode, ccp_jump)
9564 && (can_compare_p (rcmp, mode, ccp_jump)
9565 /* If the target doesn't provide either UNORDERED or ORDERED
9566 comparisons, canonicalize on UNORDERED for the library. */
9567 || rcmp == UNORDERED))
9571 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9573 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9578 enum rtx_code rcode1;
9579 enum tree_code tcode2;
9603 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9604 if (can_compare_p (rcode1, mode, ccp_jump))
9605 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9609 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9610 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9613 /* If the target doesn't support combined unordered
9614 compares, decompose into UNORDERED + comparison. */
9615 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9616 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9617 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9618 do_jump (exp, if_false_label, if_true_label);
9625 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9627 /* This is not needed any more and causes poor code since it causes
9628 comparisons and tests from non-SI objects to have different code
9630 /* Copy to register to avoid generating bad insns by cse
9631 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9632 if (!cse_not_expected && GET_CODE (temp) == MEM)
9633 temp = copy_to_reg (temp);
9635 do_pending_stack_adjust ();
9636 /* Do any postincrements in the expression that was tested. */
9639 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9641 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9645 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9646 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9647 /* Note swapping the labels gives us not-equal. */
9648 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9649 else if (GET_MODE (temp) != VOIDmode)
9650 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9651 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9652 GET_MODE (temp), NULL_RTX, 0,
9653 if_false_label, if_true_label);
9658 if (drop_through_label)
9660 /* If do_jump produces code that might be jumped around,
9661 do any stack adjusts from that code, before the place
9662 where control merges in. */
9663 do_pending_stack_adjust ();
9664 emit_label (drop_through_label);
9668 /* Given a comparison expression EXP for values too wide to be compared
9669 with one insn, test the comparison and jump to the appropriate label.
9670 The code of EXP is ignored; we always test GT if SWAP is 0,
9671 and LT if SWAP is 1. */
9674 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9677 rtx if_false_label, if_true_label;
9679 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9680 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9681 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9682 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9684 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9687 /* Compare OP0 with OP1, word at a time, in mode MODE.
9688 UNSIGNEDP says to do unsigned comparison.
9689 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9692 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9693 enum machine_mode mode;
9696 rtx if_false_label, if_true_label;
9698 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9699 rtx drop_through_label = 0;
9702 if (! if_true_label || ! if_false_label)
9703 drop_through_label = gen_label_rtx ();
9704 if (! if_true_label)
9705 if_true_label = drop_through_label;
9706 if (! if_false_label)
9707 if_false_label = drop_through_label;
9709 /* Compare a word at a time, high order first. */
9710 for (i = 0; i < nwords; i++)
9712 rtx op0_word, op1_word;
9714 if (WORDS_BIG_ENDIAN)
9716 op0_word = operand_subword_force (op0, i, mode);
9717 op1_word = operand_subword_force (op1, i, mode);
9721 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9722 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9725 /* All but high-order word must be compared as unsigned. */
9726 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9727 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9728 NULL_RTX, if_true_label);
9730 /* Consider lower words only if these are equal. */
9731 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9732 NULL_RTX, 0, NULL_RTX, if_false_label);
9736 emit_jump (if_false_label);
9737 if (drop_through_label)
9738 emit_label (drop_through_label);
9741 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9742 with one insn, test the comparison and jump to the appropriate label. */
9745 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9747 rtx if_false_label, if_true_label;
9749 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9750 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9751 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9752 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9754 rtx drop_through_label = 0;
9756 if (! if_false_label)
9757 drop_through_label = if_false_label = gen_label_rtx ();
9759 for (i = 0; i < nwords; i++)
9760 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9761 operand_subword_force (op1, i, mode),
9762 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9763 word_mode, NULL_RTX, 0, if_false_label,
9767 emit_jump (if_true_label);
9768 if (drop_through_label)
9769 emit_label (drop_through_label);
9772 /* Jump according to whether OP0 is 0.
9773 We assume that OP0 has an integer mode that is too wide
9774 for the available compare insns. */
9777 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9779 rtx if_false_label, if_true_label;
9781 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9784 rtx drop_through_label = 0;
9786 /* The fastest way of doing this comparison on almost any machine is to
9787 "or" all the words and compare the result. If all have to be loaded
9788 from memory and this is a very wide item, it's possible this may
9789 be slower, but that's highly unlikely. */
9791 part = gen_reg_rtx (word_mode);
9792 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9793 for (i = 1; i < nwords && part != 0; i++)
9794 part = expand_binop (word_mode, ior_optab, part,
9795 operand_subword_force (op0, i, GET_MODE (op0)),
9796 part, 1, OPTAB_WIDEN);
9800 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9801 NULL_RTX, 0, if_false_label, if_true_label);
9806 /* If we couldn't do the "or" simply, do this with a series of compares. */
9807 if (! if_false_label)
9808 drop_through_label = if_false_label = gen_label_rtx ();
9810 for (i = 0; i < nwords; i++)
9811 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9812 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9813 if_false_label, NULL_RTX);
9816 emit_jump (if_true_label);
9818 if (drop_through_label)
9819 emit_label (drop_through_label);
9822 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9823 (including code to compute the values to be compared)
9824 and set (CC0) according to the result.
9825 The decision as to signed or unsigned comparison must be made by the caller.
9827 We force a stack adjustment unless there are currently
9828 things pushed on the stack that aren't yet used.
9830 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9833 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9834 size of MODE should be used. */
9837 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9838 register rtx op0, op1;
9841 enum machine_mode mode;
9847 /* If one operand is constant, make it the second one. Only do this
9848 if the other operand is not constant as well. */
9850 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9851 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9856 code = swap_condition (code);
9861 op0 = force_not_mem (op0);
9862 op1 = force_not_mem (op1);
9865 do_pending_stack_adjust ();
9867 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9868 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9872 /* There's no need to do this now that combine.c can eliminate lots of
9873 sign extensions. This can be less efficient in certain cases on other
9876 /* If this is a signed equality comparison, we can do it as an
9877 unsigned comparison since zero-extension is cheaper than sign
9878 extension and comparisons with zero are done as unsigned. This is
9879 the case even on machines that can do fast sign extension, since
9880 zero-extension is easier to combine with other operations than
9881 sign-extension is. If we are comparing against a constant, we must
9882 convert it to what it would look like unsigned. */
9883 if ((code == EQ || code == NE) && ! unsignedp
9884 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9886 if (GET_CODE (op1) == CONST_INT
9887 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9888 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9893 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9895 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9898 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9899 The decision as to signed or unsigned comparison must be made by the caller.
9901 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9904 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9905 size of MODE should be used. */
9908 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9909 if_false_label, if_true_label)
9910 register rtx op0, op1;
9913 enum machine_mode mode;
9916 rtx if_false_label, if_true_label;
9919 int dummy_true_label = 0;
9921 /* Reverse the comparison if that is safe and we want to jump if it is
9923 if (! if_true_label && ! FLOAT_MODE_P (mode))
9925 if_true_label = if_false_label;
9927 code = reverse_condition (code);
9930 /* If one operand is constant, make it the second one. Only do this
9931 if the other operand is not constant as well. */
9933 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9934 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9939 code = swap_condition (code);
9944 op0 = force_not_mem (op0);
9945 op1 = force_not_mem (op1);
9948 do_pending_stack_adjust ();
9950 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9951 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9953 if (tem == const_true_rtx)
9956 emit_jump (if_true_label);
9961 emit_jump (if_false_label);
9967 /* There's no need to do this now that combine.c can eliminate lots of
9968 sign extensions. This can be less efficient in certain cases on other
9971 /* If this is a signed equality comparison, we can do it as an
9972 unsigned comparison since zero-extension is cheaper than sign
9973 extension and comparisons with zero are done as unsigned. This is
9974 the case even on machines that can do fast sign extension, since
9975 zero-extension is easier to combine with other operations than
9976 sign-extension is. If we are comparing against a constant, we must
9977 convert it to what it would look like unsigned. */
9978 if ((code == EQ || code == NE) && ! unsignedp
9979 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9981 if (GET_CODE (op1) == CONST_INT
9982 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9983 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9988 if (! if_true_label)
9990 dummy_true_label = 1;
9991 if_true_label = gen_label_rtx ();
9994 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9998 emit_jump (if_false_label);
9999 if (dummy_true_label)
10000 emit_label (if_true_label);
10003 /* Generate code for a comparison expression EXP (including code to compute
10004 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10005 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10006 generated code will drop through.
10007 SIGNED_CODE should be the rtx operation for this comparison for
10008 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10010 We force a stack adjustment unless there are currently
10011 things pushed on the stack that aren't yet used. */
10014 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10017 enum rtx_code signed_code, unsigned_code;
10018 rtx if_false_label, if_true_label;
10020 unsigned int align0, align1;
10021 register rtx op0, op1;
10022 register tree type;
10023 register enum machine_mode mode;
10025 enum rtx_code code;
10027 /* Don't crash if the comparison was erroneous. */
10028 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10029 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10032 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10033 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10034 mode = TYPE_MODE (type);
10035 unsignedp = TREE_UNSIGNED (type);
10036 code = unsignedp ? unsigned_code : signed_code;
10038 #ifdef HAVE_canonicalize_funcptr_for_compare
10039 /* If function pointers need to be "canonicalized" before they can
10040 be reliably compared, then canonicalize them. */
10041 if (HAVE_canonicalize_funcptr_for_compare
10042 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10043 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10046 rtx new_op0 = gen_reg_rtx (mode);
10048 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10052 if (HAVE_canonicalize_funcptr_for_compare
10053 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10054 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10057 rtx new_op1 = gen_reg_rtx (mode);
10059 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10064 /* Do any postincrements in the expression that was tested. */
10067 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10069 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10070 MIN (align0, align1),
10071 if_false_label, if_true_label);
10074 /* Generate code to calculate EXP using a store-flag instruction
10075 and return an rtx for the result. EXP is either a comparison
10076 or a TRUTH_NOT_EXPR whose operand is a comparison.
10078 If TARGET is nonzero, store the result there if convenient.
10080 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10083 Return zero if there is no suitable set-flag instruction
10084 available on this machine.
10086 Once expand_expr has been called on the arguments of the comparison,
10087 we are committed to doing the store flag, since it is not safe to
10088 re-evaluate the expression. We emit the store-flag insn by calling
10089 emit_store_flag, but only expand the arguments if we have a reason
10090 to believe that emit_store_flag will be successful. If we think that
10091 it will, but it isn't, we have to simulate the store-flag with a
10092 set/jump/set sequence. */
10095 do_store_flag (exp, target, mode, only_cheap)
10098 enum machine_mode mode;
10101 enum rtx_code code;
10102 tree arg0, arg1, type;
10104 enum machine_mode operand_mode;
10108 enum insn_code icode;
10109 rtx subtarget = target;
10112 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10113 result at the end. We can't simply invert the test since it would
10114 have already been inverted if it were valid. This case occurs for
10115 some floating-point comparisons. */
10117 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10118 invert = 1, exp = TREE_OPERAND (exp, 0);
10120 arg0 = TREE_OPERAND (exp, 0);
10121 arg1 = TREE_OPERAND (exp, 1);
10122 type = TREE_TYPE (arg0);
10123 operand_mode = TYPE_MODE (type);
10124 unsignedp = TREE_UNSIGNED (type);
10126 /* We won't bother with BLKmode store-flag operations because it would mean
10127 passing a lot of information to emit_store_flag. */
10128 if (operand_mode == BLKmode)
10131 /* We won't bother with store-flag operations involving function pointers
10132 when function pointers must be canonicalized before comparisons. */
10133 #ifdef HAVE_canonicalize_funcptr_for_compare
10134 if (HAVE_canonicalize_funcptr_for_compare
10135 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10136 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10138 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10139 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10140 == FUNCTION_TYPE))))
10147 /* Get the rtx comparison code to use. We know that EXP is a comparison
10148 operation of some type. Some comparisons against 1 and -1 can be
10149 converted to comparisons with zero. Do so here so that the tests
10150 below will be aware that we have a comparison with zero. These
10151 tests will not catch constants in the first operand, but constants
10152 are rarely passed as the first operand. */
10154 switch (TREE_CODE (exp))
10163 if (integer_onep (arg1))
10164 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10166 code = unsignedp ? LTU : LT;
10169 if (! unsignedp && integer_all_onesp (arg1))
10170 arg1 = integer_zero_node, code = LT;
10172 code = unsignedp ? LEU : LE;
10175 if (! unsignedp && integer_all_onesp (arg1))
10176 arg1 = integer_zero_node, code = GE;
10178 code = unsignedp ? GTU : GT;
10181 if (integer_onep (arg1))
10182 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10184 code = unsignedp ? GEU : GE;
10187 case UNORDERED_EXPR:
10213 /* Put a constant second. */
10214 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10216 tem = arg0; arg0 = arg1; arg1 = tem;
10217 code = swap_condition (code);
10220 /* If this is an equality or inequality test of a single bit, we can
10221 do this by shifting the bit being tested to the low-order bit and
10222 masking the result with the constant 1. If the condition was EQ,
10223 we xor it with 1. This does not require an scc insn and is faster
10224 than an scc insn even if we have it. */
10226 if ((code == NE || code == EQ)
10227 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10228 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10230 tree inner = TREE_OPERAND (arg0, 0);
10231 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10234 /* If INNER is a right shift of a constant and it plus BITNUM does
10235 not overflow, adjust BITNUM and INNER. */
10237 if (TREE_CODE (inner) == RSHIFT_EXPR
10238 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10239 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10240 && bitnum < TYPE_PRECISION (type)
10241 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10242 bitnum - TYPE_PRECISION (type)))
10244 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10245 inner = TREE_OPERAND (inner, 0);
10248 /* If we are going to be able to omit the AND below, we must do our
10249 operations as unsigned. If we must use the AND, we have a choice.
10250 Normally unsigned is faster, but for some machines signed is. */
10251 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10252 #ifdef LOAD_EXTEND_OP
10253 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10259 if (! get_subtarget (subtarget)
10260 || GET_MODE (subtarget) != operand_mode
10261 || ! safe_from_p (subtarget, inner, 1))
10264 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10267 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10268 size_int (bitnum), subtarget, ops_unsignedp);
10270 if (GET_MODE (op0) != mode)
10271 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10273 if ((code == EQ && ! invert) || (code == NE && invert))
10274 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10275 ops_unsignedp, OPTAB_LIB_WIDEN);
10277 /* Put the AND last so it can combine with more things. */
10278 if (bitnum != TYPE_PRECISION (type) - 1)
10279 op0 = expand_and (op0, const1_rtx, subtarget);
10284 /* Now see if we are likely to be able to do this. Return if not. */
10285 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10288 icode = setcc_gen_code[(int) code];
10289 if (icode == CODE_FOR_nothing
10290 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10292 /* We can only do this if it is one of the special cases that
10293 can be handled without an scc insn. */
10294 if ((code == LT && integer_zerop (arg1))
10295 || (! only_cheap && code == GE && integer_zerop (arg1)))
10297 else if (BRANCH_COST >= 0
10298 && ! only_cheap && (code == NE || code == EQ)
10299 && TREE_CODE (type) != REAL_TYPE
10300 && ((abs_optab->handlers[(int) operand_mode].insn_code
10301 != CODE_FOR_nothing)
10302 || (ffs_optab->handlers[(int) operand_mode].insn_code
10303 != CODE_FOR_nothing)))
10309 preexpand_calls (exp);
10310 if (! get_subtarget (target)
10311 || GET_MODE (subtarget) != operand_mode
10312 || ! safe_from_p (subtarget, arg1, 1))
10315 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10316 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10319 target = gen_reg_rtx (mode);
10321 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10322 because, if the emit_store_flag does anything it will succeed and
10323 OP0 and OP1 will not be used subsequently. */
10325 result = emit_store_flag (target, code,
10326 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10327 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10328 operand_mode, unsignedp, 1);
10333 result = expand_binop (mode, xor_optab, result, const1_rtx,
10334 result, 0, OPTAB_LIB_WIDEN);
10338 /* If this failed, we have to do this with set/compare/jump/set code. */
10339 if (GET_CODE (target) != REG
10340 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10341 target = gen_reg_rtx (GET_MODE (target));
10343 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10344 result = compare_from_rtx (op0, op1, code, unsignedp,
10345 operand_mode, NULL_RTX, 0);
10346 if (GET_CODE (result) == CONST_INT)
10347 return (((result == const0_rtx && ! invert)
10348 || (result != const0_rtx && invert))
10349 ? const0_rtx : const1_rtx);
10351 label = gen_label_rtx ();
10352 if (bcc_gen_fctn[(int) code] == 0)
10355 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10356 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10357 emit_label (label);
10362 /* Generate a tablejump instruction (used for switch statements). */
10364 #ifdef HAVE_tablejump
10366 /* INDEX is the value being switched on, with the lowest value
10367 in the table already subtracted.
10368 MODE is its expected mode (needed if INDEX is constant).
10369 RANGE is the length of the jump table.
10370 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10372 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10373 index value is out of range. */
10376 do_tablejump (index, mode, range, table_label, default_label)
10377 rtx index, range, table_label, default_label;
10378 enum machine_mode mode;
10380 register rtx temp, vector;
10382 /* Do an unsigned comparison (in the proper mode) between the index
10383 expression and the value which represents the length of the range.
10384 Since we just finished subtracting the lower bound of the range
10385 from the index expression, this comparison allows us to simultaneously
10386 check that the original index expression value is both greater than
10387 or equal to the minimum value of the range and less than or equal to
10388 the maximum value of the range. */
10390 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10393 /* If index is in range, it must fit in Pmode.
10394 Convert to Pmode so we can index with it. */
10396 index = convert_to_mode (Pmode, index, 1);
10398 /* Don't let a MEM slip thru, because then INDEX that comes
10399 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10400 and break_out_memory_refs will go to work on it and mess it up. */
10401 #ifdef PIC_CASE_VECTOR_ADDRESS
10402 if (flag_pic && GET_CODE (index) != REG)
10403 index = copy_to_mode_reg (Pmode, index);
10406 /* If flag_force_addr were to affect this address
10407 it could interfere with the tricky assumptions made
10408 about addresses that contain label-refs,
10409 which may be valid only very near the tablejump itself. */
10410 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10411 GET_MODE_SIZE, because this indicates how large insns are. The other
10412 uses should all be Pmode, because they are addresses. This code
10413 could fail if addresses and insns are not the same size. */
10414 index = gen_rtx_PLUS (Pmode,
10415 gen_rtx_MULT (Pmode, index,
10416 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10417 gen_rtx_LABEL_REF (Pmode, table_label));
10418 #ifdef PIC_CASE_VECTOR_ADDRESS
10420 index = PIC_CASE_VECTOR_ADDRESS (index);
10423 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10424 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10425 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10426 RTX_UNCHANGING_P (vector) = 1;
10427 convert_move (temp, vector, 0);
10429 emit_jump_insn (gen_tablejump (temp, table_label));
10431 /* If we are generating PIC code or if the table is PC-relative, the
10432 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10433 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10437 #endif /* HAVE_tablejump */