1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-flags.h"
29 #include "insn-codes.h"
31 #include "insn-config.h"
34 #include "typeclass.h"
37 #include "bc-opcode.h"
38 #include "bc-typecd.h"
43 #define CEIL(x,y) (((x) + (y) - 1) / (y))
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
63 #define STACK_PUSH_CODE PRE_INC
67 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
68 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
70 /* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
78 /* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero. */
81 int do_preexpand_calls = 1;
83 /* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85 int pending_stack_adjust;
87 /* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well. */
91 int inhibit_defer_pop;
93 /* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call. */
95 tree cleanups_this_call;
97 /* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs
100 static rtx saveregs_value;
102 /* Similarly for __builtin_apply_args. */
103 static rtx apply_args_value;
105 /* This structure is used by move_by_pieces to describe the move to
108 struct move_by_pieces
117 int explicit_inc_from;
123 /* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
128 extern int stack_depth;
129 extern int max_stack_depth;
130 extern struct obstack permanent_obstack;
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int queued_subexp_p PROTO((rtx));
135 static void init_queue PROTO((void));
136 static void move_by_pieces PROTO((rtx, rtx, int, int));
137 static int move_by_pieces_ninsns PROTO((unsigned int, int));
138 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
140 static void group_insns PROTO((rtx));
141 static void store_constructor PROTO((tree, rtx));
142 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
143 enum machine_mode, int, int, int));
144 static tree save_noncopied_parts PROTO((tree, tree));
145 static tree init_noncopied_parts PROTO((tree, tree));
146 static int safe_from_p PROTO((rtx, tree));
147 static int fixed_type_p PROTO((tree));
148 static int get_pointer_alignment PROTO((tree, unsigned));
149 static tree string_constant PROTO((tree, tree *));
150 static tree c_strlen PROTO((tree));
151 static rtx expand_builtin PROTO((tree, rtx, rtx,
152 enum machine_mode, int));
153 static int apply_args_size PROTO((void));
154 static int apply_result_size PROTO((void));
155 static rtx result_vector PROTO((int, rtx));
156 static rtx expand_builtin_apply_args PROTO((void));
157 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
158 static void expand_builtin_return PROTO((rtx));
159 static rtx expand_increment PROTO((tree, int));
160 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
161 tree bc_runtime_type_code PROTO((tree));
162 rtx bc_allocate_local PROTO((int, int));
163 void bc_store_memory PROTO((tree, tree));
164 tree bc_expand_component_address PROTO((tree));
165 tree bc_expand_address PROTO((tree));
166 void bc_expand_constructor PROTO((tree));
167 void bc_adjust_stack PROTO((int));
168 tree bc_canonicalize_array_ref PROTO((tree));
169 void bc_load_memory PROTO((tree, tree));
170 void bc_load_externaddr PROTO((rtx));
171 void bc_load_externaddr_id PROTO((tree, int));
172 void bc_load_localaddr PROTO((rtx));
173 void bc_load_parmaddr PROTO((rtx));
174 static void preexpand_calls PROTO((tree));
175 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
176 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
177 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
178 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
179 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
180 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
181 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* MOVE_RATIO is the number of move instructions that is better than
194 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
197 /* A value of around 6 would minimize code size; infinity would minimize
199 #define MOVE_RATIO 15
203 /* This array records the insn_code of insns to perform block moves. */
204 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
208 #ifndef SLOW_UNALIGNED_ACCESS
209 #define SLOW_UNALIGNED_ACCESS 0
212 /* Register mappings for target machines without register windows. */
213 #ifndef INCOMING_REGNO
214 #define INCOMING_REGNO(OUT) (OUT)
216 #ifndef OUTGOING_REGNO
217 #define OUTGOING_REGNO(IN) (IN)
220 /* Maps used to convert modes to const, load, and store bytecodes. */
221 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
222 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
223 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
225 /* Initialize maps used to convert modes to const, load, and store
228 bc_init_mode_to_opcode_maps ()
232 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
233 mode_to_const_map[mode] =
234 mode_to_load_map[mode] =
235 mode_to_store_map[mode] = neverneverland;
237 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
238 mode_to_const_map[(int) SYM] = CONST; \
239 mode_to_load_map[(int) SYM] = LOAD; \
240 mode_to_store_map[(int) SYM] = STORE;
242 #include "modemap.def"
246 /* This is run once per compilation to set up which modes can be used
247 directly in memory and to initialize the block move optab. */
253 enum machine_mode mode;
254 /* Try indexing by frame ptr and try by stack ptr.
255 It is known that on the Convex the stack ptr isn't a valid index.
256 With luck, one or the other is valid on any machine. */
257 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
258 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
261 insn = emit_insn (gen_rtx (SET, 0, 0));
262 pat = PATTERN (insn);
264 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
265 mode = (enum machine_mode) ((int) mode + 1))
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 if (! HARD_REGNO_MODE_OK (regno, mode))
286 reg = gen_rtx (REG, mode, regno);
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
313 /* This is run at the start of compiling a function. */
320 pending_stack_adjust = 0;
321 inhibit_defer_pop = 0;
322 cleanups_this_call = 0;
324 apply_args_value = 0;
328 /* Save all variables describing the current status into the structure *P.
329 This is used before starting a nested function. */
335 /* Instead of saving the postincrement queue, empty it. */
338 p->pending_stack_adjust = pending_stack_adjust;
339 p->inhibit_defer_pop = inhibit_defer_pop;
340 p->cleanups_this_call = cleanups_this_call;
341 p->saveregs_value = saveregs_value;
342 p->apply_args_value = apply_args_value;
343 p->forced_labels = forced_labels;
345 pending_stack_adjust = 0;
346 inhibit_defer_pop = 0;
347 cleanups_this_call = 0;
349 apply_args_value = 0;
353 /* Restore all variables describing the current status from the structure *P.
354 This is used after a nested function. */
357 restore_expr_status (p)
360 pending_stack_adjust = p->pending_stack_adjust;
361 inhibit_defer_pop = p->inhibit_defer_pop;
362 cleanups_this_call = p->cleanups_this_call;
363 saveregs_value = p->saveregs_value;
364 apply_args_value = p->apply_args_value;
365 forced_labels = p->forced_labels;
368 /* Manage the queue of increment instructions to be output
369 for POSTINCREMENT_EXPR expressions, etc. */
371 static rtx pending_chain;
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
381 enqueue_insn (var, body)
384 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
385 var, NULL_RTX, NULL_RTX, body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
405 protect_from_queue (x, modify)
409 register RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
419 /* A special hack for read access to (MEM (QUEUED ...))
420 to facilitate use of autoincrement.
421 Make a copy of the contents of the memory location
422 rather than a copy of the address, but not
423 if the value is of mode BLKmode. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 register rtx y = XEXP (x, 0);
428 XEXP (x, 0) = QUEUED_VAR (y);
431 register rtx temp = gen_reg_rtx (GET_MODE (x));
432 emit_insn_before (gen_move_insn (temp, x),
438 /* Otherwise, recursively protect the subexpressions of all
439 the kinds of rtx's that can contain a QUEUED. */
442 rtx tem = protect_from_queue (XEXP (x, 0), 0);
443 if (tem != XEXP (x, 0))
449 else if (code == PLUS || code == MULT)
451 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
452 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
453 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
462 /* If the increment has not happened, use the variable itself. */
463 if (QUEUED_INSN (x) == 0)
464 return QUEUED_VAR (x);
465 /* If the increment has happened and a pre-increment copy exists,
467 if (QUEUED_COPY (x) != 0)
468 return QUEUED_COPY (x);
469 /* The increment has happened but we haven't set up a pre-increment copy.
470 Set one up now, and use it. */
471 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
472 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
474 return QUEUED_COPY (x);
477 /* Return nonzero if X contains a QUEUED expression:
478 if it contains anything that will be altered by a queued increment.
479 We handle only combinations of MEM, PLUS, MINUS and MULT operators
480 since memory addresses generally contain only those. */
486 register enum rtx_code code = GET_CODE (x);
492 return queued_subexp_p (XEXP (x, 0));
496 return queued_subexp_p (XEXP (x, 0))
497 || queued_subexp_p (XEXP (x, 1));
502 /* Perform all the pending incrementations. */
508 while (p = pending_chain)
510 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
511 pending_chain = QUEUED_NEXT (p);
522 /* Copy data from FROM to TO, where the machine modes are not the same.
523 Both modes may be integer, or both may be floating.
524 UNSIGNEDP should be nonzero if FROM is an unsigned type.
525 This causes zero-extension instead of sign-extension. */
528 convert_move (to, from, unsignedp)
529 register rtx to, from;
532 enum machine_mode to_mode = GET_MODE (to);
533 enum machine_mode from_mode = GET_MODE (from);
534 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
535 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
539 /* rtx code for making an equivalent value. */
540 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
542 to = protect_from_queue (to, 1);
543 from = protect_from_queue (from, 0);
545 if (to_real != from_real)
548 /* If FROM is a SUBREG that indicates that we have already done at least
549 the required extension, strip it. We don't handle such SUBREGs as
552 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
553 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
554 >= GET_MODE_SIZE (to_mode))
555 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
556 from = gen_lowpart (to_mode, from), from_mode = to_mode;
558 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
561 if (to_mode == from_mode
562 || (from_mode == VOIDmode && CONSTANT_P (from)))
564 emit_move_insn (to, from);
572 #ifdef HAVE_extendqfhf2
573 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
575 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
579 #ifdef HAVE_extendqfsf2
580 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
582 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
586 #ifdef HAVE_extendqfdf2
587 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
589 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
593 #ifdef HAVE_extendqfxf2
594 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
596 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
600 #ifdef HAVE_extendqftf2
601 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
603 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
608 #ifdef HAVE_extendhfsf2
609 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
611 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
615 #ifdef HAVE_extendhfdf2
616 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
618 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
622 #ifdef HAVE_extendhfxf2
623 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
625 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
629 #ifdef HAVE_extendhftf2
630 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
632 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
637 #ifdef HAVE_extendsfdf2
638 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
640 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
644 #ifdef HAVE_extendsfxf2
645 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
647 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
651 #ifdef HAVE_extendsftf2
652 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
654 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
658 #ifdef HAVE_extenddfxf2
659 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
661 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
665 #ifdef HAVE_extenddftf2
666 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
668 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
673 #ifdef HAVE_trunchfqf2
674 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
676 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
680 #ifdef HAVE_truncsfqf2
681 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
683 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
687 #ifdef HAVE_truncdfqf2
688 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
690 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
694 #ifdef HAVE_truncxfqf2
695 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
697 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
701 #ifdef HAVE_trunctfqf2
702 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
704 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
708 #ifdef HAVE_truncsfhf2
709 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
711 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
715 #ifdef HAVE_truncdfhf2
716 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
718 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
722 #ifdef HAVE_truncxfhf2
723 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
725 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
729 #ifdef HAVE_trunctfhf2
730 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
732 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
736 #ifdef HAVE_truncdfsf2
737 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
739 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
743 #ifdef HAVE_truncxfsf2
744 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
746 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
750 #ifdef HAVE_trunctfsf2
751 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
753 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
757 #ifdef HAVE_truncxfdf2
758 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
760 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
764 #ifdef HAVE_trunctfdf2
765 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
767 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
779 libcall = extendsfdf2_libfunc;
783 libcall = extendsfxf2_libfunc;
787 libcall = extendsftf2_libfunc;
796 libcall = truncdfsf2_libfunc;
800 libcall = extenddfxf2_libfunc;
804 libcall = extenddftf2_libfunc;
813 libcall = truncxfsf2_libfunc;
817 libcall = truncxfdf2_libfunc;
826 libcall = trunctfsf2_libfunc;
830 libcall = trunctfdf2_libfunc;
836 if (libcall == (rtx) 0)
837 /* This conversion is not implemented yet. */
840 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
842 emit_move_insn (to, value);
846 /* Now both modes are integers. */
848 /* Handle expanding beyond a word. */
849 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
850 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
857 enum machine_mode lowpart_mode;
858 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
860 /* Try converting directly if the insn is supported. */
861 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
864 /* If FROM is a SUBREG, put it into a register. Do this
865 so that we always generate the same set of insns for
866 better cse'ing; if an intermediate assignment occurred,
867 we won't be doing the operation directly on the SUBREG. */
868 if (optimize > 0 && GET_CODE (from) == SUBREG)
869 from = force_reg (from_mode, from);
870 emit_unop_insn (code, to, from, equiv_code);
873 /* Next, try converting via full word. */
874 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
875 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
876 != CODE_FOR_nothing))
878 if (GET_CODE (to) == REG)
879 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
880 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
881 emit_unop_insn (code, to,
882 gen_lowpart (word_mode, to), equiv_code);
886 /* No special multiword conversion insn; do it by hand. */
889 /* Get a copy of FROM widened to a word, if necessary. */
890 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
891 lowpart_mode = word_mode;
893 lowpart_mode = from_mode;
895 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
897 lowpart = gen_lowpart (lowpart_mode, to);
898 emit_move_insn (lowpart, lowfrom);
900 /* Compute the value to put in each remaining word. */
902 fill_value = const0_rtx;
907 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
908 && STORE_FLAG_VALUE == -1)
910 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
912 fill_value = gen_reg_rtx (word_mode);
913 emit_insn (gen_slt (fill_value));
919 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
920 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
922 fill_value = convert_to_mode (word_mode, fill_value, 1);
926 /* Fill the remaining words. */
927 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
929 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
930 rtx subword = operand_subword (to, index, 1, to_mode);
935 if (fill_value != subword)
936 emit_move_insn (subword, fill_value);
939 insns = get_insns ();
942 emit_no_conflict_block (insns, to, from, NULL_RTX,
943 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
947 /* Truncating multi-word to a word or less. */
948 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
949 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
951 if (!((GET_CODE (from) == MEM
952 && ! MEM_VOLATILE_P (from)
953 && direct_load[(int) to_mode]
954 && ! mode_dependent_address_p (XEXP (from, 0)))
955 || GET_CODE (from) == REG
956 || GET_CODE (from) == SUBREG))
957 from = force_reg (from_mode, from);
958 convert_move (to, gen_lowpart (word_mode, from), 0);
962 /* Handle pointer conversion */ /* SPEE 900220 */
963 if (to_mode == PSImode)
965 if (from_mode != SImode)
966 from = convert_to_mode (SImode, from, unsignedp);
968 #ifdef HAVE_truncsipsi
971 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
974 #endif /* HAVE_truncsipsi */
978 if (from_mode == PSImode)
980 if (to_mode != SImode)
982 from = convert_to_mode (SImode, from, unsignedp);
987 #ifdef HAVE_extendpsisi
988 if (HAVE_extendpsisi)
990 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
993 #endif /* HAVE_extendpsisi */
998 /* Now follow all the conversions between integers
999 no more than a word long. */
1001 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1002 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1003 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1004 GET_MODE_BITSIZE (from_mode)))
1006 if (!((GET_CODE (from) == MEM
1007 && ! MEM_VOLATILE_P (from)
1008 && direct_load[(int) to_mode]
1009 && ! mode_dependent_address_p (XEXP (from, 0)))
1010 || GET_CODE (from) == REG
1011 || GET_CODE (from) == SUBREG))
1012 from = force_reg (from_mode, from);
1013 emit_move_insn (to, gen_lowpart (to_mode, from));
1017 /* Handle extension. */
1018 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1020 /* Convert directly if that works. */
1021 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1022 != CODE_FOR_nothing)
1024 /* If FROM is a SUBREG, put it into a register. Do this
1025 so that we always generate the same set of insns for
1026 better cse'ing; if an intermediate assignment occurred,
1027 we won't be doing the operation directly on the SUBREG. */
1028 if (optimize > 0 && GET_CODE (from) == SUBREG)
1029 from = force_reg (from_mode, from);
1030 emit_unop_insn (code, to, from, equiv_code);
1035 enum machine_mode intermediate;
1037 /* Search for a mode to convert via. */
1038 for (intermediate = from_mode; intermediate != VOIDmode;
1039 intermediate = GET_MODE_WIDER_MODE (intermediate))
1040 if ((can_extend_p (to_mode, intermediate, unsignedp)
1041 != CODE_FOR_nothing)
1042 && (can_extend_p (intermediate, from_mode, unsignedp)
1043 != CODE_FOR_nothing))
1045 convert_move (to, convert_to_mode (intermediate, from,
1046 unsignedp), unsignedp);
1050 /* No suitable intermediate mode. */
1055 /* Support special truncate insns for certain modes. */
1057 if (from_mode == DImode && to_mode == SImode)
1059 #ifdef HAVE_truncdisi2
1060 if (HAVE_truncdisi2)
1062 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1066 convert_move (to, force_reg (from_mode, from), unsignedp);
1070 if (from_mode == DImode && to_mode == HImode)
1072 #ifdef HAVE_truncdihi2
1073 if (HAVE_truncdihi2)
1075 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1079 convert_move (to, force_reg (from_mode, from), unsignedp);
1083 if (from_mode == DImode && to_mode == QImode)
1085 #ifdef HAVE_truncdiqi2
1086 if (HAVE_truncdiqi2)
1088 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1092 convert_move (to, force_reg (from_mode, from), unsignedp);
1096 if (from_mode == SImode && to_mode == HImode)
1098 #ifdef HAVE_truncsihi2
1099 if (HAVE_truncsihi2)
1101 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1105 convert_move (to, force_reg (from_mode, from), unsignedp);
1109 if (from_mode == SImode && to_mode == QImode)
1111 #ifdef HAVE_truncsiqi2
1112 if (HAVE_truncsiqi2)
1114 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1118 convert_move (to, force_reg (from_mode, from), unsignedp);
1122 if (from_mode == HImode && to_mode == QImode)
1124 #ifdef HAVE_trunchiqi2
1125 if (HAVE_trunchiqi2)
1127 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1131 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 /* Handle truncation of volatile memrefs, and so on;
1136 the things that couldn't be truncated directly,
1137 and for which there was no special instruction. */
1138 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1140 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1141 emit_move_insn (to, temp);
1145 /* Mode combination is not recognized. */
1149 /* Return an rtx for a value that would result
1150 from converting X to mode MODE.
1151 Both X and MODE may be floating, or both integer.
1152 UNSIGNEDP is nonzero if X is an unsigned value.
1153 This can be done by referring to a part of X in place
1154 or by copying to a new temporary with conversion.
1156 This function *must not* call protect_from_queue
1157 except when putting X into an insn (in which case convert_move does it). */
1160 convert_to_mode (mode, x, unsignedp)
1161 enum machine_mode mode;
1165 return convert_modes (mode, VOIDmode, x, unsignedp);
1168 /* Return an rtx for a value that would result
1169 from converting X from mode OLDMODE to mode MODE.
1170 Both modes may be floating, or both integer.
1171 UNSIGNEDP is nonzero if X is an unsigned value.
1173 This can be done by referring to a part of X in place
1174 or by copying to a new temporary with conversion.
1176 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1178 This function *must not* call protect_from_queue
1179 except when putting X into an insn (in which case convert_move does it). */
1182 convert_modes (mode, oldmode, x, unsignedp)
1183 enum machine_mode mode, oldmode;
1189 /* If FROM is a SUBREG that indicates that we have already done at least
1190 the required extension, strip it. */
1192 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1193 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1194 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1195 x = gen_lowpart (mode, x);
1197 if (GET_MODE (x) != VOIDmode)
1198 oldmode = GET_MODE (x);
1200 if (mode == oldmode)
1203 /* There is one case that we must handle specially: If we are converting
1204 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1205 we are to interpret the constant as unsigned, gen_lowpart will do
1206 the wrong if the constant appears negative. What we want to do is
1207 make the high-order word of the constant zero, not all ones. */
1209 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1210 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1211 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1212 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1214 /* We can do this with a gen_lowpart if both desired and current modes
1215 are integer, and this is either a constant integer, a register, or a
1216 non-volatile MEM. Except for the constant case where MODE is no
1217 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1219 if ((GET_CODE (x) == CONST_INT
1220 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1221 || (GET_MODE_CLASS (mode) == MODE_INT
1222 && GET_MODE_CLASS (oldmode) == MODE_INT
1223 && (GET_CODE (x) == CONST_DOUBLE
1224 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1225 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1226 && direct_load[(int) mode])
1227 || (GET_CODE (x) == REG
1228 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1229 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1231 /* ?? If we don't know OLDMODE, we have to assume here that
1232 X does not need sign- or zero-extension. This may not be
1233 the case, but it's the best we can do. */
1234 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1235 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1237 HOST_WIDE_INT val = INTVAL (x);
1238 int width = GET_MODE_BITSIZE (oldmode);
1240 /* We must sign or zero-extend in this case. Start by
1241 zero-extending, then sign extend if we need to. */
1242 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1244 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1245 val |= (HOST_WIDE_INT) (-1) << width;
1247 return GEN_INT (val);
1250 return gen_lowpart (mode, x);
1253 temp = gen_reg_rtx (mode);
1254 convert_move (temp, x, unsignedp);
1258 /* Generate several move instructions to copy LEN bytes
1259 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1260 The caller must pass FROM and TO
1261 through protect_from_queue before calling.
1262 ALIGN (in bytes) is maximum alignment we can assume. */
1265 move_by_pieces (to, from, len, align)
1269 struct move_by_pieces data;
1270 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1271 int max_size = MOVE_MAX + 1;
1274 data.to_addr = to_addr;
1275 data.from_addr = from_addr;
1279 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1280 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1282 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1283 || GET_CODE (from_addr) == POST_INC
1284 || GET_CODE (from_addr) == POST_DEC);
1286 data.explicit_inc_from = 0;
1287 data.explicit_inc_to = 0;
1289 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1290 if (data.reverse) data.offset = len;
1293 /* If copying requires more than two move insns,
1294 copy addresses to registers (to make displacements shorter)
1295 and use post-increment if available. */
1296 if (!(data.autinc_from && data.autinc_to)
1297 && move_by_pieces_ninsns (len, align) > 2)
1299 #ifdef HAVE_PRE_DECREMENT
1300 if (data.reverse && ! data.autinc_from)
1302 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1303 data.autinc_from = 1;
1304 data.explicit_inc_from = -1;
1307 #ifdef HAVE_POST_INCREMENT
1308 if (! data.autinc_from)
1310 data.from_addr = copy_addr_to_reg (from_addr);
1311 data.autinc_from = 1;
1312 data.explicit_inc_from = 1;
1315 if (!data.autinc_from && CONSTANT_P (from_addr))
1316 data.from_addr = copy_addr_to_reg (from_addr);
1317 #ifdef HAVE_PRE_DECREMENT
1318 if (data.reverse && ! data.autinc_to)
1320 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1322 data.explicit_inc_to = -1;
1325 #ifdef HAVE_POST_INCREMENT
1326 if (! data.reverse && ! data.autinc_to)
1328 data.to_addr = copy_addr_to_reg (to_addr);
1330 data.explicit_inc_to = 1;
1333 if (!data.autinc_to && CONSTANT_P (to_addr))
1334 data.to_addr = copy_addr_to_reg (to_addr);
1337 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1338 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1341 /* First move what we can in the largest integer mode, then go to
1342 successively smaller modes. */
1344 while (max_size > 1)
1346 enum machine_mode mode = VOIDmode, tmode;
1347 enum insn_code icode;
1349 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1350 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1351 if (GET_MODE_SIZE (tmode) < max_size)
1354 if (mode == VOIDmode)
1357 icode = mov_optab->handlers[(int) mode].insn_code;
1358 if (icode != CODE_FOR_nothing
1359 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1360 GET_MODE_SIZE (mode)))
1361 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1363 max_size = GET_MODE_SIZE (mode);
1366 /* The code above should have handled everything. */
1371 /* Return number of insns required to move L bytes by pieces.
1372 ALIGN (in bytes) is maximum alignment we can assume. */
1375 move_by_pieces_ninsns (l, align)
1379 register int n_insns = 0;
1380 int max_size = MOVE_MAX + 1;
1382 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1383 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1386 while (max_size > 1)
1388 enum machine_mode mode = VOIDmode, tmode;
1389 enum insn_code icode;
1391 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1392 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1393 if (GET_MODE_SIZE (tmode) < max_size)
1396 if (mode == VOIDmode)
1399 icode = mov_optab->handlers[(int) mode].insn_code;
1400 if (icode != CODE_FOR_nothing
1401 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1402 GET_MODE_SIZE (mode)))
1403 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1405 max_size = GET_MODE_SIZE (mode);
1411 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1412 with move instructions for mode MODE. GENFUN is the gen_... function
1413 to make a move insn for that mode. DATA has all the other info. */
1416 move_by_pieces_1 (genfun, mode, data)
1418 enum machine_mode mode;
1419 struct move_by_pieces *data;
1421 register int size = GET_MODE_SIZE (mode);
1422 register rtx to1, from1;
1424 while (data->len >= size)
1426 if (data->reverse) data->offset -= size;
1428 to1 = (data->autinc_to
1429 ? gen_rtx (MEM, mode, data->to_addr)
1430 : change_address (data->to, mode,
1431 plus_constant (data->to_addr, data->offset)));
1434 ? gen_rtx (MEM, mode, data->from_addr)
1435 : change_address (data->from, mode,
1436 plus_constant (data->from_addr, data->offset)));
1438 #ifdef HAVE_PRE_DECREMENT
1439 if (data->explicit_inc_to < 0)
1440 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1441 if (data->explicit_inc_from < 0)
1442 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1445 emit_insn ((*genfun) (to1, from1));
1446 #ifdef HAVE_POST_INCREMENT
1447 if (data->explicit_inc_to > 0)
1448 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1449 if (data->explicit_inc_from > 0)
1450 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1453 if (! data->reverse) data->offset += size;
1459 /* Emit code to move a block Y to a block X.
1460 This may be done with string-move instructions,
1461 with multiple scalar move instructions, or with a library call.
1463 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1465 SIZE is an rtx that says how long they are.
1466 ALIGN is the maximum alignment we can assume they have,
1467 measured in bytes. */
1470 emit_block_move (x, y, size, align)
1475 if (GET_MODE (x) != BLKmode)
1478 if (GET_MODE (y) != BLKmode)
1481 x = protect_from_queue (x, 1);
1482 y = protect_from_queue (y, 0);
1483 size = protect_from_queue (size, 0);
1485 if (GET_CODE (x) != MEM)
1487 if (GET_CODE (y) != MEM)
1492 if (GET_CODE (size) == CONST_INT
1493 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1494 move_by_pieces (x, y, INTVAL (size), align);
1497 /* Try the most limited insn first, because there's no point
1498 including more than one in the machine description unless
1499 the more limited one has some advantage. */
1501 rtx opalign = GEN_INT (align);
1502 enum machine_mode mode;
1504 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1505 mode = GET_MODE_WIDER_MODE (mode))
1507 enum insn_code code = movstr_optab[(int) mode];
1509 if (code != CODE_FOR_nothing
1510 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1511 here because if SIZE is less than the mode mask, as it is
1512 returned by the macro, it will definitely be less than the
1513 actual mode mask. */
1514 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1515 && (insn_operand_predicate[(int) code][0] == 0
1516 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1517 && (insn_operand_predicate[(int) code][1] == 0
1518 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1519 && (insn_operand_predicate[(int) code][3] == 0
1520 || (*insn_operand_predicate[(int) code][3]) (opalign,
1524 rtx last = get_last_insn ();
1527 op2 = convert_to_mode (mode, size, 1);
1528 if (insn_operand_predicate[(int) code][2] != 0
1529 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1530 op2 = copy_to_mode_reg (mode, op2);
1532 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1539 delete_insns_since (last);
1543 #ifdef TARGET_MEM_FUNCTIONS
1544 emit_library_call (memcpy_libfunc, 0,
1545 VOIDmode, 3, XEXP (x, 0), Pmode,
1547 convert_to_mode (TYPE_MODE (sizetype), size,
1548 TREE_UNSIGNED (sizetype)),
1549 TYPE_MODE (sizetype));
1551 emit_library_call (bcopy_libfunc, 0,
1552 VOIDmode, 3, XEXP (y, 0), Pmode,
1554 convert_to_mode (TYPE_MODE (sizetype), size,
1555 TREE_UNSIGNED (sizetype)),
1556 TYPE_MODE (sizetype));
1561 /* Copy all or part of a value X into registers starting at REGNO.
1562 The number of registers to be filled is NREGS. */
1565 move_block_to_reg (regno, x, nregs, mode)
1569 enum machine_mode mode;
1574 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1575 x = validize_mem (force_const_mem (mode, x));
1577 /* See if the machine can do this with a load multiple insn. */
1578 #ifdef HAVE_load_multiple
1579 if (HAVE_load_multiple)
1581 last = get_last_insn ();
1582 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1590 delete_insns_since (last);
1594 for (i = 0; i < nregs; i++)
1595 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1596 operand_subword_force (x, i, mode));
1599 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1600 The number of registers to be filled is NREGS. SIZE indicates the number
1601 of bytes in the object X. */
1605 move_block_from_reg (regno, x, nregs, size)
1614 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1615 to the left before storing to memory. */
1616 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1618 rtx tem = operand_subword (x, 0, 1, BLKmode);
1624 shift = expand_shift (LSHIFT_EXPR, word_mode,
1625 gen_rtx (REG, word_mode, regno),
1626 build_int_2 ((UNITS_PER_WORD - size)
1627 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1628 emit_move_insn (tem, shift);
1632 /* See if the machine can do this with a store multiple insn. */
1633 #ifdef HAVE_store_multiple
1634 if (HAVE_store_multiple)
1636 last = get_last_insn ();
1637 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1645 delete_insns_since (last);
1649 for (i = 0; i < nregs; i++)
1651 rtx tem = operand_subword (x, i, 1, BLKmode);
1656 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1660 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1663 use_regs (regno, nregs)
1669 for (i = 0; i < nregs; i++)
1670 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1673 /* Mark the instructions since PREV as a libcall block.
1674 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1683 /* Find the instructions to mark */
1685 insn_first = NEXT_INSN (prev);
1687 insn_first = get_insns ();
1689 insn_last = get_last_insn ();
1691 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1692 REG_NOTES (insn_last));
1694 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1695 REG_NOTES (insn_first));
1698 /* Write zeros through the storage of OBJECT.
1699 If OBJECT has BLKmode, SIZE is its length in bytes. */
1702 clear_storage (object, size)
1706 if (GET_MODE (object) == BLKmode)
1708 #ifdef TARGET_MEM_FUNCTIONS
1709 emit_library_call (memset_libfunc, 0,
1711 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1712 GEN_INT (size), Pmode);
1714 emit_library_call (bzero_libfunc, 0,
1716 XEXP (object, 0), Pmode,
1717 GEN_INT (size), Pmode);
1721 emit_move_insn (object, const0_rtx);
1724 /* Generate code to copy Y into X.
1725 Both Y and X must have the same mode, except that
1726 Y can be a constant with VOIDmode.
1727 This mode cannot be BLKmode; use emit_block_move for that.
1729 Return the last instruction emitted. */
1732 emit_move_insn (x, y)
1735 enum machine_mode mode = GET_MODE (x);
1736 enum machine_mode submode;
1737 enum mode_class class = GET_MODE_CLASS (mode);
1740 x = protect_from_queue (x, 1);
1741 y = protect_from_queue (y, 0);
1743 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1746 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1747 y = force_const_mem (mode, y);
1749 /* If X or Y are memory references, verify that their addresses are valid
1751 if (GET_CODE (x) == MEM
1752 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1753 && ! push_operand (x, GET_MODE (x)))
1755 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1756 x = change_address (x, VOIDmode, XEXP (x, 0));
1758 if (GET_CODE (y) == MEM
1759 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1761 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1762 y = change_address (y, VOIDmode, XEXP (y, 0));
1764 if (mode == BLKmode)
1767 return emit_move_insn_1 (x, y);
1770 /* Low level part of emit_move_insn.
1771 Called just like emit_move_insn, but assumes X and Y
1772 are basically valid. */
1775 emit_move_insn_1 (x, y)
1778 enum machine_mode mode = GET_MODE (x);
1779 enum machine_mode submode;
1780 enum mode_class class = GET_MODE_CLASS (mode);
1783 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1784 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1785 (class == MODE_COMPLEX_INT
1786 ? MODE_INT : MODE_FLOAT),
1789 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1791 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1793 /* Expand complex moves by moving real part and imag part, if possible. */
1794 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1795 && submode != BLKmode
1796 && (mov_optab->handlers[(int) submode].insn_code
1797 != CODE_FOR_nothing))
1799 /* Don't split destination if it is a stack push. */
1800 int stack = push_operand (x, GET_MODE (x));
1801 rtx prev = get_last_insn ();
1803 /* Tell flow that the whole of the destination is being set. */
1804 if (GET_CODE (x) == REG)
1805 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1807 /* If this is a stack, push the highpart first, so it
1808 will be in the argument order.
1810 In that case, change_address is used only to convert
1811 the mode, not to change the address. */
1814 /* Note that the real part always precedes the imag part in memory
1815 regardless of machine's endianness. */
1816 #ifdef STACK_GROWS_DOWNWARD
1817 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1818 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1819 gen_imagpart (submode, y)));
1820 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1821 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1822 gen_realpart (submode, y)));
1824 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1825 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1826 gen_realpart (submode, y)));
1827 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1828 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1829 gen_imagpart (submode, y)));
1834 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1835 (gen_highpart (submode, x), gen_highpart (submode, y)));
1836 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1837 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1840 if (GET_CODE (x) != CONCAT)
1841 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1842 each with a separate pseudo as destination.
1843 It's not correct for flow to treat them as a unit. */
1846 return get_last_insn ();
1849 /* This will handle any multi-word mode that lacks a move_insn pattern.
1850 However, you will get better code if you define such patterns,
1851 even if they must turn into multiple assembler instructions. */
1852 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1855 rtx prev_insn = get_last_insn ();
1858 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1861 rtx xpart = operand_subword (x, i, 1, mode);
1862 rtx ypart = operand_subword (y, i, 1, mode);
1864 /* If we can't get a part of Y, put Y into memory if it is a
1865 constant. Otherwise, force it into a register. If we still
1866 can't get a part of Y, abort. */
1867 if (ypart == 0 && CONSTANT_P (y))
1869 y = force_const_mem (mode, y);
1870 ypart = operand_subword (y, i, 1, mode);
1872 else if (ypart == 0)
1873 ypart = operand_subword_force (y, i, mode);
1875 if (xpart == 0 || ypart == 0)
1878 last_insn = emit_move_insn (xpart, ypart);
1880 /* Mark these insns as a libcall block. */
1881 group_insns (prev_insn);
1889 /* Pushing data onto the stack. */
1891 /* Push a block of length SIZE (perhaps variable)
1892 and return an rtx to address the beginning of the block.
1893 Note that it is not possible for the value returned to be a QUEUED.
1894 The value may be virtual_outgoing_args_rtx.
1896 EXTRA is the number of bytes of padding to push in addition to SIZE.
1897 BELOW nonzero means this padding comes at low addresses;
1898 otherwise, the padding comes at high addresses. */
1901 push_block (size, extra, below)
1906 if (CONSTANT_P (size))
1907 anti_adjust_stack (plus_constant (size, extra));
1908 else if (GET_CODE (size) == REG && extra == 0)
1909 anti_adjust_stack (size);
1912 rtx temp = copy_to_mode_reg (Pmode, size);
1914 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1915 temp, 0, OPTAB_LIB_WIDEN);
1916 anti_adjust_stack (temp);
1919 #ifdef STACK_GROWS_DOWNWARD
1920 temp = virtual_outgoing_args_rtx;
1921 if (extra != 0 && below)
1922 temp = plus_constant (temp, extra);
1924 if (GET_CODE (size) == CONST_INT)
1925 temp = plus_constant (virtual_outgoing_args_rtx,
1926 - INTVAL (size) - (below ? 0 : extra));
1927 else if (extra != 0 && !below)
1928 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1929 negate_rtx (Pmode, plus_constant (size, extra)));
1931 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1932 negate_rtx (Pmode, size));
1935 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1941 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1944 /* Generate code to push X onto the stack, assuming it has mode MODE and
1946 MODE is redundant except when X is a CONST_INT (since they don't
1948 SIZE is an rtx for the size of data to be copied (in bytes),
1949 needed only if X is BLKmode.
1951 ALIGN (in bytes) is maximum alignment we can assume.
1953 If PARTIAL and REG are both nonzero, then copy that many of the first
1954 words of X into registers starting with REG, and push the rest of X.
1955 The amount of space pushed is decreased by PARTIAL words,
1956 rounded *down* to a multiple of PARM_BOUNDARY.
1957 REG must be a hard register in this case.
1958 If REG is zero but PARTIAL is not, take any all others actions for an
1959 argument partially in registers, but do not actually load any
1962 EXTRA is the amount in bytes of extra space to leave next to this arg.
1963 This is ignored if an argument block has already been allocated.
1965 On a machine that lacks real push insns, ARGS_ADDR is the address of
1966 the bottom of the argument block for this call. We use indexing off there
1967 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1968 argument block has not been preallocated.
1970 ARGS_SO_FAR is the size of args previously pushed for this call. */
1973 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1974 args_addr, args_so_far)
1976 enum machine_mode mode;
1987 enum direction stack_direction
1988 #ifdef STACK_GROWS_DOWNWARD
1994 /* Decide where to pad the argument: `downward' for below,
1995 `upward' for above, or `none' for don't pad it.
1996 Default is below for small data on big-endian machines; else above. */
1997 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1999 /* Invert direction if stack is post-update. */
2000 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2001 if (where_pad != none)
2002 where_pad = (where_pad == downward ? upward : downward);
2004 xinner = x = protect_from_queue (x, 0);
2006 if (mode == BLKmode)
2008 /* Copy a block into the stack, entirely or partially. */
2011 int used = partial * UNITS_PER_WORD;
2012 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2020 /* USED is now the # of bytes we need not copy to the stack
2021 because registers will take care of them. */
2024 xinner = change_address (xinner, BLKmode,
2025 plus_constant (XEXP (xinner, 0), used));
2027 /* If the partial register-part of the arg counts in its stack size,
2028 skip the part of stack space corresponding to the registers.
2029 Otherwise, start copying to the beginning of the stack space,
2030 by setting SKIP to 0. */
2031 #ifndef REG_PARM_STACK_SPACE
2037 #ifdef PUSH_ROUNDING
2038 /* Do it with several push insns if that doesn't take lots of insns
2039 and if there is no difficulty with push insns that skip bytes
2040 on the stack for alignment purposes. */
2042 && GET_CODE (size) == CONST_INT
2044 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2046 /* Here we avoid the case of a structure whose weak alignment
2047 forces many pushes of a small amount of data,
2048 and such small pushes do rounding that causes trouble. */
2049 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2050 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2051 || PUSH_ROUNDING (align) == align)
2052 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2054 /* Push padding now if padding above and stack grows down,
2055 or if padding below and stack grows up.
2056 But if space already allocated, this has already been done. */
2057 if (extra && args_addr == 0
2058 && where_pad != none && where_pad != stack_direction)
2059 anti_adjust_stack (GEN_INT (extra));
2061 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2062 INTVAL (size) - used, align);
2065 #endif /* PUSH_ROUNDING */
2067 /* Otherwise make space on the stack and copy the data
2068 to the address of that space. */
2070 /* Deduct words put into registers from the size we must copy. */
2073 if (GET_CODE (size) == CONST_INT)
2074 size = GEN_INT (INTVAL (size) - used);
2076 size = expand_binop (GET_MODE (size), sub_optab, size,
2077 GEN_INT (used), NULL_RTX, 0,
2081 /* Get the address of the stack space.
2082 In this case, we do not deal with EXTRA separately.
2083 A single stack adjust will do. */
2086 temp = push_block (size, extra, where_pad == downward);
2089 else if (GET_CODE (args_so_far) == CONST_INT)
2090 temp = memory_address (BLKmode,
2091 plus_constant (args_addr,
2092 skip + INTVAL (args_so_far)));
2094 temp = memory_address (BLKmode,
2095 plus_constant (gen_rtx (PLUS, Pmode,
2096 args_addr, args_so_far),
2099 /* TEMP is the address of the block. Copy the data there. */
2100 if (GET_CODE (size) == CONST_INT
2101 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2104 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2105 INTVAL (size), align);
2108 /* Try the most limited insn first, because there's no point
2109 including more than one in the machine description unless
2110 the more limited one has some advantage. */
2111 #ifdef HAVE_movstrqi
2113 && GET_CODE (size) == CONST_INT
2114 && ((unsigned) INTVAL (size)
2115 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2117 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2118 xinner, size, GEN_INT (align));
2126 #ifdef HAVE_movstrhi
2128 && GET_CODE (size) == CONST_INT
2129 && ((unsigned) INTVAL (size)
2130 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2132 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2133 xinner, size, GEN_INT (align));
2141 #ifdef HAVE_movstrsi
2144 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2145 xinner, size, GEN_INT (align));
2153 #ifdef HAVE_movstrdi
2156 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2157 xinner, size, GEN_INT (align));
2166 #ifndef ACCUMULATE_OUTGOING_ARGS
2167 /* If the source is referenced relative to the stack pointer,
2168 copy it to another register to stabilize it. We do not need
2169 to do this if we know that we won't be changing sp. */
2171 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2172 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2173 temp = copy_to_reg (temp);
2176 /* Make inhibit_defer_pop nonzero around the library call
2177 to force it to pop the bcopy-arguments right away. */
2179 #ifdef TARGET_MEM_FUNCTIONS
2180 emit_library_call (memcpy_libfunc, 0,
2181 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2182 convert_to_mode (TYPE_MODE (sizetype),
2183 size, TREE_UNSIGNED (sizetype)),
2184 TYPE_MODE (sizetype));
2186 emit_library_call (bcopy_libfunc, 0,
2187 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2188 convert_to_mode (TYPE_MODE (sizetype),
2189 size, TREE_UNSIGNED (sizetype)),
2190 TYPE_MODE (sizetype));
2195 else if (partial > 0)
2197 /* Scalar partly in registers. */
2199 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2202 /* # words of start of argument
2203 that we must make space for but need not store. */
2204 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2205 int args_offset = INTVAL (args_so_far);
2208 /* Push padding now if padding above and stack grows down,
2209 or if padding below and stack grows up.
2210 But if space already allocated, this has already been done. */
2211 if (extra && args_addr == 0
2212 && where_pad != none && where_pad != stack_direction)
2213 anti_adjust_stack (GEN_INT (extra));
2215 /* If we make space by pushing it, we might as well push
2216 the real data. Otherwise, we can leave OFFSET nonzero
2217 and leave the space uninitialized. */
2221 /* Now NOT_STACK gets the number of words that we don't need to
2222 allocate on the stack. */
2223 not_stack = partial - offset;
2225 /* If the partial register-part of the arg counts in its stack size,
2226 skip the part of stack space corresponding to the registers.
2227 Otherwise, start copying to the beginning of the stack space,
2228 by setting SKIP to 0. */
2229 #ifndef REG_PARM_STACK_SPACE
2235 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2236 x = validize_mem (force_const_mem (mode, x));
2238 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2239 SUBREGs of such registers are not allowed. */
2240 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2241 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2242 x = copy_to_reg (x);
2244 /* Loop over all the words allocated on the stack for this arg. */
2245 /* We can do it by words, because any scalar bigger than a word
2246 has a size a multiple of a word. */
2247 #ifndef PUSH_ARGS_REVERSED
2248 for (i = not_stack; i < size; i++)
2250 for (i = size - 1; i >= not_stack; i--)
2252 if (i >= not_stack + offset)
2253 emit_push_insn (operand_subword_force (x, i, mode),
2254 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2256 GEN_INT (args_offset + ((i - not_stack + skip)
2257 * UNITS_PER_WORD)));
2263 /* Push padding now if padding above and stack grows down,
2264 or if padding below and stack grows up.
2265 But if space already allocated, this has already been done. */
2266 if (extra && args_addr == 0
2267 && where_pad != none && where_pad != stack_direction)
2268 anti_adjust_stack (GEN_INT (extra));
2270 #ifdef PUSH_ROUNDING
2272 addr = gen_push_operand ();
2275 if (GET_CODE (args_so_far) == CONST_INT)
2277 = memory_address (mode,
2278 plus_constant (args_addr, INTVAL (args_so_far)));
2280 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2283 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2287 /* If part should go in registers, copy that part
2288 into the appropriate registers. Do this now, at the end,
2289 since mem-to-mem copies above may do function calls. */
2290 if (partial > 0 && reg != 0)
2291 move_block_to_reg (REGNO (reg), x, partial, mode);
2293 if (extra && args_addr == 0 && where_pad == stack_direction)
2294 anti_adjust_stack (GEN_INT (extra));
2297 /* Expand an assignment that stores the value of FROM into TO.
2298 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2299 (This may contain a QUEUED rtx;
2300 if the value is constant, this rtx is a constant.)
2301 Otherwise, the returned value is NULL_RTX.
2303 SUGGEST_REG is no longer actually used.
2304 It used to mean, copy the value through a register
2305 and return that register, if that is possible.
2306 We now use WANT_VALUE to decide whether to do this. */
2309 expand_assignment (to, from, want_value, suggest_reg)
2314 register rtx to_rtx = 0;
2317 /* Don't crash if the lhs of the assignment was erroneous. */
2319 if (TREE_CODE (to) == ERROR_MARK)
2321 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2322 return want_value ? result : NULL_RTX;
2325 if (output_bytecode)
2327 tree dest_innermost;
2329 bc_expand_expr (from);
2330 bc_emit_instruction (duplicate);
2332 dest_innermost = bc_expand_address (to);
2334 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2335 take care of it here. */
2337 bc_store_memory (TREE_TYPE (to), dest_innermost);
2341 /* Assignment of a structure component needs special treatment
2342 if the structure component's rtx is not simply a MEM.
2343 Assignment of an array element at a constant index
2344 has the same problem. */
2346 if (TREE_CODE (to) == COMPONENT_REF
2347 || TREE_CODE (to) == BIT_FIELD_REF
2348 || (TREE_CODE (to) == ARRAY_REF
2349 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2350 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2352 enum machine_mode mode1;
2362 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2363 &mode1, &unsignedp, &volatilep);
2365 /* If we are going to use store_bit_field and extract_bit_field,
2366 make sure to_rtx will be safe for multiple use. */
2368 if (mode1 == VOIDmode && want_value)
2369 tem = stabilize_reference (tem);
2371 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2372 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2375 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2377 if (GET_CODE (to_rtx) != MEM)
2379 to_rtx = change_address (to_rtx, VOIDmode,
2380 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2381 force_reg (Pmode, offset_rtx)));
2382 /* If we have a variable offset, the known alignment
2383 is only that of the innermost structure containing the field.
2384 (Actually, we could sometimes do better by using the
2385 align of an element of the innermost array, but no need.) */
2386 if (TREE_CODE (to) == COMPONENT_REF
2387 || TREE_CODE (to) == BIT_FIELD_REF)
2389 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2393 if (GET_CODE (to_rtx) == MEM)
2394 MEM_VOLATILE_P (to_rtx) = 1;
2395 #if 0 /* This was turned off because, when a field is volatile
2396 in an object which is not volatile, the object may be in a register,
2397 and then we would abort over here. */
2403 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2405 /* Spurious cast makes HPUX compiler happy. */
2406 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2409 /* Required alignment of containing datum. */
2411 int_size_in_bytes (TREE_TYPE (tem)));
2412 preserve_temp_slots (result);
2416 /* If the value is meaningful, convert RESULT to the proper mode.
2417 Otherwise, return nothing. */
2418 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2419 TYPE_MODE (TREE_TYPE (from)),
2421 TREE_UNSIGNED (TREE_TYPE (to)))
2425 /* If the rhs is a function call and its value is not an aggregate,
2426 call the function before we start to compute the lhs.
2427 This is needed for correct code for cases such as
2428 val = setjmp (buf) on machines where reference to val
2429 requires loading up part of an address in a separate insn.
2431 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2432 a promoted variable where the zero- or sign- extension needs to be done.
2433 Handling this in the normal way is safe because no computation is done
2435 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2436 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2441 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2443 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2444 emit_move_insn (to_rtx, value);
2445 preserve_temp_slots (to_rtx);
2448 return want_value ? to_rtx : NULL_RTX;
2451 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2452 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2455 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2457 /* Don't move directly into a return register. */
2458 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2463 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2464 emit_move_insn (to_rtx, temp);
2465 preserve_temp_slots (to_rtx);
2468 return want_value ? to_rtx : NULL_RTX;
2471 /* In case we are returning the contents of an object which overlaps
2472 the place the value is being stored, use a safe function when copying
2473 a value through a pointer into a structure value return block. */
2474 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2475 && current_function_returns_struct
2476 && !current_function_returns_pcc_struct)
2481 size = expr_size (from);
2482 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2484 #ifdef TARGET_MEM_FUNCTIONS
2485 emit_library_call (memcpy_libfunc, 0,
2486 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2487 XEXP (from_rtx, 0), Pmode,
2488 convert_to_mode (TYPE_MODE (sizetype),
2489 size, TREE_UNSIGNED (sizetype)),
2490 TYPE_MODE (sizetype));
2492 emit_library_call (bcopy_libfunc, 0,
2493 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2494 XEXP (to_rtx, 0), Pmode,
2495 convert_to_mode (TYPE_MODE (sizetype),
2496 size, TREE_UNSIGNED (sizetype)),
2497 TYPE_MODE (sizetype));
2500 preserve_temp_slots (to_rtx);
2503 return want_value ? to_rtx : NULL_RTX;
2506 /* Compute FROM and store the value in the rtx we got. */
2509 result = store_expr (from, to_rtx, want_value);
2510 preserve_temp_slots (result);
2513 return want_value ? result : NULL_RTX;
2516 /* Generate code for computing expression EXP,
2517 and storing the value into TARGET.
2518 TARGET may contain a QUEUED rtx.
2520 If WANT_VALUE is nonzero, return a copy of the value
2521 not in TARGET, so that we can be sure to use the proper
2522 value in a containing expression even if TARGET has something
2523 else stored in it. If possible, we copy the value through a pseudo
2524 and return that pseudo. Or, if the value is constant, we try to
2525 return the constant. In some cases, we return a pseudo
2526 copied *from* TARGET.
2528 If the mode is BLKmode then we may return TARGET itself.
2529 It turns out that in BLKmode it doesn't cause a problem.
2530 because C has no operators that could combine two different
2531 assignments into the same BLKmode object with different values
2532 with no sequence point. Will other languages need this to
2535 If WANT_VALUE is 0, we return NULL, to make sure
2536 to catch quickly any cases where the caller uses the value
2537 and fails to set WANT_VALUE. */
2540 store_expr (exp, target, want_value)
2542 register rtx target;
2546 int dont_return_target = 0;
2548 if (TREE_CODE (exp) == COMPOUND_EXPR)
2550 /* Perform first part of compound expression, then assign from second
2552 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2554 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2556 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2558 /* For conditional expression, get safe form of the target. Then
2559 test the condition, doing the appropriate assignment on either
2560 side. This avoids the creation of unnecessary temporaries.
2561 For non-BLKmode, it is more efficient not to do this. */
2563 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2566 target = protect_from_queue (target, 1);
2569 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2570 store_expr (TREE_OPERAND (exp, 1), target, 0);
2572 emit_jump_insn (gen_jump (lab2));
2575 store_expr (TREE_OPERAND (exp, 2), target, 0);
2579 return want_value ? target : NULL_RTX;
2581 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2582 && GET_MODE (target) != BLKmode)
2583 /* If target is in memory and caller wants value in a register instead,
2584 arrange that. Pass TARGET as target for expand_expr so that,
2585 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2586 We know expand_expr will not use the target in that case.
2587 Don't do this if TARGET is volatile because we are supposed
2588 to write it and then read it. */
2590 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2591 GET_MODE (target), 0);
2592 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2593 temp = copy_to_reg (temp);
2594 dont_return_target = 1;
2596 else if (queued_subexp_p (target))
2597 /* If target contains a postincrement, let's not risk
2598 using it as the place to generate the rhs. */
2600 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2602 /* Expand EXP into a new pseudo. */
2603 temp = gen_reg_rtx (GET_MODE (target));
2604 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2607 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2609 /* If target is volatile, ANSI requires accessing the value
2610 *from* the target, if it is accessed. So make that happen.
2611 In no case return the target itself. */
2612 if (! MEM_VOLATILE_P (target) && want_value)
2613 dont_return_target = 1;
2615 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2616 /* If this is an scalar in a register that is stored in a wider mode
2617 than the declared mode, compute the result into its declared mode
2618 and then convert to the wider mode. Our value is the computed
2621 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2623 /* If TEMP is a VOIDmode constant, use convert_modes to make
2624 sure that we properly convert it. */
2625 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2626 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2627 TYPE_MODE (TREE_TYPE (exp)), temp,
2628 SUBREG_PROMOTED_UNSIGNED_P (target));
2630 convert_move (SUBREG_REG (target), temp,
2631 SUBREG_PROMOTED_UNSIGNED_P (target));
2632 return want_value ? temp : NULL_RTX;
2636 temp = expand_expr (exp, target, GET_MODE (target), 0);
2637 /* DO return TARGET if it's a specified hardware register.
2638 expand_return relies on this.
2639 If TARGET is a volatile mem ref, either return TARGET
2640 or return a reg copied *from* TARGET; ANSI requires this.
2642 Otherwise, if TEMP is not TARGET, return TEMP
2643 if it is constant (for efficiency),
2644 or if we really want the correct value. */
2645 if (!(target && GET_CODE (target) == REG
2646 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2647 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2649 && (CONSTANT_P (temp) || want_value))
2650 dont_return_target = 1;
2653 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2654 the same as that of TARGET, adjust the constant. This is needed, for
2655 example, in case it is a CONST_DOUBLE and we want only a word-sized
2657 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2658 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2659 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2660 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2662 /* If value was not generated in the target, store it there.
2663 Convert the value to TARGET's type first if nec. */
2665 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2667 target = protect_from_queue (target, 1);
2668 if (GET_MODE (temp) != GET_MODE (target)
2669 && GET_MODE (temp) != VOIDmode)
2671 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2672 if (dont_return_target)
2674 /* In this case, we will return TEMP,
2675 so make sure it has the proper mode.
2676 But don't forget to store the value into TARGET. */
2677 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2678 emit_move_insn (target, temp);
2681 convert_move (target, temp, unsignedp);
2684 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2686 /* Handle copying a string constant into an array.
2687 The string constant may be shorter than the array.
2688 So copy just the string's actual length, and clear the rest. */
2691 /* Get the size of the data type of the string,
2692 which is actually the size of the target. */
2693 size = expr_size (exp);
2694 if (GET_CODE (size) == CONST_INT
2695 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2696 emit_block_move (target, temp, size,
2697 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2700 /* Compute the size of the data to copy from the string. */
2702 = size_binop (MIN_EXPR,
2703 make_tree (sizetype, size),
2705 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2706 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2710 /* Copy that much. */
2711 emit_block_move (target, temp, copy_size_rtx,
2712 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2714 /* Figure out how much is left in TARGET
2715 that we have to clear. */
2716 if (GET_CODE (copy_size_rtx) == CONST_INT)
2718 temp = plus_constant (XEXP (target, 0),
2719 TREE_STRING_LENGTH (exp));
2720 size = plus_constant (size,
2721 - TREE_STRING_LENGTH (exp));
2725 enum machine_mode size_mode = Pmode;
2727 temp = force_reg (Pmode, XEXP (target, 0));
2728 temp = expand_binop (size_mode, add_optab, temp,
2729 copy_size_rtx, NULL_RTX, 0,
2732 size = expand_binop (size_mode, sub_optab, size,
2733 copy_size_rtx, NULL_RTX, 0,
2736 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2737 GET_MODE (size), 0, 0);
2738 label = gen_label_rtx ();
2739 emit_jump_insn (gen_blt (label));
2742 if (size != const0_rtx)
2744 #ifdef TARGET_MEM_FUNCTIONS
2745 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2746 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2748 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2749 temp, Pmode, size, Pmode);
2756 else if (GET_MODE (temp) == BLKmode)
2757 emit_block_move (target, temp, expr_size (exp),
2758 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2760 emit_move_insn (target, temp);
2763 if (dont_return_target && GET_CODE (temp) != MEM)
2765 if (want_value && GET_MODE (target) != BLKmode)
2766 return copy_to_reg (target);
2772 /* Store the value of constructor EXP into the rtx TARGET.
2773 TARGET is either a REG or a MEM. */
2776 store_constructor (exp, target)
2780 tree type = TREE_TYPE (exp);
2782 /* We know our target cannot conflict, since safe_from_p has been called. */
2784 /* Don't try copying piece by piece into a hard register
2785 since that is vulnerable to being clobbered by EXP.
2786 Instead, construct in a pseudo register and then copy it all. */
2787 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2789 rtx temp = gen_reg_rtx (GET_MODE (target));
2790 store_constructor (exp, temp);
2791 emit_move_insn (target, temp);
2796 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2797 || TREE_CODE (type) == QUAL_UNION_TYPE)
2801 /* Inform later passes that the whole union value is dead. */
2802 if (TREE_CODE (type) == UNION_TYPE
2803 || TREE_CODE (type) == QUAL_UNION_TYPE)
2804 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2806 /* If we are building a static constructor into a register,
2807 set the initial value as zero so we can fold the value into
2809 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2810 emit_move_insn (target, const0_rtx);
2812 /* If the constructor has fewer fields than the structure,
2813 clear the whole structure first. */
2814 else if (list_length (CONSTRUCTOR_ELTS (exp))
2815 != list_length (TYPE_FIELDS (type)))
2816 clear_storage (target, int_size_in_bytes (type));
2818 /* Inform later passes that the old value is dead. */
2819 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2821 /* Store each element of the constructor into
2822 the corresponding field of TARGET. */
2824 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2826 register tree field = TREE_PURPOSE (elt);
2827 register enum machine_mode mode;
2831 tree pos, constant = 0, offset = 0;
2832 rtx to_rtx = target;
2834 /* Just ignore missing fields.
2835 We cleared the whole structure, above,
2836 if any fields are missing. */
2840 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2841 unsignedp = TREE_UNSIGNED (field);
2842 mode = DECL_MODE (field);
2843 if (DECL_BIT_FIELD (field))
2846 pos = DECL_FIELD_BITPOS (field);
2847 if (TREE_CODE (pos) == INTEGER_CST)
2849 else if (TREE_CODE (pos) == PLUS_EXPR
2850 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2851 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2856 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2862 if (contains_placeholder_p (offset))
2863 offset = build (WITH_RECORD_EXPR, sizetype,
2866 offset = size_binop (FLOOR_DIV_EXPR, offset,
2867 size_int (BITS_PER_UNIT));
2869 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2870 if (GET_CODE (to_rtx) != MEM)
2874 = change_address (to_rtx, VOIDmode,
2875 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2876 force_reg (Pmode, offset_rtx)));
2879 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2880 /* The alignment of TARGET is
2881 at least what its type requires. */
2883 TYPE_ALIGN (type) / BITS_PER_UNIT,
2884 int_size_in_bytes (type));
2887 else if (TREE_CODE (type) == ARRAY_TYPE)
2891 tree domain = TYPE_DOMAIN (type);
2892 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2893 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2894 tree elttype = TREE_TYPE (type);
2896 /* If the constructor has fewer fields than the structure,
2897 clear the whole structure first. Similarly if this this is
2898 static constructor of a non-BLKmode object. */
2900 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2901 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2902 clear_storage (target, int_size_in_bytes (type));
2904 /* Inform later passes that the old value is dead. */
2905 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2907 /* Store each element of the constructor into
2908 the corresponding element of TARGET, determined
2909 by counting the elements. */
2910 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2912 elt = TREE_CHAIN (elt), i++)
2914 register enum machine_mode mode;
2918 tree index = TREE_PURPOSE (elt);
2919 rtx xtarget = target;
2921 mode = TYPE_MODE (elttype);
2922 bitsize = GET_MODE_BITSIZE (mode);
2923 unsignedp = TREE_UNSIGNED (elttype);
2925 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2927 /* We don't currently allow variable indices in a
2928 C initializer, but let's try here to support them. */
2929 rtx pos_rtx, addr, xtarget;
2932 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2933 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2934 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2935 xtarget = change_address (target, mode, addr);
2936 store_expr (TREE_VALUE (elt), xtarget, 0);
2941 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2942 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2944 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2946 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2947 /* The alignment of TARGET is
2948 at least what its type requires. */
2950 TYPE_ALIGN (type) / BITS_PER_UNIT,
2951 int_size_in_bytes (type));
2960 /* Store the value of EXP (an expression tree)
2961 into a subfield of TARGET which has mode MODE and occupies
2962 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2963 If MODE is VOIDmode, it means that we are storing into a bit-field.
2965 If VALUE_MODE is VOIDmode, return nothing in particular.
2966 UNSIGNEDP is not used in this case.
2968 Otherwise, return an rtx for the value stored. This rtx
2969 has mode VALUE_MODE if that is convenient to do.
2970 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2972 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2973 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2976 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2977 unsignedp, align, total_size)
2979 int bitsize, bitpos;
2980 enum machine_mode mode;
2982 enum machine_mode value_mode;
2987 HOST_WIDE_INT width_mask = 0;
2989 if (bitsize < HOST_BITS_PER_WIDE_INT)
2990 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2992 /* If we are storing into an unaligned field of an aligned union that is
2993 in a register, we may have the mode of TARGET being an integer mode but
2994 MODE == BLKmode. In that case, get an aligned object whose size and
2995 alignment are the same as TARGET and store TARGET into it (we can avoid
2996 the store if the field being stored is the entire width of TARGET). Then
2997 call ourselves recursively to store the field into a BLKmode version of
2998 that object. Finally, load from the object into TARGET. This is not
2999 very efficient in general, but should only be slightly more expensive
3000 than the otherwise-required unaligned accesses. Perhaps this can be
3001 cleaned up later. */
3004 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3006 rtx object = assign_stack_temp (GET_MODE (target),
3007 GET_MODE_SIZE (GET_MODE (target)), 0);
3008 rtx blk_object = copy_rtx (object);
3010 PUT_MODE (blk_object, BLKmode);
3012 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3013 emit_move_insn (object, target);
3015 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3018 /* Even though we aren't returning target, we need to
3019 give it the updated value. */
3020 emit_move_insn (target, object);
3025 /* If the structure is in a register or if the component
3026 is a bit field, we cannot use addressing to access it.
3027 Use bit-field techniques or SUBREG to store in it. */
3029 if (mode == VOIDmode
3030 || (mode != BLKmode && ! direct_store[(int) mode])
3031 || GET_CODE (target) == REG
3032 || GET_CODE (target) == SUBREG
3033 /* If the field isn't aligned enough to store as an ordinary memref,
3034 store it as a bit field. */
3035 || (STRICT_ALIGNMENT
3036 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3037 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3039 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3041 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3043 if (mode != VOIDmode && mode != BLKmode
3044 && mode != TYPE_MODE (TREE_TYPE (exp)))
3045 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3047 /* Store the value in the bitfield. */
3048 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3049 if (value_mode != VOIDmode)
3051 /* The caller wants an rtx for the value. */
3052 /* If possible, avoid refetching from the bitfield itself. */
3054 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3057 enum machine_mode tmode;
3060 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3061 tmode = GET_MODE (temp);
3062 if (tmode == VOIDmode)
3064 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3065 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3066 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3068 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3069 NULL_RTX, value_mode, 0, align,
3076 rtx addr = XEXP (target, 0);
3079 /* If a value is wanted, it must be the lhs;
3080 so make the address stable for multiple use. */
3082 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3083 && ! CONSTANT_ADDRESS_P (addr)
3084 /* A frame-pointer reference is already stable. */
3085 && ! (GET_CODE (addr) == PLUS
3086 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3087 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3088 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3089 addr = copy_to_reg (addr);
3091 /* Now build a reference to just the desired component. */
3093 to_rtx = change_address (target, mode,
3094 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3095 MEM_IN_STRUCT_P (to_rtx) = 1;
3097 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3101 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3102 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3103 ARRAY_REFs and find the ultimate containing object, which we return.
3105 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3106 bit position, and *PUNSIGNEDP to the signedness of the field.
3107 If the position of the field is variable, we store a tree
3108 giving the variable offset (in units) in *POFFSET.
3109 This offset is in addition to the bit position.
3110 If the position is not variable, we store 0 in *POFFSET.
3112 If any of the extraction expressions is volatile,
3113 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3115 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3116 is a mode that can be used to access the field. In that case, *PBITSIZE
3119 If the field describes a variable-sized object, *PMODE is set to
3120 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3121 this case, but the address of the object can be found. */
3124 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3125 punsignedp, pvolatilep)
3130 enum machine_mode *pmode;
3134 tree orig_exp = exp;
3136 enum machine_mode mode = VOIDmode;
3137 tree offset = integer_zero_node;
3139 if (TREE_CODE (exp) == COMPONENT_REF)
3141 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3142 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3143 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3144 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3146 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3148 size_tree = TREE_OPERAND (exp, 1);
3149 *punsignedp = TREE_UNSIGNED (exp);
3153 mode = TYPE_MODE (TREE_TYPE (exp));
3154 *pbitsize = GET_MODE_BITSIZE (mode);
3155 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3160 if (TREE_CODE (size_tree) != INTEGER_CST)
3161 mode = BLKmode, *pbitsize = -1;
3163 *pbitsize = TREE_INT_CST_LOW (size_tree);
3166 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3167 and find the ultimate containing object. */
3173 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3175 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3176 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3177 : TREE_OPERAND (exp, 2));
3179 /* If this field hasn't been filled in yet, don't go
3180 past it. This should only happen when folding expressions
3181 made during type construction. */
3185 if (TREE_CODE (pos) == PLUS_EXPR)
3188 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3190 constant = TREE_OPERAND (pos, 0);
3191 var = TREE_OPERAND (pos, 1);
3193 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3195 constant = TREE_OPERAND (pos, 1);
3196 var = TREE_OPERAND (pos, 0);
3201 *pbitpos += TREE_INT_CST_LOW (constant);
3202 offset = size_binop (PLUS_EXPR, offset,
3203 size_binop (FLOOR_DIV_EXPR, var,
3204 size_int (BITS_PER_UNIT)));
3206 else if (TREE_CODE (pos) == INTEGER_CST)
3207 *pbitpos += TREE_INT_CST_LOW (pos);
3210 /* Assume here that the offset is a multiple of a unit.
3211 If not, there should be an explicitly added constant. */
3212 offset = size_binop (PLUS_EXPR, offset,
3213 size_binop (FLOOR_DIV_EXPR, pos,
3214 size_int (BITS_PER_UNIT)));
3218 else if (TREE_CODE (exp) == ARRAY_REF)
3220 /* This code is based on the code in case ARRAY_REF in expand_expr
3221 below. We assume here that the size of an array element is
3222 always an integral multiple of BITS_PER_UNIT. */
3224 tree index = TREE_OPERAND (exp, 1);
3225 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3227 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3228 tree index_type = TREE_TYPE (index);
3230 if (! integer_zerop (low_bound))
3231 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3233 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3235 index = convert (type_for_size (POINTER_SIZE, 0), index);
3236 index_type = TREE_TYPE (index);
3239 index = fold (build (MULT_EXPR, index_type, index,
3240 TYPE_SIZE (TREE_TYPE (exp))));
3242 if (TREE_CODE (index) == INTEGER_CST
3243 && TREE_INT_CST_HIGH (index) == 0)
3244 *pbitpos += TREE_INT_CST_LOW (index);
3246 offset = size_binop (PLUS_EXPR, offset,
3247 size_binop (FLOOR_DIV_EXPR, index,
3248 size_int (BITS_PER_UNIT)));
3250 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3251 && ! ((TREE_CODE (exp) == NOP_EXPR
3252 || TREE_CODE (exp) == CONVERT_EXPR)
3253 && (TYPE_MODE (TREE_TYPE (exp))
3254 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3257 /* If any reference in the chain is volatile, the effect is volatile. */
3258 if (TREE_THIS_VOLATILE (exp))
3260 exp = TREE_OPERAND (exp, 0);
3263 /* If this was a bit-field, see if there is a mode that allows direct
3264 access in case EXP is in memory. */
3265 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3267 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3268 if (mode == BLKmode)
3272 if (integer_zerop (offset))
3275 if (offset != 0 && contains_placeholder_p (offset))
3276 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3283 /* Given an rtx VALUE that may contain additions and multiplications,
3284 return an equivalent value that just refers to a register or memory.
3285 This is done by generating instructions to perform the arithmetic
3286 and returning a pseudo-register containing the value.
3288 The returned value may be a REG, SUBREG, MEM or constant. */
3291 force_operand (value, target)
3294 register optab binoptab = 0;
3295 /* Use a temporary to force order of execution of calls to
3299 /* Use subtarget as the target for operand 0 of a binary operation. */
3300 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3302 if (GET_CODE (value) == PLUS)
3303 binoptab = add_optab;
3304 else if (GET_CODE (value) == MINUS)
3305 binoptab = sub_optab;
3306 else if (GET_CODE (value) == MULT)
3308 op2 = XEXP (value, 1);
3309 if (!CONSTANT_P (op2)
3310 && !(GET_CODE (op2) == REG && op2 != subtarget))
3312 tmp = force_operand (XEXP (value, 0), subtarget);
3313 return expand_mult (GET_MODE (value), tmp,
3314 force_operand (op2, NULL_RTX),
3320 op2 = XEXP (value, 1);
3321 if (!CONSTANT_P (op2)
3322 && !(GET_CODE (op2) == REG && op2 != subtarget))
3324 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3326 binoptab = add_optab;
3327 op2 = negate_rtx (GET_MODE (value), op2);
3330 /* Check for an addition with OP2 a constant integer and our first
3331 operand a PLUS of a virtual register and something else. In that
3332 case, we want to emit the sum of the virtual register and the
3333 constant first and then add the other value. This allows virtual
3334 register instantiation to simply modify the constant rather than
3335 creating another one around this addition. */
3336 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3337 && GET_CODE (XEXP (value, 0)) == PLUS
3338 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3339 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3340 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3342 rtx temp = expand_binop (GET_MODE (value), binoptab,
3343 XEXP (XEXP (value, 0), 0), op2,
3344 subtarget, 0, OPTAB_LIB_WIDEN);
3345 return expand_binop (GET_MODE (value), binoptab, temp,
3346 force_operand (XEXP (XEXP (value, 0), 1), 0),
3347 target, 0, OPTAB_LIB_WIDEN);
3350 tmp = force_operand (XEXP (value, 0), subtarget);
3351 return expand_binop (GET_MODE (value), binoptab, tmp,
3352 force_operand (op2, NULL_RTX),
3353 target, 0, OPTAB_LIB_WIDEN);
3354 /* We give UNSIGNEDP = 0 to expand_binop
3355 because the only operations we are expanding here are signed ones. */
3360 /* Subroutine of expand_expr:
3361 save the non-copied parts (LIST) of an expr (LHS), and return a list
3362 which can restore these values to their previous values,
3363 should something modify their storage. */
3366 save_noncopied_parts (lhs, list)
3373 for (tail = list; tail; tail = TREE_CHAIN (tail))
3374 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3375 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3378 tree part = TREE_VALUE (tail);
3379 tree part_type = TREE_TYPE (part);
3380 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3381 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3382 int_size_in_bytes (part_type), 0);
3383 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3384 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3385 parts = tree_cons (to_be_saved,
3386 build (RTL_EXPR, part_type, NULL_TREE,
3389 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3394 /* Subroutine of expand_expr:
3395 record the non-copied parts (LIST) of an expr (LHS), and return a list
3396 which specifies the initial values of these parts. */
3399 init_noncopied_parts (lhs, list)
3406 for (tail = list; tail; tail = TREE_CHAIN (tail))
3407 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3408 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3411 tree part = TREE_VALUE (tail);
3412 tree part_type = TREE_TYPE (part);
3413 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3414 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3419 /* Subroutine of expand_expr: return nonzero iff there is no way that
3420 EXP can reference X, which is being modified. */
3423 safe_from_p (x, exp)
3433 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3434 find the underlying pseudo. */
3435 if (GET_CODE (x) == SUBREG)
3438 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3442 /* If X is a location in the outgoing argument area, it is always safe. */
3443 if (GET_CODE (x) == MEM
3444 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3445 || (GET_CODE (XEXP (x, 0)) == PLUS
3446 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3449 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3452 exp_rtl = DECL_RTL (exp);
3459 if (TREE_CODE (exp) == TREE_LIST)
3460 return ((TREE_VALUE (exp) == 0
3461 || safe_from_p (x, TREE_VALUE (exp)))
3462 && (TREE_CHAIN (exp) == 0
3463 || safe_from_p (x, TREE_CHAIN (exp))));
3468 return safe_from_p (x, TREE_OPERAND (exp, 0));
3472 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3473 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3477 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3478 the expression. If it is set, we conflict iff we are that rtx or
3479 both are in memory. Otherwise, we check all operands of the
3480 expression recursively. */
3482 switch (TREE_CODE (exp))
3485 return (staticp (TREE_OPERAND (exp, 0))
3486 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3489 if (GET_CODE (x) == MEM)
3494 exp_rtl = CALL_EXPR_RTL (exp);
3497 /* Assume that the call will clobber all hard registers and
3499 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3500 || GET_CODE (x) == MEM)
3507 exp_rtl = RTL_EXPR_RTL (exp);
3509 /* We don't know what this can modify. */
3514 case WITH_CLEANUP_EXPR:
3515 exp_rtl = RTL_EXPR_RTL (exp);
3519 exp_rtl = SAVE_EXPR_RTL (exp);
3523 /* The only operand we look at is operand 1. The rest aren't
3524 part of the expression. */
3525 return safe_from_p (x, TREE_OPERAND (exp, 1));
3527 case METHOD_CALL_EXPR:
3528 /* This takes a rtx argument, but shouldn't appear here. */
3532 /* If we have an rtx, we do not need to scan our operands. */
3536 nops = tree_code_length[(int) TREE_CODE (exp)];
3537 for (i = 0; i < nops; i++)
3538 if (TREE_OPERAND (exp, i) != 0
3539 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3543 /* If we have an rtl, find any enclosed object. Then see if we conflict
3547 if (GET_CODE (exp_rtl) == SUBREG)
3549 exp_rtl = SUBREG_REG (exp_rtl);
3550 if (GET_CODE (exp_rtl) == REG
3551 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3555 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3556 are memory and EXP is not readonly. */
3557 return ! (rtx_equal_p (x, exp_rtl)
3558 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3559 && ! TREE_READONLY (exp)));
3562 /* If we reach here, it is safe. */
3566 /* Subroutine of expand_expr: return nonzero iff EXP is an
3567 expression whose type is statically determinable. */
3573 if (TREE_CODE (exp) == PARM_DECL
3574 || TREE_CODE (exp) == VAR_DECL
3575 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3576 || TREE_CODE (exp) == COMPONENT_REF
3577 || TREE_CODE (exp) == ARRAY_REF)
3582 /* expand_expr: generate code for computing expression EXP.
3583 An rtx for the computed value is returned. The value is never null.
3584 In the case of a void EXP, const0_rtx is returned.
3586 The value may be stored in TARGET if TARGET is nonzero.
3587 TARGET is just a suggestion; callers must assume that
3588 the rtx returned may not be the same as TARGET.
3590 If TARGET is CONST0_RTX, it means that the value will be ignored.
3592 If TMODE is not VOIDmode, it suggests generating the
3593 result in mode TMODE. But this is done only when convenient.
3594 Otherwise, TMODE is ignored and the value generated in its natural mode.
3595 TMODE is just a suggestion; callers must assume that
3596 the rtx returned may not have mode TMODE.
3598 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3599 with a constant address even if that address is not normally legitimate.
3600 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3602 If MODIFIER is EXPAND_SUM then when EXP is an addition
3603 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3604 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3605 products as above, or REG or MEM, or constant.
3606 Ordinarily in such cases we would output mul or add instructions
3607 and then return a pseudo reg containing the sum.
3609 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3610 it also marks a label as absolutely required (it can't be dead).
3611 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3612 This is used for outputting expressions used in initializers. */
3615 expand_expr (exp, target, tmode, modifier)
3618 enum machine_mode tmode;
3619 enum expand_modifier modifier;
3621 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3622 This is static so it will be accessible to our recursive callees. */
3623 static tree placeholder_list = 0;
3624 register rtx op0, op1, temp;
3625 tree type = TREE_TYPE (exp);
3626 int unsignedp = TREE_UNSIGNED (type);
3627 register enum machine_mode mode = TYPE_MODE (type);
3628 register enum tree_code code = TREE_CODE (exp);
3630 /* Use subtarget as the target for operand 0 of a binary operation. */
3631 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3632 rtx original_target = target;
3633 /* Maybe defer this until sure not doing bytecode? */
3634 int ignore = (target == const0_rtx
3635 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3636 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3637 || code == COND_EXPR)
3638 && TREE_CODE (type) == VOID_TYPE));
3642 if (output_bytecode)
3644 bc_expand_expr (exp);
3648 /* Don't use hard regs as subtargets, because the combiner
3649 can only handle pseudo regs. */
3650 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3652 /* Avoid subtargets inside loops,
3653 since they hide some invariant expressions. */
3654 if (preserve_subexpressions_p ())
3657 /* If we are going to ignore this result, we need only do something
3658 if there is a side-effect somewhere in the expression. If there
3659 is, short-circuit the most common cases here. Note that we must
3660 not call expand_expr with anything but const0_rtx in case this
3661 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3665 if (! TREE_SIDE_EFFECTS (exp))
3668 /* Ensure we reference a volatile object even if value is ignored. */
3669 if (TREE_THIS_VOLATILE (exp)
3670 && TREE_CODE (exp) != FUNCTION_DECL
3671 && mode != VOIDmode && mode != BLKmode)
3673 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3674 if (GET_CODE (temp) == MEM)
3675 temp = copy_to_reg (temp);
3679 if (TREE_CODE_CLASS (code) == '1')
3680 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3681 VOIDmode, modifier);
3682 else if (TREE_CODE_CLASS (code) == '2'
3683 || TREE_CODE_CLASS (code) == '<')
3685 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3686 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3689 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3690 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3691 /* If the second operand has no side effects, just evaluate
3693 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3694 VOIDmode, modifier);
3699 /* If will do cse, generate all results into pseudo registers
3700 since 1) that allows cse to find more things
3701 and 2) otherwise cse could produce an insn the machine
3704 if (! cse_not_expected && mode != BLKmode && target
3705 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3712 tree function = decl_function_context (exp);
3713 /* Handle using a label in a containing function. */
3714 if (function != current_function_decl && function != 0)
3716 struct function *p = find_function_data (function);
3717 /* Allocate in the memory associated with the function
3718 that the label is in. */
3719 push_obstacks (p->function_obstack,
3720 p->function_maybepermanent_obstack);
3722 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3723 label_rtx (exp), p->forced_labels);
3726 else if (modifier == EXPAND_INITIALIZER)
3727 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3728 label_rtx (exp), forced_labels);
3729 temp = gen_rtx (MEM, FUNCTION_MODE,
3730 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3731 if (function != current_function_decl && function != 0)
3732 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3737 if (DECL_RTL (exp) == 0)
3739 error_with_decl (exp, "prior parameter's size depends on `%s'");
3740 return CONST0_RTX (mode);
3744 /* If a static var's type was incomplete when the decl was written,
3745 but the type is complete now, lay out the decl now. */
3746 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3747 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3749 push_obstacks_nochange ();
3750 end_temporary_allocation ();
3751 layout_decl (exp, 0);
3752 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3757 if (DECL_RTL (exp) == 0)
3759 /* Ensure variable marked as used even if it doesn't go through
3760 a parser. If it hasn't be used yet, write out an external
3762 if (! TREE_USED (exp))
3764 assemble_external (exp);
3765 TREE_USED (exp) = 1;
3768 /* Handle variables inherited from containing functions. */
3769 context = decl_function_context (exp);
3771 /* We treat inline_function_decl as an alias for the current function
3772 because that is the inline function whose vars, types, etc.
3773 are being merged into the current function.
3774 See expand_inline_function. */
3775 if (context != 0 && context != current_function_decl
3776 && context != inline_function_decl
3777 /* If var is static, we don't need a static chain to access it. */
3778 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3779 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3783 /* Mark as non-local and addressable. */
3784 DECL_NONLOCAL (exp) = 1;
3785 mark_addressable (exp);
3786 if (GET_CODE (DECL_RTL (exp)) != MEM)
3788 addr = XEXP (DECL_RTL (exp), 0);
3789 if (GET_CODE (addr) == MEM)
3790 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3792 addr = fix_lexical_addr (addr, exp);
3793 return change_address (DECL_RTL (exp), mode, addr);
3796 /* This is the case of an array whose size is to be determined
3797 from its initializer, while the initializer is still being parsed.
3799 if (GET_CODE (DECL_RTL (exp)) == MEM
3800 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3801 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3802 XEXP (DECL_RTL (exp), 0));
3803 if (GET_CODE (DECL_RTL (exp)) == MEM
3804 && modifier != EXPAND_CONST_ADDRESS
3805 && modifier != EXPAND_SUM
3806 && modifier != EXPAND_INITIALIZER)
3808 /* DECL_RTL probably contains a constant address.
3809 On RISC machines where a constant address isn't valid,
3810 make some insns to get that address into a register. */
3811 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3813 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3814 return change_address (DECL_RTL (exp), VOIDmode,
3815 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3818 /* If the mode of DECL_RTL does not match that of the decl, it
3819 must be a promoted value. We return a SUBREG of the wanted mode,
3820 but mark it so that we know that it was already extended. */
3822 if (GET_CODE (DECL_RTL (exp)) == REG
3823 && GET_MODE (DECL_RTL (exp)) != mode)
3825 enum machine_mode decl_mode = DECL_MODE (exp);
3827 /* Get the signedness used for this variable. Ensure we get the
3828 same mode we got when the variable was declared. */
3830 PROMOTE_MODE (decl_mode, unsignedp, type);
3832 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3835 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3836 SUBREG_PROMOTED_VAR_P (temp) = 1;
3837 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3841 return DECL_RTL (exp);
3844 return immed_double_const (TREE_INT_CST_LOW (exp),
3845 TREE_INT_CST_HIGH (exp),
3849 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3852 /* If optimized, generate immediate CONST_DOUBLE
3853 which will be turned into memory by reload if necessary.
3855 We used to force a register so that loop.c could see it. But
3856 this does not allow gen_* patterns to perform optimizations with
3857 the constants. It also produces two insns in cases like "x = 1.0;".
3858 On most machines, floating-point constants are not permitted in
3859 many insns, so we'd end up copying it to a register in any case.
3861 Now, we do the copying in expand_binop, if appropriate. */
3862 return immed_real_const (exp);
3866 if (! TREE_CST_RTL (exp))
3867 output_constant_def (exp);
3869 /* TREE_CST_RTL probably contains a constant address.
3870 On RISC machines where a constant address isn't valid,
3871 make some insns to get that address into a register. */
3872 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3873 && modifier != EXPAND_CONST_ADDRESS
3874 && modifier != EXPAND_INITIALIZER
3875 && modifier != EXPAND_SUM
3876 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3877 return change_address (TREE_CST_RTL (exp), VOIDmode,
3878 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3879 return TREE_CST_RTL (exp);
3882 context = decl_function_context (exp);
3883 /* We treat inline_function_decl as an alias for the current function
3884 because that is the inline function whose vars, types, etc.
3885 are being merged into the current function.
3886 See expand_inline_function. */
3887 if (context == current_function_decl || context == inline_function_decl)
3890 /* If this is non-local, handle it. */
3893 temp = SAVE_EXPR_RTL (exp);
3894 if (temp && GET_CODE (temp) == REG)
3896 put_var_into_stack (exp);
3897 temp = SAVE_EXPR_RTL (exp);
3899 if (temp == 0 || GET_CODE (temp) != MEM)
3901 return change_address (temp, mode,
3902 fix_lexical_addr (XEXP (temp, 0), exp));
3904 if (SAVE_EXPR_RTL (exp) == 0)
3906 if (mode == BLKmode)
3909 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3910 MEM_IN_STRUCT_P (temp)
3911 = (TREE_CODE (type) == RECORD_TYPE
3912 || TREE_CODE (type) == UNION_TYPE
3913 || TREE_CODE (type) == QUAL_UNION_TYPE
3914 || TREE_CODE (type) == ARRAY_TYPE);
3918 enum machine_mode var_mode = mode;
3920 if (TREE_CODE (type) == INTEGER_TYPE
3921 || TREE_CODE (type) == ENUMERAL_TYPE
3922 || TREE_CODE (type) == BOOLEAN_TYPE
3923 || TREE_CODE (type) == CHAR_TYPE
3924 || TREE_CODE (type) == REAL_TYPE
3925 || TREE_CODE (type) == POINTER_TYPE
3926 || TREE_CODE (type) == OFFSET_TYPE)
3928 PROMOTE_MODE (var_mode, unsignedp, type);
3931 temp = gen_reg_rtx (var_mode);
3934 SAVE_EXPR_RTL (exp) = temp;
3935 if (!optimize && GET_CODE (temp) == REG)
3936 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3939 /* If the mode of TEMP does not match that of the expression, it
3940 must be a promoted value. We pass store_expr a SUBREG of the
3941 wanted mode but mark it so that we know that it was already
3942 extended. Note that `unsignedp' was modified above in
3945 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3947 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3948 SUBREG_PROMOTED_VAR_P (temp) = 1;
3949 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3952 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3955 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3956 must be a promoted value. We return a SUBREG of the wanted mode,
3957 but mark it so that we know that it was already extended. */
3959 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3960 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3962 enum machine_mode var_mode = mode;
3964 if (TREE_CODE (type) == INTEGER_TYPE
3965 || TREE_CODE (type) == ENUMERAL_TYPE
3966 || TREE_CODE (type) == BOOLEAN_TYPE
3967 || TREE_CODE (type) == CHAR_TYPE
3968 || TREE_CODE (type) == REAL_TYPE
3969 || TREE_CODE (type) == POINTER_TYPE
3970 || TREE_CODE (type) == OFFSET_TYPE)
3972 PROMOTE_MODE (var_mode, unsignedp, type);
3975 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3976 SUBREG_PROMOTED_VAR_P (temp) = 1;
3977 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3981 return SAVE_EXPR_RTL (exp);
3983 case PLACEHOLDER_EXPR:
3984 /* If there is an object on the head of the placeholder list,
3985 see if some object in it's references is of type TYPE. For
3986 further information, see tree.def. */
3987 if (placeholder_list)
3990 tree old_list = placeholder_list;
3992 for (object = TREE_PURPOSE (placeholder_list);
3993 TREE_TYPE (object) != type
3994 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
3995 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3996 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3997 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
3998 object = TREE_OPERAND (object, 0))
4001 if (object && TREE_TYPE (object) == type)
4003 /* Expand this object skipping the list entries before
4004 it was found in case it is also a PLACEHOLDER_EXPR.
4005 In that case, we want to translate it using subsequent
4007 placeholder_list = TREE_CHAIN (placeholder_list);
4008 temp = expand_expr (object, original_target, tmode, modifier);
4009 placeholder_list = old_list;
4014 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4017 case WITH_RECORD_EXPR:
4018 /* Put the object on the placeholder list, expand our first operand,
4019 and pop the list. */
4020 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4022 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4024 placeholder_list = TREE_CHAIN (placeholder_list);
4028 expand_exit_loop_if_false (NULL_PTR,
4029 invert_truthvalue (TREE_OPERAND (exp, 0)));
4034 expand_start_loop (1);
4035 expand_expr_stmt (TREE_OPERAND (exp, 0));
4043 tree vars = TREE_OPERAND (exp, 0);
4044 int vars_need_expansion = 0;
4046 /* Need to open a binding contour here because
4047 if there are any cleanups they most be contained here. */
4048 expand_start_bindings (0);
4050 /* Mark the corresponding BLOCK for output in its proper place. */
4051 if (TREE_OPERAND (exp, 2) != 0
4052 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4053 insert_block (TREE_OPERAND (exp, 2));
4055 /* If VARS have not yet been expanded, expand them now. */
4058 if (DECL_RTL (vars) == 0)
4060 vars_need_expansion = 1;
4063 expand_decl_init (vars);
4064 vars = TREE_CHAIN (vars);
4067 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4069 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4075 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4077 emit_insns (RTL_EXPR_SEQUENCE (exp));
4078 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4079 free_temps_for_rtl_expr (exp);
4080 return RTL_EXPR_RTL (exp);
4083 /* If we don't need the result, just ensure we evaluate any
4088 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4089 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4093 /* All elts simple constants => refer to a constant in memory. But
4094 if this is a non-BLKmode mode, let it store a field at a time
4095 since that should make a CONST_INT or CONST_DOUBLE when we
4096 fold. Likewise, if we have a target we can use, it is best to
4097 store directly into the target. If we are making an initializer and
4098 all operands are constant, put it in memory as well. */
4099 else if ((TREE_STATIC (exp)
4100 && ((mode == BLKmode
4101 && ! (target != 0 && safe_from_p (target, exp)))
4102 || TREE_ADDRESSABLE (exp)))
4103 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4105 rtx constructor = output_constant_def (exp);
4106 if (modifier != EXPAND_CONST_ADDRESS
4107 && modifier != EXPAND_INITIALIZER
4108 && modifier != EXPAND_SUM
4109 && !memory_address_p (GET_MODE (constructor),
4110 XEXP (constructor, 0)))
4111 constructor = change_address (constructor, VOIDmode,
4112 XEXP (constructor, 0));
4118 if (target == 0 || ! safe_from_p (target, exp))
4120 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4121 target = gen_reg_rtx (mode);
4124 enum tree_code c = TREE_CODE (type);
4126 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4127 if (c == RECORD_TYPE || c == UNION_TYPE
4128 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
4129 MEM_IN_STRUCT_P (target) = 1;
4132 store_constructor (exp, target);
4138 tree exp1 = TREE_OPERAND (exp, 0);
4141 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4142 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4143 This code has the same general effect as simply doing
4144 expand_expr on the save expr, except that the expression PTR
4145 is computed for use as a memory address. This means different
4146 code, suitable for indexing, may be generated. */
4147 if (TREE_CODE (exp1) == SAVE_EXPR
4148 && SAVE_EXPR_RTL (exp1) == 0
4149 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4150 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4151 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4153 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4154 VOIDmode, EXPAND_SUM);
4155 op0 = memory_address (mode, temp);
4156 op0 = copy_all_regs (op0);
4157 SAVE_EXPR_RTL (exp1) = op0;
4161 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4162 op0 = memory_address (mode, op0);
4165 temp = gen_rtx (MEM, mode, op0);
4166 /* If address was computed by addition,
4167 mark this as an element of an aggregate. */
4168 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4169 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4170 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4171 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4172 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4173 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4174 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
4175 || (TREE_CODE (exp1) == ADDR_EXPR
4176 && (exp2 = TREE_OPERAND (exp1, 0))
4177 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4178 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4179 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4180 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
4181 MEM_IN_STRUCT_P (temp) = 1;
4182 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4183 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4184 a location is accessed through a pointer to const does not mean
4185 that the value there can never change. */
4186 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4192 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4196 tree array = TREE_OPERAND (exp, 0);
4197 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4198 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4199 tree index = TREE_OPERAND (exp, 1);
4200 tree index_type = TREE_TYPE (index);
4203 if (TREE_CODE (low_bound) != INTEGER_CST
4204 && contains_placeholder_p (low_bound))
4205 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4207 /* Optimize the special-case of a zero lower bound.
4209 We convert the low_bound to sizetype to avoid some problems
4210 with constant folding. (E.g. suppose the lower bound is 1,
4211 and its mode is QI. Without the conversion, (ARRAY
4212 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4213 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4215 But sizetype isn't quite right either (especially if
4216 the lowbound is negative). FIXME */
4218 if (! integer_zerop (low_bound))
4219 index = fold (build (MINUS_EXPR, index_type, index,
4220 convert (sizetype, low_bound)));
4222 if (TREE_CODE (index) != INTEGER_CST
4223 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4225 /* Nonconstant array index or nonconstant element size.
4226 Generate the tree for *(&array+index) and expand that,
4227 except do it in a language-independent way
4228 and don't complain about non-lvalue arrays.
4229 `mark_addressable' should already have been called
4230 for any array for which this case will be reached. */
4232 /* Don't forget the const or volatile flag from the array
4234 tree variant_type = build_type_variant (type,
4235 TREE_READONLY (exp),
4236 TREE_THIS_VOLATILE (exp));
4237 tree array_adr = build1 (ADDR_EXPR,
4238 build_pointer_type (variant_type), array);
4240 tree size = size_in_bytes (type);
4242 /* Convert the integer argument to a type the same size as a
4243 pointer so the multiply won't overflow spuriously. */
4244 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4245 index = convert (type_for_size (POINTER_SIZE, 0), index);
4247 if (TREE_CODE (size) != INTEGER_CST
4248 && contains_placeholder_p (size))
4249 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4251 /* Don't think the address has side effects
4252 just because the array does.
4253 (In some cases the address might have side effects,
4254 and we fail to record that fact here. However, it should not
4255 matter, since expand_expr should not care.) */
4256 TREE_SIDE_EFFECTS (array_adr) = 0;
4258 elt = build1 (INDIRECT_REF, type,
4259 fold (build (PLUS_EXPR,
4260 TYPE_POINTER_TO (variant_type),
4262 fold (build (MULT_EXPR,
4263 TYPE_POINTER_TO (variant_type),
4266 /* Volatility, etc., of new expression is same as old
4268 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4269 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4270 TREE_READONLY (elt) = TREE_READONLY (exp);
4272 return expand_expr (elt, target, tmode, modifier);
4275 /* Fold an expression like: "foo"[2].
4276 This is not done in fold so it won't happen inside &. */
4278 if (TREE_CODE (array) == STRING_CST
4279 && TREE_CODE (index) == INTEGER_CST
4280 && !TREE_INT_CST_HIGH (index)
4281 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4282 && GET_MODE_CLASS (mode) == MODE_INT)
4283 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4285 /* If this is a constant index into a constant array,
4286 just get the value from the array. Handle both the cases when
4287 we have an explicit constructor and when our operand is a variable
4288 that was declared const. */
4290 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4292 if (TREE_CODE (index) == INTEGER_CST
4293 && TREE_INT_CST_HIGH (index) == 0)
4295 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4297 i = TREE_INT_CST_LOW (index);
4299 elem = TREE_CHAIN (elem);
4301 return expand_expr (fold (TREE_VALUE (elem)), target,
4306 else if (optimize >= 1
4307 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4308 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4309 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4311 if (TREE_CODE (index) == INTEGER_CST
4312 && TREE_INT_CST_HIGH (index) == 0)
4314 tree init = DECL_INITIAL (array);
4316 i = TREE_INT_CST_LOW (index);
4317 if (TREE_CODE (init) == CONSTRUCTOR)
4319 tree elem = CONSTRUCTOR_ELTS (init);
4322 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4323 elem = TREE_CHAIN (elem);
4325 return expand_expr (fold (TREE_VALUE (elem)), target,
4328 else if (TREE_CODE (init) == STRING_CST
4329 && i < TREE_STRING_LENGTH (init))
4330 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4335 /* Treat array-ref with constant index as a component-ref. */
4339 /* If the operand is a CONSTRUCTOR, we can just extract the
4340 appropriate field if it is present. */
4341 if (code != ARRAY_REF
4342 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4346 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4347 elt = TREE_CHAIN (elt))
4348 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4349 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4353 enum machine_mode mode1;
4358 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4359 &mode1, &unsignedp, &volatilep);
4362 /* If we got back the original object, something is wrong. Perhaps
4363 we are evaluating an expression too early. In any event, don't
4364 infinitely recurse. */
4368 /* In some cases, we will be offsetting OP0's address by a constant.
4369 So get it as a sum, if possible. If we will be using it
4370 directly in an insn, we validate it. */
4371 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4373 /* If this is a constant, put it into a register if it is a
4374 legitimate constant and memory if it isn't. */
4375 if (CONSTANT_P (op0))
4377 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4378 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4379 op0 = force_reg (mode, op0);
4381 op0 = validize_mem (force_const_mem (mode, op0));
4384 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4387 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4389 if (GET_CODE (op0) != MEM)
4391 op0 = change_address (op0, VOIDmode,
4392 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4393 force_reg (Pmode, offset_rtx)));
4394 /* If we have a variable offset, the known alignment
4395 is only that of the innermost structure containing the field.
4396 (Actually, we could sometimes do better by using the
4397 size of an element of the innermost array, but no need.) */
4398 if (TREE_CODE (exp) == COMPONENT_REF
4399 || TREE_CODE (exp) == BIT_FIELD_REF)
4400 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4404 /* Don't forget about volatility even if this is a bitfield. */
4405 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4407 op0 = copy_rtx (op0);
4408 MEM_VOLATILE_P (op0) = 1;
4411 /* In cases where an aligned union has an unaligned object
4412 as a field, we might be extracting a BLKmode value from
4413 an integer-mode (e.g., SImode) object. Handle this case
4414 by doing the extract into an object as wide as the field
4415 (which we know to be the width of a basic mode), then
4416 storing into memory, and changing the mode to BLKmode. */
4417 if (mode1 == VOIDmode
4418 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4419 && modifier != EXPAND_CONST_ADDRESS
4420 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4421 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4422 /* If the field isn't aligned enough to fetch as a memref,
4423 fetch it as a bit field. */
4424 || (STRICT_ALIGNMENT
4425 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4426 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4428 enum machine_mode ext_mode = mode;
4430 if (ext_mode == BLKmode)
4431 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4433 if (ext_mode == BLKmode)
4436 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4437 unsignedp, target, ext_mode, ext_mode,
4439 int_size_in_bytes (TREE_TYPE (tem)));
4440 if (mode == BLKmode)
4442 rtx new = assign_stack_temp (ext_mode,
4443 bitsize / BITS_PER_UNIT, 0);
4445 emit_move_insn (new, op0);
4446 op0 = copy_rtx (new);
4447 PUT_MODE (op0, BLKmode);
4448 MEM_IN_STRUCT_P (op0) = 1;
4454 /* Get a reference to just this component. */
4455 if (modifier == EXPAND_CONST_ADDRESS
4456 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4457 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4458 (bitpos / BITS_PER_UNIT)));
4460 op0 = change_address (op0, mode1,
4461 plus_constant (XEXP (op0, 0),
4462 (bitpos / BITS_PER_UNIT)));
4463 MEM_IN_STRUCT_P (op0) = 1;
4464 MEM_VOLATILE_P (op0) |= volatilep;
4465 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4468 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4469 convert_move (target, op0, unsignedp);
4475 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4476 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4477 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4478 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4479 MEM_IN_STRUCT_P (temp) = 1;
4480 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4481 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4482 a location is accessed through a pointer to const does not mean
4483 that the value there can never change. */
4484 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4489 /* Intended for a reference to a buffer of a file-object in Pascal.
4490 But it's not certain that a special tree code will really be
4491 necessary for these. INDIRECT_REF might work for them. */
4495 /* IN_EXPR: Inlined pascal set IN expression.
4498 rlo = set_low - (set_low%bits_per_word);
4499 the_word = set [ (index - rlo)/bits_per_word ];
4500 bit_index = index % bits_per_word;
4501 bitmask = 1 << bit_index;
4502 return !!(the_word & bitmask); */
4504 preexpand_calls (exp);
4506 tree set = TREE_OPERAND (exp, 0);
4507 tree index = TREE_OPERAND (exp, 1);
4508 tree set_type = TREE_TYPE (set);
4510 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4511 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4517 rtx diff, quo, rem, addr, bit, result;
4518 rtx setval, setaddr;
4519 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4522 target = gen_reg_rtx (mode);
4524 /* If domain is empty, answer is no. */
4525 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4528 index_val = expand_expr (index, 0, VOIDmode, 0);
4529 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4530 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4531 setval = expand_expr (set, 0, VOIDmode, 0);
4532 setaddr = XEXP (setval, 0);
4534 /* Compare index against bounds, if they are constant. */
4535 if (GET_CODE (index_val) == CONST_INT
4536 && GET_CODE (lo_r) == CONST_INT
4537 && INTVAL (index_val) < INTVAL (lo_r))
4540 if (GET_CODE (index_val) == CONST_INT
4541 && GET_CODE (hi_r) == CONST_INT
4542 && INTVAL (hi_r) < INTVAL (index_val))
4545 /* If we get here, we have to generate the code for both cases
4546 (in range and out of range). */
4548 op0 = gen_label_rtx ();
4549 op1 = gen_label_rtx ();
4551 if (! (GET_CODE (index_val) == CONST_INT
4552 && GET_CODE (lo_r) == CONST_INT))
4554 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4555 GET_MODE (index_val), 0, 0);
4556 emit_jump_insn (gen_blt (op1));
4559 if (! (GET_CODE (index_val) == CONST_INT
4560 && GET_CODE (hi_r) == CONST_INT))
4562 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4563 GET_MODE (index_val), 0, 0);
4564 emit_jump_insn (gen_bgt (op1));
4567 /* Calculate the element number of bit zero in the first word
4569 if (GET_CODE (lo_r) == CONST_INT)
4570 rlow = GEN_INT (INTVAL (lo_r)
4571 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4573 rlow = expand_binop (index_mode, and_optab, lo_r,
4574 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4575 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4577 diff = expand_binop (index_mode, sub_optab,
4578 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4580 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4581 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4582 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4583 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4584 addr = memory_address (byte_mode,
4585 expand_binop (index_mode, add_optab,
4586 diff, setaddr, NULL_RTX, 0,
4588 /* Extract the bit we want to examine */
4589 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4590 gen_rtx (MEM, byte_mode, addr),
4591 make_tree (TREE_TYPE (index), rem),
4593 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4594 GET_MODE (target) == byte_mode ? target : 0,
4595 1, OPTAB_LIB_WIDEN);
4597 if (result != target)
4598 convert_move (target, result, 1);
4600 /* Output the code to handle the out-of-range case. */
4603 emit_move_insn (target, const0_rtx);
4608 case WITH_CLEANUP_EXPR:
4609 if (RTL_EXPR_RTL (exp) == 0)
4612 = expand_expr (TREE_OPERAND (exp, 0),
4613 target ? target : const0_rtx,
4616 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4617 /* That's it for this cleanup. */
4618 TREE_OPERAND (exp, 2) = 0;
4620 return RTL_EXPR_RTL (exp);
4623 /* Check for a built-in function. */
4624 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4625 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4626 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4627 return expand_builtin (exp, target, subtarget, tmode, ignore);
4628 /* If this call was expanded already by preexpand_calls,
4629 just return the result we got. */
4630 if (CALL_EXPR_RTL (exp) != 0)
4631 return CALL_EXPR_RTL (exp);
4632 return expand_call (exp, target, ignore);
4634 case NON_LVALUE_EXPR:
4637 case REFERENCE_EXPR:
4638 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4639 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4640 if (TREE_CODE (type) == UNION_TYPE)
4642 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4645 if (mode == BLKmode)
4647 if (TYPE_SIZE (type) == 0
4648 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4650 target = assign_stack_temp (BLKmode,
4651 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4652 + BITS_PER_UNIT - 1)
4653 / BITS_PER_UNIT, 0);
4656 target = gen_reg_rtx (mode);
4658 if (GET_CODE (target) == MEM)
4659 /* Store data into beginning of memory target. */
4660 store_expr (TREE_OPERAND (exp, 0),
4661 change_address (target, TYPE_MODE (valtype), 0), 0);
4663 else if (GET_CODE (target) == REG)
4664 /* Store this field into a union of the proper type. */
4665 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4666 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4668 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4672 /* Return the entire union. */
4675 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4676 if (GET_MODE (op0) == mode)
4678 /* If arg is a constant integer being extended from a narrower mode,
4679 we must really truncate to get the extended bits right. Otherwise
4680 (unsigned long) (unsigned char) ("\377"[0])
4681 would come out as ffffffff. */
4682 if (GET_MODE (op0) == VOIDmode
4683 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4684 < GET_MODE_BITSIZE (mode)))
4686 /* MODE must be narrower than HOST_BITS_PER_INT. */
4687 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4689 if (width < HOST_BITS_PER_WIDE_INT)
4691 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4692 : CONST_DOUBLE_LOW (op0));
4693 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4694 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4695 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4697 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4699 op0 = GEN_INT (val);
4703 op0 = (simplify_unary_operation
4704 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4705 ? ZERO_EXTEND : SIGN_EXTEND),
4707 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4712 if (GET_MODE (op0) == VOIDmode)
4714 if (modifier == EXPAND_INITIALIZER)
4715 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4716 if (flag_force_mem && GET_CODE (op0) == MEM)
4717 op0 = copy_to_reg (op0);
4720 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4722 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4726 /* We come here from MINUS_EXPR when the second operand is a constant. */
4728 this_optab = add_optab;
4730 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4731 something else, make sure we add the register to the constant and
4732 then to the other thing. This case can occur during strength
4733 reduction and doing it this way will produce better code if the
4734 frame pointer or argument pointer is eliminated.
4736 fold-const.c will ensure that the constant is always in the inner
4737 PLUS_EXPR, so the only case we need to do anything about is if
4738 sp, ap, or fp is our second argument, in which case we must swap
4739 the innermost first argument and our second argument. */
4741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4742 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4743 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4744 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4745 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4746 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4748 tree t = TREE_OPERAND (exp, 1);
4750 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4751 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4754 /* If the result is to be Pmode and we are adding an integer to
4755 something, we might be forming a constant. So try to use
4756 plus_constant. If it produces a sum and we can't accept it,
4757 use force_operand. This allows P = &ARR[const] to generate
4758 efficient code on machines where a SYMBOL_REF is not a valid
4761 If this is an EXPAND_SUM call, always return the sum. */
4762 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4765 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4766 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4767 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4769 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4771 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4772 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4773 op1 = force_operand (op1, target);
4777 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4778 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4779 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4781 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4783 if (! CONSTANT_P (op0))
4785 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4786 VOIDmode, modifier);
4787 /* Don't go to both_summands if modifier
4788 says it's not right to return a PLUS. */
4789 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4793 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4794 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4795 op0 = force_operand (op0, target);
4800 /* No sense saving up arithmetic to be done
4801 if it's all in the wrong mode to form part of an address.
4802 And force_operand won't know whether to sign-extend or
4804 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4808 preexpand_calls (exp);
4809 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4812 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4813 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4816 /* Make sure any term that's a sum with a constant comes last. */
4817 if (GET_CODE (op0) == PLUS
4818 && CONSTANT_P (XEXP (op0, 1)))
4824 /* If adding to a sum including a constant,
4825 associate it to put the constant outside. */
4826 if (GET_CODE (op1) == PLUS
4827 && CONSTANT_P (XEXP (op1, 1)))
4829 rtx constant_term = const0_rtx;
4831 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4834 /* Ensure that MULT comes first if there is one. */
4835 else if (GET_CODE (op0) == MULT)
4836 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4838 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4840 /* Let's also eliminate constants from op0 if possible. */
4841 op0 = eliminate_constant_term (op0, &constant_term);
4843 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4844 their sum should be a constant. Form it into OP1, since the
4845 result we want will then be OP0 + OP1. */
4847 temp = simplify_binary_operation (PLUS, mode, constant_term,
4852 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4855 /* Put a constant term last and put a multiplication first. */
4856 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4857 temp = op1, op1 = op0, op0 = temp;
4859 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4860 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4863 /* For initializers, we are allowed to return a MINUS of two
4864 symbolic constants. Here we handle all cases when both operands
4866 /* Handle difference of two symbolic constants,
4867 for the sake of an initializer. */
4868 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4869 && really_constant_p (TREE_OPERAND (exp, 0))
4870 && really_constant_p (TREE_OPERAND (exp, 1)))
4872 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4873 VOIDmode, modifier);
4874 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4875 VOIDmode, modifier);
4877 /* If one operand is a CONST_INT, put it last. */
4878 if (GET_CODE (op0) == CONST_INT)
4879 temp = op0, op0 = op1, op1 = temp;
4881 /* If the last operand is a CONST_INT, use plus_constant of
4882 the negated constant. Else make the MINUS. */
4883 if (GET_CODE (op1) == CONST_INT)
4884 return plus_constant (op0, - INTVAL (op1));
4886 return gen_rtx (MINUS, mode, op0, op1);
4888 /* Convert A - const to A + (-const). */
4889 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4891 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4892 fold (build1 (NEGATE_EXPR, type,
4893 TREE_OPERAND (exp, 1))));
4896 this_optab = sub_optab;
4900 preexpand_calls (exp);
4901 /* If first operand is constant, swap them.
4902 Thus the following special case checks need only
4903 check the second operand. */
4904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4906 register tree t1 = TREE_OPERAND (exp, 0);
4907 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4908 TREE_OPERAND (exp, 1) = t1;
4911 /* Attempt to return something suitable for generating an
4912 indexed address, for machines that support that. */
4914 if (modifier == EXPAND_SUM && mode == Pmode
4915 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4916 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4918 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4920 /* Apply distributive law if OP0 is x+c. */
4921 if (GET_CODE (op0) == PLUS
4922 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4923 return gen_rtx (PLUS, mode,
4924 gen_rtx (MULT, mode, XEXP (op0, 0),
4925 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4926 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4927 * INTVAL (XEXP (op0, 1))));
4929 if (GET_CODE (op0) != REG)
4930 op0 = force_operand (op0, NULL_RTX);
4931 if (GET_CODE (op0) != REG)
4932 op0 = copy_to_mode_reg (mode, op0);
4934 return gen_rtx (MULT, mode, op0,
4935 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4938 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4941 /* Check for multiplying things that have been extended
4942 from a narrower type. If this machine supports multiplying
4943 in that narrower type with a result in the desired type,
4944 do it that way, and avoid the explicit type-conversion. */
4945 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4946 && TREE_CODE (type) == INTEGER_TYPE
4947 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4948 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4949 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4950 && int_fits_type_p (TREE_OPERAND (exp, 1),
4951 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4952 /* Don't use a widening multiply if a shift will do. */
4953 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4954 > HOST_BITS_PER_WIDE_INT)
4955 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4957 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4958 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4960 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4961 /* If both operands are extended, they must either both
4962 be zero-extended or both be sign-extended. */
4963 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4965 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4967 enum machine_mode innermode
4968 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4969 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4970 ? umul_widen_optab : smul_widen_optab);
4971 if (mode == GET_MODE_WIDER_MODE (innermode)
4972 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4974 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4975 NULL_RTX, VOIDmode, 0);
4976 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4977 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4980 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4981 NULL_RTX, VOIDmode, 0);
4985 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4986 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4987 return expand_mult (mode, op0, op1, target, unsignedp);
4989 case TRUNC_DIV_EXPR:
4990 case FLOOR_DIV_EXPR:
4992 case ROUND_DIV_EXPR:
4993 case EXACT_DIV_EXPR:
4994 preexpand_calls (exp);
4995 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4997 /* Possible optimization: compute the dividend with EXPAND_SUM
4998 then if the divisor is constant can optimize the case
4999 where some terms of the dividend have coeffs divisible by it. */
5000 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5001 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5002 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5005 this_optab = flodiv_optab;
5008 case TRUNC_MOD_EXPR:
5009 case FLOOR_MOD_EXPR:
5011 case ROUND_MOD_EXPR:
5012 preexpand_calls (exp);
5013 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5015 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5016 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5017 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5019 case FIX_ROUND_EXPR:
5020 case FIX_FLOOR_EXPR:
5022 abort (); /* Not used for C. */
5024 case FIX_TRUNC_EXPR:
5025 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5027 target = gen_reg_rtx (mode);
5028 expand_fix (target, op0, unsignedp);
5032 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5034 target = gen_reg_rtx (mode);
5035 /* expand_float can't figure out what to do if FROM has VOIDmode.
5036 So give it the correct mode. With -O, cse will optimize this. */
5037 if (GET_MODE (op0) == VOIDmode)
5038 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5040 expand_float (target, op0,
5041 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5045 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5046 temp = expand_unop (mode, neg_optab, op0, target, 0);
5052 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5054 /* Handle complex values specially. */
5056 enum machine_mode opmode
5057 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5059 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5060 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5061 return expand_complex_abs (opmode, op0, target, unsignedp);
5064 /* Unsigned abs is simply the operand. Testing here means we don't
5065 risk generating incorrect code below. */
5066 if (TREE_UNSIGNED (type))
5069 /* First try to do it with a special abs instruction. */
5070 temp = expand_unop (mode, abs_optab, op0, target, 0);
5074 /* If this machine has expensive jumps, we can do integer absolute
5075 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5076 where W is the width of MODE. */
5078 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5080 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5081 size_int (GET_MODE_BITSIZE (mode) - 1),
5084 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5087 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5094 /* If that does not win, use conditional jump and negate. */
5095 target = original_target;
5096 temp = gen_label_rtx ();
5097 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5098 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5099 || (GET_CODE (target) == REG
5100 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5101 target = gen_reg_rtx (mode);
5102 emit_move_insn (target, op0);
5103 emit_cmp_insn (target,
5104 expand_expr (convert (type, integer_zero_node),
5105 NULL_RTX, VOIDmode, 0),
5106 GE, NULL_RTX, mode, 0, 0);
5108 emit_jump_insn (gen_bge (temp));
5109 op0 = expand_unop (mode, neg_optab, target, target, 0);
5111 emit_move_insn (target, op0);
5118 target = original_target;
5119 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5120 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5121 || (GET_CODE (target) == REG
5122 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5123 target = gen_reg_rtx (mode);
5124 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5125 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5127 /* First try to do it with a special MIN or MAX instruction.
5128 If that does not win, use a conditional jump to select the proper
5130 this_optab = (TREE_UNSIGNED (type)
5131 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5132 : (code == MIN_EXPR ? smin_optab : smax_optab));
5134 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5140 emit_move_insn (target, op0);
5141 op0 = gen_label_rtx ();
5142 /* If this mode is an integer too wide to compare properly,
5143 compare word by word. Rely on cse to optimize constant cases. */
5144 if (GET_MODE_CLASS (mode) == MODE_INT
5145 && !can_compare_p (mode))
5147 if (code == MAX_EXPR)
5148 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
5150 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5151 emit_move_insn (target, op1);
5155 if (code == MAX_EXPR)
5156 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5157 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5158 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5160 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5161 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5162 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5163 if (temp == const0_rtx)
5164 emit_move_insn (target, op1);
5165 else if (temp != const_true_rtx)
5167 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5168 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5171 emit_move_insn (target, op1);
5177 /* ??? Can optimize when the operand of this is a bitwise operation,
5178 by using a different bitwise operation. */
5180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5181 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5188 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5193 /* ??? Can optimize bitwise operations with one arg constant.
5194 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5195 and (a bitwise1 b) bitwise2 b (etc)
5196 but that is probably not worth while. */
5198 /* BIT_AND_EXPR is for bitwise anding.
5199 TRUTH_AND_EXPR is for anding two boolean values
5200 when we want in all cases to compute both of them.
5201 In general it is fastest to do TRUTH_AND_EXPR by
5202 computing both operands as actual zero-or-1 values
5203 and then bitwise anding. In cases where there cannot
5204 be any side effects, better code would be made by
5205 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5206 but the question is how to recognize those cases. */
5208 /* TRUTH_AND_EXPR can have a result whose mode doesn't match
5209 th operands. If so, don't use our target. */
5210 case TRUTH_AND_EXPR:
5211 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5214 this_optab = and_optab;
5217 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5219 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5222 this_optab = ior_optab;
5225 case TRUTH_XOR_EXPR:
5226 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5229 this_optab = xor_optab;
5236 preexpand_calls (exp);
5237 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5239 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5240 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5243 /* Could determine the answer when only additive constants differ.
5244 Also, the addition of one can be handled by changing the condition. */
5251 preexpand_calls (exp);
5252 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5255 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5256 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5258 && GET_CODE (original_target) == REG
5259 && (GET_MODE (original_target)
5260 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5262 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5263 if (temp != original_target)
5264 temp = copy_to_reg (temp);
5265 op1 = gen_label_rtx ();
5266 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5267 GET_MODE (temp), unsignedp, 0);
5268 emit_jump_insn (gen_beq (op1));
5269 emit_move_insn (temp, const1_rtx);
5273 /* If no set-flag instruction, must generate a conditional
5274 store into a temporary variable. Drop through
5275 and handle this like && and ||. */
5277 case TRUTH_ANDIF_EXPR:
5278 case TRUTH_ORIF_EXPR:
5280 && (target == 0 || ! safe_from_p (target, exp)
5281 /* Make sure we don't have a hard reg (such as function's return
5282 value) live across basic blocks, if not optimizing. */
5283 || (!optimize && GET_CODE (target) == REG
5284 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5285 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5288 emit_clr_insn (target);
5290 op1 = gen_label_rtx ();
5291 jumpifnot (exp, op1);
5294 emit_0_to_1_insn (target);
5297 return ignore ? const0_rtx : target;
5299 case TRUTH_NOT_EXPR:
5300 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5301 /* The parser is careful to generate TRUTH_NOT_EXPR
5302 only with operands that are always zero or one. */
5303 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5304 target, 1, OPTAB_LIB_WIDEN);
5310 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5312 return expand_expr (TREE_OPERAND (exp, 1),
5313 (ignore ? const0_rtx : target),
5318 /* Note that COND_EXPRs whose type is a structure or union
5319 are required to be constructed to contain assignments of
5320 a temporary variable, so that we can evaluate them here
5321 for side effect only. If type is void, we must do likewise. */
5323 /* If an arm of the branch requires a cleanup,
5324 only that cleanup is performed. */
5327 tree binary_op = 0, unary_op = 0;
5328 tree old_cleanups = cleanups_this_call;
5329 cleanups_this_call = 0;
5331 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5332 convert it to our mode, if necessary. */
5333 if (integer_onep (TREE_OPERAND (exp, 1))
5334 && integer_zerop (TREE_OPERAND (exp, 2))
5335 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5339 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5344 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5345 if (GET_MODE (op0) == mode)
5348 target = gen_reg_rtx (mode);
5349 convert_move (target, op0, unsignedp);
5353 /* If we are not to produce a result, we have no target. Otherwise,
5354 if a target was specified use it; it will not be used as an
5355 intermediate target unless it is safe. If no target, use a
5360 else if (original_target
5361 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5362 temp = original_target;
5363 else if (mode == BLKmode)
5365 if (TYPE_SIZE (type) == 0
5366 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5369 temp = assign_stack_temp (BLKmode,
5370 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5371 + BITS_PER_UNIT - 1)
5372 / BITS_PER_UNIT, 0);
5373 MEM_IN_STRUCT_P (temp)
5374 = (TREE_CODE (type) == RECORD_TYPE
5375 || TREE_CODE (type) == UNION_TYPE
5376 || TREE_CODE (type) == QUAL_UNION_TYPE
5377 || TREE_CODE (type) == ARRAY_TYPE);
5380 temp = gen_reg_rtx (mode);
5382 /* Check for X ? A + B : A. If we have this, we can copy
5383 A to the output and conditionally add B. Similarly for unary
5384 operations. Don't do this if X has side-effects because
5385 those side effects might affect A or B and the "?" operation is
5386 a sequence point in ANSI. (We test for side effects later.) */
5388 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5389 && operand_equal_p (TREE_OPERAND (exp, 2),
5390 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5391 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5392 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5393 && operand_equal_p (TREE_OPERAND (exp, 1),
5394 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5395 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5396 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5397 && operand_equal_p (TREE_OPERAND (exp, 2),
5398 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5399 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5400 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5401 && operand_equal_p (TREE_OPERAND (exp, 1),
5402 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5403 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5405 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5406 operation, do this as A + (X != 0). Similarly for other simple
5407 binary operators. */
5408 if (temp && singleton && binary_op
5409 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5410 && (TREE_CODE (binary_op) == PLUS_EXPR
5411 || TREE_CODE (binary_op) == MINUS_EXPR
5412 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5413 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5414 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5415 && integer_onep (TREE_OPERAND (binary_op, 1))
5416 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5419 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5420 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5421 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5422 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5425 /* If we had X ? A : A + 1, do this as A + (X == 0).
5427 We have to invert the truth value here and then put it
5428 back later if do_store_flag fails. We cannot simply copy
5429 TREE_OPERAND (exp, 0) to another variable and modify that
5430 because invert_truthvalue can modify the tree pointed to
5432 if (singleton == TREE_OPERAND (exp, 1))
5433 TREE_OPERAND (exp, 0)
5434 = invert_truthvalue (TREE_OPERAND (exp, 0));
5436 result = do_store_flag (TREE_OPERAND (exp, 0),
5437 (safe_from_p (temp, singleton)
5439 mode, BRANCH_COST <= 1);
5443 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5444 return expand_binop (mode, boptab, op1, result, temp,
5445 unsignedp, OPTAB_LIB_WIDEN);
5447 else if (singleton == TREE_OPERAND (exp, 1))
5448 TREE_OPERAND (exp, 0)
5449 = invert_truthvalue (TREE_OPERAND (exp, 0));
5453 op0 = gen_label_rtx ();
5455 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5459 /* If the target conflicts with the other operand of the
5460 binary op, we can't use it. Also, we can't use the target
5461 if it is a hard register, because evaluating the condition
5462 might clobber it. */
5464 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5465 || (GET_CODE (temp) == REG
5466 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5467 temp = gen_reg_rtx (mode);
5468 store_expr (singleton, temp, 0);
5471 expand_expr (singleton,
5472 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5473 if (cleanups_this_call)
5475 sorry ("aggregate value in COND_EXPR");
5476 cleanups_this_call = 0;
5478 if (singleton == TREE_OPERAND (exp, 1))
5479 jumpif (TREE_OPERAND (exp, 0), op0);
5481 jumpifnot (TREE_OPERAND (exp, 0), op0);
5483 if (binary_op && temp == 0)
5484 /* Just touch the other operand. */
5485 expand_expr (TREE_OPERAND (binary_op, 1),
5486 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5488 store_expr (build (TREE_CODE (binary_op), type,
5489 make_tree (type, temp),
5490 TREE_OPERAND (binary_op, 1)),
5493 store_expr (build1 (TREE_CODE (unary_op), type,
5494 make_tree (type, temp)),
5499 /* This is now done in jump.c and is better done there because it
5500 produces shorter register lifetimes. */
5502 /* Check for both possibilities either constants or variables
5503 in registers (but not the same as the target!). If so, can
5504 save branches by assigning one, branching, and assigning the
5506 else if (temp && GET_MODE (temp) != BLKmode
5507 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5508 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5509 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5510 && DECL_RTL (TREE_OPERAND (exp, 1))
5511 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5512 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5513 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5514 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5515 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5516 && DECL_RTL (TREE_OPERAND (exp, 2))
5517 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5518 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5520 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5521 temp = gen_reg_rtx (mode);
5522 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5523 jumpifnot (TREE_OPERAND (exp, 0), op0);
5524 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5528 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5529 comparison operator. If we have one of these cases, set the
5530 output to A, branch on A (cse will merge these two references),
5531 then set the output to FOO. */
5533 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5534 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5535 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5536 TREE_OPERAND (exp, 1), 0)
5537 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5538 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5540 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5541 temp = gen_reg_rtx (mode);
5542 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5543 jumpif (TREE_OPERAND (exp, 0), op0);
5544 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5548 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5549 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5550 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5551 TREE_OPERAND (exp, 2), 0)
5552 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5553 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5555 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5556 temp = gen_reg_rtx (mode);
5557 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5558 jumpifnot (TREE_OPERAND (exp, 0), op0);
5559 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5564 op1 = gen_label_rtx ();
5565 jumpifnot (TREE_OPERAND (exp, 0), op0);
5567 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5569 expand_expr (TREE_OPERAND (exp, 1),
5570 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5571 if (cleanups_this_call)
5573 sorry ("aggregate value in COND_EXPR");
5574 cleanups_this_call = 0;
5578 emit_jump_insn (gen_jump (op1));
5582 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5584 expand_expr (TREE_OPERAND (exp, 2),
5585 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5588 if (cleanups_this_call)
5590 sorry ("aggregate value in COND_EXPR");
5591 cleanups_this_call = 0;
5597 cleanups_this_call = old_cleanups;
5603 /* Something needs to be initialized, but we didn't know
5604 where that thing was when building the tree. For example,
5605 it could be the return value of a function, or a parameter
5606 to a function which lays down in the stack, or a temporary
5607 variable which must be passed by reference.
5609 We guarantee that the expression will either be constructed
5610 or copied into our original target. */
5612 tree slot = TREE_OPERAND (exp, 0);
5615 if (TREE_CODE (slot) != VAR_DECL)
5620 if (DECL_RTL (slot) != 0)
5622 target = DECL_RTL (slot);
5623 /* If we have already expanded the slot, so don't do
5625 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5630 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5631 /* All temp slots at this level must not conflict. */
5632 preserve_temp_slots (target);
5633 DECL_RTL (slot) = target;
5636 /* We set IGNORE when we know that we're already
5637 doing this for a cleanup. */
5640 /* Since SLOT is not known to the called function
5641 to belong to its stack frame, we must build an explicit
5642 cleanup. This case occurs when we must build up a reference
5643 to pass the reference as an argument. In this case,
5644 it is very likely that such a reference need not be
5647 if (TREE_OPERAND (exp, 2) == 0)
5648 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5649 if (TREE_OPERAND (exp, 2))
5650 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5651 cleanups_this_call);
5656 /* This case does occur, when expanding a parameter which
5657 needs to be constructed on the stack. The target
5658 is the actual stack address that we want to initialize.
5659 The function we call will perform the cleanup in this case. */
5661 /* If we have already assigned it space, use that space,
5662 not target that we were passed in, as our target
5663 parameter is only a hint. */
5664 if (DECL_RTL (slot) != 0)
5666 target = DECL_RTL (slot);
5667 /* If we have already expanded the slot, so don't do
5669 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5673 DECL_RTL (slot) = target;
5676 exp1 = TREE_OPERAND (exp, 1);
5677 /* Mark it as expanded. */
5678 TREE_OPERAND (exp, 1) = NULL_TREE;
5680 return expand_expr (exp1, target, tmode, modifier);
5685 tree lhs = TREE_OPERAND (exp, 0);
5686 tree rhs = TREE_OPERAND (exp, 1);
5687 tree noncopied_parts = 0;
5688 tree lhs_type = TREE_TYPE (lhs);
5690 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5691 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5692 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5693 TYPE_NONCOPIED_PARTS (lhs_type));
5694 while (noncopied_parts != 0)
5696 expand_assignment (TREE_VALUE (noncopied_parts),
5697 TREE_PURPOSE (noncopied_parts), 0, 0);
5698 noncopied_parts = TREE_CHAIN (noncopied_parts);
5705 /* If lhs is complex, expand calls in rhs before computing it.
5706 That's so we don't compute a pointer and save it over a call.
5707 If lhs is simple, compute it first so we can give it as a
5708 target if the rhs is just a call. This avoids an extra temp and copy
5709 and that prevents a partial-subsumption which makes bad code.
5710 Actually we could treat component_ref's of vars like vars. */
5712 tree lhs = TREE_OPERAND (exp, 0);
5713 tree rhs = TREE_OPERAND (exp, 1);
5714 tree noncopied_parts = 0;
5715 tree lhs_type = TREE_TYPE (lhs);
5719 if (TREE_CODE (lhs) != VAR_DECL
5720 && TREE_CODE (lhs) != RESULT_DECL
5721 && TREE_CODE (lhs) != PARM_DECL)
5722 preexpand_calls (exp);
5724 /* Check for |= or &= of a bitfield of size one into another bitfield
5725 of size 1. In this case, (unless we need the result of the
5726 assignment) we can do this more efficiently with a
5727 test followed by an assignment, if necessary.
5729 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5730 things change so we do, this code should be enhanced to
5733 && TREE_CODE (lhs) == COMPONENT_REF
5734 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5735 || TREE_CODE (rhs) == BIT_AND_EXPR)
5736 && TREE_OPERAND (rhs, 0) == lhs
5737 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5738 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5739 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5741 rtx label = gen_label_rtx ();
5743 do_jump (TREE_OPERAND (rhs, 1),
5744 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5745 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5746 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5747 (TREE_CODE (rhs) == BIT_IOR_EXPR
5749 : integer_zero_node)),
5751 do_pending_stack_adjust ();
5756 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5757 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5758 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5759 TYPE_NONCOPIED_PARTS (lhs_type));
5761 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5762 while (noncopied_parts != 0)
5764 expand_assignment (TREE_PURPOSE (noncopied_parts),
5765 TREE_VALUE (noncopied_parts), 0, 0);
5766 noncopied_parts = TREE_CHAIN (noncopied_parts);
5771 case PREINCREMENT_EXPR:
5772 case PREDECREMENT_EXPR:
5773 return expand_increment (exp, 0);
5775 case POSTINCREMENT_EXPR:
5776 case POSTDECREMENT_EXPR:
5777 /* Faster to treat as pre-increment if result is not used. */
5778 return expand_increment (exp, ! ignore);
5781 /* Are we taking the address of a nested function? */
5782 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5783 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5785 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5786 op0 = force_operand (op0, target);
5790 /* We make sure to pass const0_rtx down if we came in with
5791 ignore set, to avoid doing the cleanups twice for something. */
5792 op0 = expand_expr (TREE_OPERAND (exp, 0),
5793 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5794 (modifier == EXPAND_INITIALIZER
5795 ? modifier : EXPAND_CONST_ADDRESS));
5797 /* We would like the object in memory. If it is a constant,
5798 we can have it be statically allocated into memory. For
5799 a non-constant (REG or SUBREG), we need to allocate some
5800 memory and store the value into it. */
5802 if (CONSTANT_P (op0))
5803 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5806 /* These cases happen in Fortran. Is that legitimate?
5807 Should Fortran work in another way?
5808 Do they happen in C? */
5809 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5810 || GET_CODE (op0) == CONCAT)
5812 /* If this object is in a register, it must be not
5814 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5815 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5817 = assign_stack_temp (inner_mode,
5818 int_size_in_bytes (inner_type), 1);
5820 emit_move_insn (memloc, op0);
5824 if (GET_CODE (op0) != MEM)
5827 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5828 return XEXP (op0, 0);
5829 op0 = force_operand (XEXP (op0, 0), target);
5831 if (flag_force_addr && GET_CODE (op0) != REG)
5832 return force_reg (Pmode, op0);
5835 case ENTRY_VALUE_EXPR:
5838 /* COMPLEX type for Extended Pascal & Fortran */
5841 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5845 /* Get the rtx code of the operands. */
5846 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5847 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5850 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5852 prev = get_last_insn ();
5854 /* Tell flow that the whole of the destination is being set. */
5855 if (GET_CODE (target) == REG)
5856 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5858 /* Move the real (op0) and imaginary (op1) parts to their location. */
5859 emit_move_insn (gen_realpart (mode, target), op0);
5860 emit_move_insn (gen_imagpart (mode, target), op1);
5862 /* Complex construction should appear as a single unit. */
5863 if (GET_CODE (target) != CONCAT)
5864 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5865 each with a separate pseudo as destination.
5866 It's not correct for flow to treat them as a unit. */
5873 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5874 return gen_realpart (mode, op0);
5877 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5878 return gen_imagpart (mode, op0);
5882 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5886 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5889 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5891 prev = get_last_insn ();
5893 /* Tell flow that the whole of the destination is being set. */
5894 if (GET_CODE (target) == REG)
5895 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5897 /* Store the realpart and the negated imagpart to target. */
5898 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5900 imag_t = gen_imagpart (mode, target);
5901 temp = expand_unop (mode, neg_optab,
5902 gen_imagpart (mode, op0), imag_t, 0);
5904 emit_move_insn (imag_t, temp);
5906 /* Conjugate should appear as a single unit */
5907 if (GET_CODE (target) != CONCAT)
5908 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5909 each with a separate pseudo as destination.
5910 It's not correct for flow to treat them as a unit. */
5917 op0 = CONST0_RTX (tmode);
5923 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5926 /* Here to do an ordinary binary operator, generating an instruction
5927 from the optab already placed in `this_optab'. */
5929 preexpand_calls (exp);
5930 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5932 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5933 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5935 temp = expand_binop (mode, this_optab, op0, op1, target,
5936 unsignedp, OPTAB_LIB_WIDEN);
5943 /* Emit bytecode to evaluate the given expression EXP to the stack. */
5945 bc_expand_expr (exp)
5948 enum tree_code code;
5951 struct binary_operator *binoptab;
5952 struct unary_operator *unoptab;
5953 struct increment_operator *incroptab;
5954 struct bc_label *lab, *lab1;
5955 enum bytecode_opcode opcode;
5958 code = TREE_CODE (exp);
5964 if (DECL_RTL (exp) == 0)
5966 error_with_decl (exp, "prior parameter's size depends on `%s'");
5970 bc_load_parmaddr (DECL_RTL (exp));
5971 bc_load_memory (TREE_TYPE (exp), exp);
5977 if (DECL_RTL (exp) == 0)
5981 if (BYTECODE_LABEL (DECL_RTL (exp)))
5982 bc_load_externaddr (DECL_RTL (exp));
5984 bc_load_localaddr (DECL_RTL (exp));
5986 if (TREE_PUBLIC (exp))
5987 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5988 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5990 bc_load_localaddr (DECL_RTL (exp));
5992 bc_load_memory (TREE_TYPE (exp), exp);
5997 #ifdef DEBUG_PRINT_CODE
5998 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6000 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6002 : TYPE_MODE (TREE_TYPE (exp)))],
6003 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6009 #ifdef DEBUG_PRINT_CODE
6010 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6012 /* FIX THIS: find a better way to pass real_cst's. -bson */
6013 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6014 (double) TREE_REAL_CST (exp));
6023 /* We build a call description vector describing the type of
6024 the return value and of the arguments; this call vector,
6025 together with a pointer to a location for the return value
6026 and the base of the argument list, is passed to the low
6027 level machine dependent call subroutine, which is responsible
6028 for putting the arguments wherever real functions expect
6029 them, as well as getting the return value back. */
6031 tree calldesc = 0, arg;
6035 /* Push the evaluated args on the evaluation stack in reverse
6036 order. Also make an entry for each arg in the calldesc
6037 vector while we're at it. */
6039 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6041 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6044 bc_expand_expr (TREE_VALUE (arg));
6046 calldesc = tree_cons ((tree) 0,
6047 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6049 calldesc = tree_cons ((tree) 0,
6050 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6054 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6056 /* Allocate a location for the return value and push its
6057 address on the evaluation stack. Also make an entry
6058 at the front of the calldesc for the return value type. */
6060 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6061 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6062 bc_load_localaddr (retval);
6064 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6065 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6067 /* Prepend the argument count. */
6068 calldesc = tree_cons ((tree) 0,
6069 build_int_2 (nargs, 0),
6072 /* Push the address of the call description vector on the stack. */
6073 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6074 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6075 build_index_type (build_int_2 (nargs * 2, 0)));
6076 r = output_constant_def (calldesc);
6077 bc_load_externaddr (r);
6079 /* Push the address of the function to be called. */
6080 bc_expand_expr (TREE_OPERAND (exp, 0));
6082 /* Call the function, popping its address and the calldesc vector
6083 address off the evaluation stack in the process. */
6084 bc_emit_instruction (call);
6086 /* Pop the arguments off the stack. */
6087 bc_adjust_stack (nargs);
6089 /* Load the return value onto the stack. */
6090 bc_load_localaddr (retval);
6091 bc_load_memory (type, TREE_OPERAND (exp, 0));
6097 if (!SAVE_EXPR_RTL (exp))
6099 /* First time around: copy to local variable */
6100 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6101 TYPE_ALIGN (TREE_TYPE(exp)));
6102 bc_expand_expr (TREE_OPERAND (exp, 0));
6103 bc_emit_instruction (duplicate);
6105 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6106 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6110 /* Consecutive reference: use saved copy */
6111 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6112 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6117 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6118 how are they handled instead? */
6121 TREE_USED (exp) = 1;
6122 bc_expand_expr (STMT_BODY (exp));
6129 bc_expand_expr (TREE_OPERAND (exp, 0));
6130 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6135 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6140 bc_expand_address (TREE_OPERAND (exp, 0));
6145 bc_expand_expr (TREE_OPERAND (exp, 0));
6146 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6151 bc_expand_expr (bc_canonicalize_array_ref (exp));
6156 bc_expand_component_address (exp);
6158 /* If we have a bitfield, generate a proper load */
6159 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6164 bc_expand_expr (TREE_OPERAND (exp, 0));
6165 bc_emit_instruction (drop);
6166 bc_expand_expr (TREE_OPERAND (exp, 1));
6171 bc_expand_expr (TREE_OPERAND (exp, 0));
6172 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6173 lab = bc_get_bytecode_label ();
6174 bc_emit_bytecode (xjumpifnot);
6175 bc_emit_bytecode_labelref (lab);
6177 #ifdef DEBUG_PRINT_CODE
6178 fputc ('\n', stderr);
6180 bc_expand_expr (TREE_OPERAND (exp, 1));
6181 lab1 = bc_get_bytecode_label ();
6182 bc_emit_bytecode (jump);
6183 bc_emit_bytecode_labelref (lab1);
6185 #ifdef DEBUG_PRINT_CODE
6186 fputc ('\n', stderr);
6189 bc_emit_bytecode_labeldef (lab);
6190 bc_expand_expr (TREE_OPERAND (exp, 2));
6191 bc_emit_bytecode_labeldef (lab1);
6194 case TRUTH_ANDIF_EXPR:
6196 opcode = xjumpifnot;
6199 case TRUTH_ORIF_EXPR:
6206 binoptab = optab_plus_expr;
6211 binoptab = optab_minus_expr;
6216 binoptab = optab_mult_expr;
6219 case TRUNC_DIV_EXPR:
6220 case FLOOR_DIV_EXPR:
6222 case ROUND_DIV_EXPR:
6223 case EXACT_DIV_EXPR:
6225 binoptab = optab_trunc_div_expr;
6228 case TRUNC_MOD_EXPR:
6229 case FLOOR_MOD_EXPR:
6231 case ROUND_MOD_EXPR:
6233 binoptab = optab_trunc_mod_expr;
6236 case FIX_ROUND_EXPR:
6237 case FIX_FLOOR_EXPR:
6239 abort (); /* Not used for C. */
6241 case FIX_TRUNC_EXPR:
6248 abort (); /* FIXME */
6252 binoptab = optab_rdiv_expr;
6257 binoptab = optab_bit_and_expr;
6262 binoptab = optab_bit_ior_expr;
6267 binoptab = optab_bit_xor_expr;
6272 binoptab = optab_lshift_expr;
6277 binoptab = optab_rshift_expr;
6280 case TRUTH_AND_EXPR:
6282 binoptab = optab_truth_and_expr;
6287 binoptab = optab_truth_or_expr;
6292 binoptab = optab_lt_expr;
6297 binoptab = optab_le_expr;
6302 binoptab = optab_ge_expr;
6307 binoptab = optab_gt_expr;
6312 binoptab = optab_eq_expr;
6317 binoptab = optab_ne_expr;
6322 unoptab = optab_negate_expr;
6327 unoptab = optab_bit_not_expr;
6330 case TRUTH_NOT_EXPR:
6332 unoptab = optab_truth_not_expr;
6335 case PREDECREMENT_EXPR:
6337 incroptab = optab_predecrement_expr;
6340 case PREINCREMENT_EXPR:
6342 incroptab = optab_preincrement_expr;
6345 case POSTDECREMENT_EXPR:
6347 incroptab = optab_postdecrement_expr;
6350 case POSTINCREMENT_EXPR:
6352 incroptab = optab_postincrement_expr;
6357 bc_expand_constructor (exp);
6367 tree vars = TREE_OPERAND (exp, 0);
6368 int vars_need_expansion = 0;
6370 /* Need to open a binding contour here because
6371 if there are any cleanups they most be contained here. */
6372 expand_start_bindings (0);
6374 /* Mark the corresponding BLOCK for output. */
6375 if (TREE_OPERAND (exp, 2) != 0)
6376 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6378 /* If VARS have not yet been expanded, expand them now. */
6381 if (DECL_RTL (vars) == 0)
6383 vars_need_expansion = 1;
6384 bc_expand_decl (vars, 0);
6386 bc_expand_decl_init (vars);
6387 vars = TREE_CHAIN (vars);
6390 bc_expand_expr (TREE_OPERAND (exp, 1));
6392 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6402 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6403 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6409 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6415 bc_expand_expr (TREE_OPERAND (exp, 0));
6416 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6417 lab = bc_get_bytecode_label ();
6419 bc_emit_instruction (duplicate);
6420 bc_emit_bytecode (opcode);
6421 bc_emit_bytecode_labelref (lab);
6423 #ifdef DEBUG_PRINT_CODE
6424 fputc ('\n', stderr);
6427 bc_emit_instruction (drop);
6429 bc_expand_expr (TREE_OPERAND (exp, 1));
6430 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6431 bc_emit_bytecode_labeldef (lab);
6437 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6439 /* Push the quantum. */
6440 bc_expand_expr (TREE_OPERAND (exp, 1));
6442 /* Convert it to the lvalue's type. */
6443 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6445 /* Push the address of the lvalue */
6446 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6448 /* Perform actual increment */
6449 bc_expand_increment (incroptab, type);
6453 /* Return the alignment in bits of EXP, a pointer valued expression.
6454 But don't return more than MAX_ALIGN no matter what.
6455 The alignment returned is, by default, the alignment of the thing that
6456 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6458 Otherwise, look at the expression to see if we can do better, i.e., if the
6459 expression is actually pointing at an object whose alignment is tighter. */
6462 get_pointer_alignment (exp, max_align)
6466 unsigned align, inner;
6468 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6471 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6472 align = MIN (align, max_align);
6476 switch (TREE_CODE (exp))
6480 case NON_LVALUE_EXPR:
6481 exp = TREE_OPERAND (exp, 0);
6482 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6484 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6485 inner = MIN (inner, max_align);
6486 align = MAX (align, inner);
6490 /* If sum of pointer + int, restrict our maximum alignment to that
6491 imposed by the integer. If not, we can't do any better than
6493 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6496 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6501 exp = TREE_OPERAND (exp, 0);
6505 /* See what we are pointing at and look at its alignment. */
6506 exp = TREE_OPERAND (exp, 0);
6507 if (TREE_CODE (exp) == FUNCTION_DECL)
6508 align = MAX (align, FUNCTION_BOUNDARY);
6509 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6510 align = MAX (align, DECL_ALIGN (exp));
6511 #ifdef CONSTANT_ALIGNMENT
6512 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6513 align = CONSTANT_ALIGNMENT (exp, align);
6515 return MIN (align, max_align);
6523 /* Return the tree node and offset if a given argument corresponds to
6524 a string constant. */
6527 string_constant (arg, ptr_offset)
6533 if (TREE_CODE (arg) == ADDR_EXPR
6534 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6536 *ptr_offset = integer_zero_node;
6537 return TREE_OPERAND (arg, 0);
6539 else if (TREE_CODE (arg) == PLUS_EXPR)
6541 tree arg0 = TREE_OPERAND (arg, 0);
6542 tree arg1 = TREE_OPERAND (arg, 1);
6547 if (TREE_CODE (arg0) == ADDR_EXPR
6548 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6551 return TREE_OPERAND (arg0, 0);
6553 else if (TREE_CODE (arg1) == ADDR_EXPR
6554 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6557 return TREE_OPERAND (arg1, 0);
6564 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6565 way, because it could contain a zero byte in the middle.
6566 TREE_STRING_LENGTH is the size of the character array, not the string.
6568 Unfortunately, string_constant can't access the values of const char
6569 arrays with initializers, so neither can we do so here. */
6579 src = string_constant (src, &offset_node);
6582 max = TREE_STRING_LENGTH (src);
6583 ptr = TREE_STRING_POINTER (src);
6584 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6586 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6587 compute the offset to the following null if we don't know where to
6588 start searching for it. */
6590 for (i = 0; i < max; i++)
6593 /* We don't know the starting offset, but we do know that the string
6594 has no internal zero bytes. We can assume that the offset falls
6595 within the bounds of the string; otherwise, the programmer deserves
6596 what he gets. Subtract the offset from the length of the string,
6598 /* This would perhaps not be valid if we were dealing with named
6599 arrays in addition to literal string constants. */
6600 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6603 /* We have a known offset into the string. Start searching there for
6604 a null character. */
6605 if (offset_node == 0)
6609 /* Did we get a long long offset? If so, punt. */
6610 if (TREE_INT_CST_HIGH (offset_node) != 0)
6612 offset = TREE_INT_CST_LOW (offset_node);
6614 /* If the offset is known to be out of bounds, warn, and call strlen at
6616 if (offset < 0 || offset > max)
6618 warning ("offset outside bounds of constant string");
6621 /* Use strlen to search for the first zero byte. Since any strings
6622 constructed with build_string will have nulls appended, we win even
6623 if we get handed something like (char[4])"abcd".
6625 Since OFFSET is our starting index into the string, no further
6626 calculation is needed. */
6627 return size_int (strlen (ptr + offset));
6630 /* Expand an expression EXP that calls a built-in function,
6631 with result going to TARGET if that's convenient
6632 (and in mode MODE if that's convenient).
6633 SUBTARGET may be used as the target for computing one of EXP's operands.
6634 IGNORE is nonzero if the value is to be ignored. */
6637 expand_builtin (exp, target, subtarget, mode, ignore)
6641 enum machine_mode mode;
6644 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6645 tree arglist = TREE_OPERAND (exp, 1);
6648 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6649 optab builtin_optab;
6651 switch (DECL_FUNCTION_CODE (fndecl))
6656 /* build_function_call changes these into ABS_EXPR. */
6661 case BUILT_IN_FSQRT:
6662 /* If not optimizing, call the library function. */
6667 /* Arg could be wrong type if user redeclared this fcn wrong. */
6668 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6671 /* Stabilize and compute the argument. */
6672 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6673 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6675 exp = copy_node (exp);
6676 arglist = copy_node (arglist);
6677 TREE_OPERAND (exp, 1) = arglist;
6678 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6680 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6682 /* Make a suitable register to place result in. */
6683 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6688 switch (DECL_FUNCTION_CODE (fndecl))
6691 builtin_optab = sin_optab; break;
6693 builtin_optab = cos_optab; break;
6694 case BUILT_IN_FSQRT:
6695 builtin_optab = sqrt_optab; break;
6700 /* Compute into TARGET.
6701 Set TARGET to wherever the result comes back. */
6702 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6703 builtin_optab, op0, target, 0);
6705 /* If we were unable to expand via the builtin, stop the
6706 sequence (without outputting the insns) and break, causing
6707 a call the the library function. */
6714 /* Check the results by default. But if flag_fast_math is turned on,
6715 then assume sqrt will always be called with valid arguments. */
6717 if (! flag_fast_math)
6719 /* Don't define the builtin FP instructions
6720 if your machine is not IEEE. */
6721 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6724 lab1 = gen_label_rtx ();
6726 /* Test the result; if it is NaN, set errno=EDOM because
6727 the argument was not in the domain. */
6728 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6729 emit_jump_insn (gen_beq (lab1));
6733 #ifdef GEN_ERRNO_RTX
6734 rtx errno_rtx = GEN_ERRNO_RTX;
6737 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6740 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6743 /* We can't set errno=EDOM directly; let the library call do it.
6744 Pop the arguments right away in case the call gets deleted. */
6746 expand_call (exp, target, 0);
6753 /* Output the entire sequence. */
6754 insns = get_insns ();
6760 /* __builtin_apply_args returns block of memory allocated on
6761 the stack into which is stored the arg pointer, structure
6762 value address, static chain, and all the registers that might
6763 possibly be used in performing a function call. The code is
6764 moved to the start of the function so the incoming values are
6766 case BUILT_IN_APPLY_ARGS:
6767 /* Don't do __builtin_apply_args more than once in a function.
6768 Save the result of the first call and reuse it. */
6769 if (apply_args_value != 0)
6770 return apply_args_value;
6772 /* When this function is called, it means that registers must be
6773 saved on entry to this function. So we migrate the
6774 call to the first insn of this function. */
6779 temp = expand_builtin_apply_args ();
6783 apply_args_value = temp;
6785 /* Put the sequence after the NOTE that starts the function.
6786 If this is inside a SEQUENCE, make the outer-level insn
6787 chain current, so the code is placed at the start of the
6789 push_topmost_sequence ();
6790 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6791 pop_topmost_sequence ();
6795 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6796 FUNCTION with a copy of the parameters described by
6797 ARGUMENTS, and ARGSIZE. It returns a block of memory
6798 allocated on the stack into which is stored all the registers
6799 that might possibly be used for returning the result of a
6800 function. ARGUMENTS is the value returned by
6801 __builtin_apply_args. ARGSIZE is the number of bytes of
6802 arguments that must be copied. ??? How should this value be
6803 computed? We'll also need a safe worst case value for varargs
6805 case BUILT_IN_APPLY:
6807 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6808 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6809 || TREE_CHAIN (arglist) == 0
6810 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6811 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6812 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6820 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6821 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6823 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6826 /* __builtin_return (RESULT) causes the function to return the
6827 value described by RESULT. RESULT is address of the block of
6828 memory returned by __builtin_apply. */
6829 case BUILT_IN_RETURN:
6831 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6832 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6833 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6834 NULL_RTX, VOIDmode, 0));
6837 case BUILT_IN_SAVEREGS:
6838 /* Don't do __builtin_saveregs more than once in a function.
6839 Save the result of the first call and reuse it. */
6840 if (saveregs_value != 0)
6841 return saveregs_value;
6843 /* When this function is called, it means that registers must be
6844 saved on entry to this function. So we migrate the
6845 call to the first insn of this function. */
6848 rtx valreg, saved_valreg;
6850 /* Now really call the function. `expand_call' does not call
6851 expand_builtin, so there is no danger of infinite recursion here. */
6854 #ifdef EXPAND_BUILTIN_SAVEREGS
6855 /* Do whatever the machine needs done in this case. */
6856 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6858 /* The register where the function returns its value
6859 is likely to have something else in it, such as an argument.
6860 So preserve that register around the call. */
6861 if (value_mode != VOIDmode)
6863 valreg = hard_libcall_value (value_mode);
6864 saved_valreg = gen_reg_rtx (value_mode);
6865 emit_move_insn (saved_valreg, valreg);
6868 /* Generate the call, putting the value in a pseudo. */
6869 temp = expand_call (exp, target, ignore);
6871 if (value_mode != VOIDmode)
6872 emit_move_insn (valreg, saved_valreg);
6878 saveregs_value = temp;
6880 /* Put the sequence after the NOTE that starts the function.
6881 If this is inside a SEQUENCE, make the outer-level insn
6882 chain current, so the code is placed at the start of the
6884 push_topmost_sequence ();
6885 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6886 pop_topmost_sequence ();
6890 /* __builtin_args_info (N) returns word N of the arg space info
6891 for the current function. The number and meanings of words
6892 is controlled by the definition of CUMULATIVE_ARGS. */
6893 case BUILT_IN_ARGS_INFO:
6895 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6897 int *word_ptr = (int *) ¤t_function_args_info;
6898 tree type, elts, result;
6900 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6901 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6902 __FILE__, __LINE__);
6906 tree arg = TREE_VALUE (arglist);
6907 if (TREE_CODE (arg) != INTEGER_CST)
6908 error ("argument of `__builtin_args_info' must be constant");
6911 int wordnum = TREE_INT_CST_LOW (arg);
6913 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6914 error ("argument of `__builtin_args_info' out of range");
6916 return GEN_INT (word_ptr[wordnum]);
6920 error ("missing argument in `__builtin_args_info'");
6925 for (i = 0; i < nwords; i++)
6926 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6928 type = build_array_type (integer_type_node,
6929 build_index_type (build_int_2 (nwords, 0)));
6930 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6931 TREE_CONSTANT (result) = 1;
6932 TREE_STATIC (result) = 1;
6933 result = build (INDIRECT_REF, build_pointer_type (type), result);
6934 TREE_CONSTANT (result) = 1;
6935 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6939 /* Return the address of the first anonymous stack arg. */
6940 case BUILT_IN_NEXT_ARG:
6943 tree fntype = TREE_TYPE (current_function_decl);
6944 tree fnargs = DECL_ARGUMENTS (current_function_decl);
6945 if (!(TYPE_ARG_TYPES (fntype) != 0
6946 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6949 && (parm = tree_last (fnargs)) != 0
6951 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
6952 "__builtin_va_alist"))))
6954 error ("`va_start' used in function with fixed args");
6959 return expand_binop (Pmode, add_optab,
6960 current_function_internal_arg_pointer,
6961 current_function_arg_offset_rtx,
6962 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6964 case BUILT_IN_CLASSIFY_TYPE:
6967 tree type = TREE_TYPE (TREE_VALUE (arglist));
6968 enum tree_code code = TREE_CODE (type);
6969 if (code == VOID_TYPE)
6970 return GEN_INT (void_type_class);
6971 if (code == INTEGER_TYPE)
6972 return GEN_INT (integer_type_class);
6973 if (code == CHAR_TYPE)
6974 return GEN_INT (char_type_class);
6975 if (code == ENUMERAL_TYPE)
6976 return GEN_INT (enumeral_type_class);
6977 if (code == BOOLEAN_TYPE)
6978 return GEN_INT (boolean_type_class);
6979 if (code == POINTER_TYPE)
6980 return GEN_INT (pointer_type_class);
6981 if (code == REFERENCE_TYPE)
6982 return GEN_INT (reference_type_class);
6983 if (code == OFFSET_TYPE)
6984 return GEN_INT (offset_type_class);
6985 if (code == REAL_TYPE)
6986 return GEN_INT (real_type_class);
6987 if (code == COMPLEX_TYPE)
6988 return GEN_INT (complex_type_class);
6989 if (code == FUNCTION_TYPE)
6990 return GEN_INT (function_type_class);
6991 if (code == METHOD_TYPE)
6992 return GEN_INT (method_type_class);
6993 if (code == RECORD_TYPE)
6994 return GEN_INT (record_type_class);
6995 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6996 return GEN_INT (union_type_class);
6997 if (code == ARRAY_TYPE)
6998 return GEN_INT (array_type_class);
6999 if (code == STRING_TYPE)
7000 return GEN_INT (string_type_class);
7001 if (code == SET_TYPE)
7002 return GEN_INT (set_type_class);
7003 if (code == FILE_TYPE)
7004 return GEN_INT (file_type_class);
7005 if (code == LANG_TYPE)
7006 return GEN_INT (lang_type_class);
7008 return GEN_INT (no_type_class);
7010 case BUILT_IN_CONSTANT_P:
7014 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7015 ? const1_rtx : const0_rtx);
7017 case BUILT_IN_FRAME_ADDRESS:
7018 /* The argument must be a nonnegative integer constant.
7019 It counts the number of frames to scan up the stack.
7020 The value is the address of that frame. */
7021 case BUILT_IN_RETURN_ADDRESS:
7022 /* The argument must be a nonnegative integer constant.
7023 It counts the number of frames to scan up the stack.
7024 The value is the return address saved in that frame. */
7026 /* Warning about missing arg was already issued. */
7028 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7030 error ("invalid arg to `__builtin_return_address'");
7033 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
7035 error ("invalid arg to `__builtin_return_address'");
7040 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7041 rtx tem = frame_pointer_rtx;
7044 /* Some machines need special handling before we can access arbitrary
7045 frames. For example, on the sparc, we must first flush all
7046 register windows to the stack. */
7047 #ifdef SETUP_FRAME_ADDRESSES
7048 SETUP_FRAME_ADDRESSES ();
7051 /* On the sparc, the return address is not in the frame, it is
7052 in a register. There is no way to access it off of the current
7053 frame pointer, but it can be accessed off the previous frame
7054 pointer by reading the value from the register window save
7056 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7057 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7061 /* Scan back COUNT frames to the specified frame. */
7062 for (i = 0; i < count; i++)
7064 /* Assume the dynamic chain pointer is in the word that
7065 the frame address points to, unless otherwise specified. */
7066 #ifdef DYNAMIC_CHAIN_ADDRESS
7067 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7069 tem = memory_address (Pmode, tem);
7070 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7073 /* For __builtin_frame_address, return what we've got. */
7074 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7077 /* For __builtin_return_address,
7078 Get the return address from that frame. */
7079 #ifdef RETURN_ADDR_RTX
7080 return RETURN_ADDR_RTX (count, tem);
7082 tem = memory_address (Pmode,
7083 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7084 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7088 case BUILT_IN_ALLOCA:
7090 /* Arg could be non-integer if user redeclared this fcn wrong. */
7091 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7093 current_function_calls_alloca = 1;
7094 /* Compute the argument. */
7095 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7097 /* Allocate the desired space. */
7098 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7100 /* Record the new stack level for nonlocal gotos. */
7101 if (nonlocal_goto_handler_slot != 0)
7102 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7106 /* If not optimizing, call the library function. */
7111 /* Arg could be non-integer if user redeclared this fcn wrong. */
7112 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7115 /* Compute the argument. */
7116 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7117 /* Compute ffs, into TARGET if possible.
7118 Set TARGET to wherever the result comes back. */
7119 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7120 ffs_optab, op0, target, 1);
7125 case BUILT_IN_STRLEN:
7126 /* If not optimizing, call the library function. */
7131 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7132 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7136 tree src = TREE_VALUE (arglist);
7137 tree len = c_strlen (src);
7140 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7142 rtx result, src_rtx, char_rtx;
7143 enum machine_mode insn_mode = value_mode, char_mode;
7144 enum insn_code icode;
7146 /* If the length is known, just return it. */
7148 return expand_expr (len, target, mode, 0);
7150 /* If SRC is not a pointer type, don't do this operation inline. */
7154 /* Call a function if we can't compute strlen in the right mode. */
7156 while (insn_mode != VOIDmode)
7158 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7159 if (icode != CODE_FOR_nothing)
7162 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7164 if (insn_mode == VOIDmode)
7167 /* Make a place to write the result of the instruction. */
7170 && GET_CODE (result) == REG
7171 && GET_MODE (result) == insn_mode
7172 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7173 result = gen_reg_rtx (insn_mode);
7175 /* Make sure the operands are acceptable to the predicates. */
7177 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7178 result = gen_reg_rtx (insn_mode);
7180 src_rtx = memory_address (BLKmode,
7181 expand_expr (src, NULL_RTX, Pmode,
7183 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7184 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7186 char_rtx = const0_rtx;
7187 char_mode = insn_operand_mode[(int)icode][2];
7188 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7189 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7191 emit_insn (GEN_FCN (icode) (result,
7192 gen_rtx (MEM, BLKmode, src_rtx),
7193 char_rtx, GEN_INT (align)));
7195 /* Return the value in the proper mode for this function. */
7196 if (GET_MODE (result) == value_mode)
7198 else if (target != 0)
7200 convert_move (target, result, 0);
7204 return convert_to_mode (value_mode, result, 0);
7207 case BUILT_IN_STRCPY:
7208 /* If not optimizing, call the library function. */
7213 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7214 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7215 || TREE_CHAIN (arglist) == 0
7216 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7220 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7225 len = size_binop (PLUS_EXPR, len, integer_one_node);
7227 chainon (arglist, build_tree_list (NULL_TREE, len));
7231 case BUILT_IN_MEMCPY:
7232 /* If not optimizing, call the library function. */
7237 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7238 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7239 || TREE_CHAIN (arglist) == 0
7240 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7241 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7242 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7246 tree dest = TREE_VALUE (arglist);
7247 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7248 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7251 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7253 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7254 rtx dest_rtx, dest_mem, src_mem;
7256 /* If either SRC or DEST is not a pointer type, don't do
7257 this operation in-line. */
7258 if (src_align == 0 || dest_align == 0)
7260 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7261 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7265 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7266 dest_mem = gen_rtx (MEM, BLKmode,
7267 memory_address (BLKmode, dest_rtx));
7268 src_mem = gen_rtx (MEM, BLKmode,
7269 memory_address (BLKmode,
7270 expand_expr (src, NULL_RTX,
7274 /* Copy word part most expediently. */
7275 emit_block_move (dest_mem, src_mem,
7276 expand_expr (len, NULL_RTX, VOIDmode, 0),
7277 MIN (src_align, dest_align));
7281 /* These comparison functions need an instruction that returns an actual
7282 index. An ordinary compare that just sets the condition codes
7284 #ifdef HAVE_cmpstrsi
7285 case BUILT_IN_STRCMP:
7286 /* If not optimizing, call the library function. */
7291 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7292 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7293 || TREE_CHAIN (arglist) == 0
7294 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7296 else if (!HAVE_cmpstrsi)
7299 tree arg1 = TREE_VALUE (arglist);
7300 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7304 len = c_strlen (arg1);
7306 len = size_binop (PLUS_EXPR, integer_one_node, len);
7307 len2 = c_strlen (arg2);
7309 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7311 /* If we don't have a constant length for the first, use the length
7312 of the second, if we know it. We don't require a constant for
7313 this case; some cost analysis could be done if both are available
7314 but neither is constant. For now, assume they're equally cheap.
7316 If both strings have constant lengths, use the smaller. This
7317 could arise if optimization results in strcpy being called with
7318 two fixed strings, or if the code was machine-generated. We should
7319 add some code to the `memcmp' handler below to deal with such
7320 situations, someday. */
7321 if (!len || TREE_CODE (len) != INTEGER_CST)
7328 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7330 if (tree_int_cst_lt (len2, len))
7334 chainon (arglist, build_tree_list (NULL_TREE, len));
7338 case BUILT_IN_MEMCMP:
7339 /* If not optimizing, call the library function. */
7344 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7345 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7346 || TREE_CHAIN (arglist) == 0
7347 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7348 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7349 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7351 else if (!HAVE_cmpstrsi)
7354 tree arg1 = TREE_VALUE (arglist);
7355 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7356 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7360 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7362 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7363 enum machine_mode insn_mode
7364 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7366 /* If we don't have POINTER_TYPE, call the function. */
7367 if (arg1_align == 0 || arg2_align == 0)
7369 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7370 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7374 /* Make a place to write the result of the instruction. */
7377 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7378 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7379 result = gen_reg_rtx (insn_mode);
7381 emit_insn (gen_cmpstrsi (result,
7382 gen_rtx (MEM, BLKmode,
7383 expand_expr (arg1, NULL_RTX, Pmode,
7385 gen_rtx (MEM, BLKmode,
7386 expand_expr (arg2, NULL_RTX, Pmode,
7388 expand_expr (len, NULL_RTX, VOIDmode, 0),
7389 GEN_INT (MIN (arg1_align, arg2_align))));
7391 /* Return the value in the proper mode for this function. */
7392 mode = TYPE_MODE (TREE_TYPE (exp));
7393 if (GET_MODE (result) == mode)
7395 else if (target != 0)
7397 convert_move (target, result, 0);
7401 return convert_to_mode (mode, result, 0);
7404 case BUILT_IN_STRCMP:
7405 case BUILT_IN_MEMCMP:
7409 default: /* just do library call, if unknown builtin */
7410 error ("built-in function `%s' not currently supported",
7411 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7414 /* The switch statement above can drop through to cause the function
7415 to be called normally. */
7417 return expand_call (exp, target, ignore);
7420 /* Built-in functions to perform an untyped call and return. */
7422 /* For each register that may be used for calling a function, this
7423 gives a mode used to copy the register's value. VOIDmode indicates
7424 the register is not used for calling a function. If the machine
7425 has register windows, this gives only the outbound registers.
7426 INCOMING_REGNO gives the corresponding inbound register. */
7427 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7429 /* For each register that may be used for returning values, this gives
7430 a mode used to copy the register's value. VOIDmode indicates the
7431 register is not used for returning values. If the machine has
7432 register windows, this gives only the outbound registers.
7433 INCOMING_REGNO gives the corresponding inbound register. */
7434 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7436 /* For each register that may be used for calling a function, this
7437 gives the offset of that register into the block returned by
7438 __bultin_apply_args. 0 indicates that the register is not
7439 used for calling a function. */
7440 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7442 /* Return the offset of register REGNO into the block returned by
7443 __builtin_apply_args. This is not declared static, since it is
7444 needed in objc-act.c. */
7447 apply_args_register_offset (regno)
7452 /* Arguments are always put in outgoing registers (in the argument
7453 block) if such make sense. */
7454 #ifdef OUTGOING_REGNO
7455 regno = OUTGOING_REGNO(regno);
7457 return apply_args_reg_offset[regno];
7460 /* Return the size required for the block returned by __builtin_apply_args,
7461 and initialize apply_args_mode. */
7466 static int size = -1;
7468 enum machine_mode mode;
7470 /* The values computed by this function never change. */
7473 /* The first value is the incoming arg-pointer. */
7474 size = GET_MODE_SIZE (Pmode);
7476 /* The second value is the structure value address unless this is
7477 passed as an "invisible" first argument. */
7478 if (struct_value_rtx)
7479 size += GET_MODE_SIZE (Pmode);
7481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7482 if (FUNCTION_ARG_REGNO_P (regno))
7484 /* Search for the proper mode for copying this register's
7485 value. I'm not sure this is right, but it works so far. */
7486 enum machine_mode best_mode = VOIDmode;
7488 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7490 mode = GET_MODE_WIDER_MODE (mode))
7491 if (HARD_REGNO_MODE_OK (regno, mode)
7492 && HARD_REGNO_NREGS (regno, mode) == 1)
7495 if (best_mode == VOIDmode)
7496 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7498 mode = GET_MODE_WIDER_MODE (mode))
7499 if (HARD_REGNO_MODE_OK (regno, mode)
7500 && (mov_optab->handlers[(int) mode].insn_code
7501 != CODE_FOR_nothing))
7505 if (mode == VOIDmode)
7508 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7509 if (size % align != 0)
7510 size = CEIL (size, align) * align;
7511 apply_args_reg_offset[regno] = size;
7512 size += GET_MODE_SIZE (mode);
7513 apply_args_mode[regno] = mode;
7517 apply_args_mode[regno] = VOIDmode;
7518 apply_args_reg_offset[regno] = 0;
7524 /* Return the size required for the block returned by __builtin_apply,
7525 and initialize apply_result_mode. */
7528 apply_result_size ()
7530 static int size = -1;
7532 enum machine_mode mode;
7534 /* The values computed by this function never change. */
7539 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7540 if (FUNCTION_VALUE_REGNO_P (regno))
7542 /* Search for the proper mode for copying this register's
7543 value. I'm not sure this is right, but it works so far. */
7544 enum machine_mode best_mode = VOIDmode;
7546 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7548 mode = GET_MODE_WIDER_MODE (mode))
7549 if (HARD_REGNO_MODE_OK (regno, mode))
7552 if (best_mode == VOIDmode)
7553 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7555 mode = GET_MODE_WIDER_MODE (mode))
7556 if (HARD_REGNO_MODE_OK (regno, mode)
7557 && (mov_optab->handlers[(int) mode].insn_code
7558 != CODE_FOR_nothing))
7562 if (mode == VOIDmode)
7565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7566 if (size % align != 0)
7567 size = CEIL (size, align) * align;
7568 size += GET_MODE_SIZE (mode);
7569 apply_result_mode[regno] = mode;
7572 apply_result_mode[regno] = VOIDmode;
7574 /* Allow targets that use untyped_call and untyped_return to override
7575 the size so that machine-specific information can be stored here. */
7576 #ifdef APPLY_RESULT_SIZE
7577 size = APPLY_RESULT_SIZE;
7583 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7584 /* Create a vector describing the result block RESULT. If SAVEP is true,
7585 the result block is used to save the values; otherwise it is used to
7586 restore the values. */
7589 result_vector (savep, result)
7593 int regno, size, align, nelts;
7594 enum machine_mode mode;
7596 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7599 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7600 if ((mode = apply_result_mode[regno]) != VOIDmode)
7602 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7603 if (size % align != 0)
7604 size = CEIL (size, align) * align;
7605 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7606 mem = change_address (result, mode,
7607 plus_constant (XEXP (result, 0), size));
7608 savevec[nelts++] = (savep
7609 ? gen_rtx (SET, VOIDmode, mem, reg)
7610 : gen_rtx (SET, VOIDmode, reg, mem));
7611 size += GET_MODE_SIZE (mode);
7613 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7615 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7617 /* Save the state required to perform an untyped call with the same
7618 arguments as were passed to the current function. */
7621 expand_builtin_apply_args ()
7624 int size, align, regno;
7625 enum machine_mode mode;
7627 /* Create a block where the arg-pointer, structure value address,
7628 and argument registers can be saved. */
7629 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7631 /* Walk past the arg-pointer and structure value address. */
7632 size = GET_MODE_SIZE (Pmode);
7633 if (struct_value_rtx)
7634 size += GET_MODE_SIZE (Pmode);
7636 /* Save each register used in calling a function to the block. */
7637 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7638 if ((mode = apply_args_mode[regno]) != VOIDmode)
7640 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7641 if (size % align != 0)
7642 size = CEIL (size, align) * align;
7643 emit_move_insn (change_address (registers, mode,
7644 plus_constant (XEXP (registers, 0),
7646 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7647 size += GET_MODE_SIZE (mode);
7650 /* Save the arg pointer to the block. */
7651 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7652 copy_to_reg (virtual_incoming_args_rtx));
7653 size = GET_MODE_SIZE (Pmode);
7655 /* Save the structure value address unless this is passed as an
7656 "invisible" first argument. */
7657 if (struct_value_incoming_rtx)
7659 emit_move_insn (change_address (registers, Pmode,
7660 plus_constant (XEXP (registers, 0),
7662 copy_to_reg (struct_value_incoming_rtx));
7663 size += GET_MODE_SIZE (Pmode);
7666 /* Return the address of the block. */
7667 return copy_addr_to_reg (XEXP (registers, 0));
7670 /* Perform an untyped call and save the state required to perform an
7671 untyped return of whatever value was returned by the given function. */
7674 expand_builtin_apply (function, arguments, argsize)
7675 rtx function, arguments, argsize;
7677 int size, align, regno;
7678 enum machine_mode mode;
7679 rtx incoming_args, result, reg, dest, call_insn;
7680 rtx old_stack_level = 0;
7683 /* Create a block where the return registers can be saved. */
7684 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7686 /* ??? The argsize value should be adjusted here. */
7688 /* Fetch the arg pointer from the ARGUMENTS block. */
7689 incoming_args = gen_reg_rtx (Pmode);
7690 emit_move_insn (incoming_args,
7691 gen_rtx (MEM, Pmode, arguments));
7692 #ifndef STACK_GROWS_DOWNWARD
7693 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7694 incoming_args, 0, OPTAB_LIB_WIDEN);
7697 /* Perform postincrements before actually calling the function. */
7700 /* Push a new argument block and copy the arguments. */
7701 do_pending_stack_adjust ();
7702 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7704 /* Push a block of memory onto the stack to store the memory arguments.
7705 Save the address in a register, and copy the memory arguments. ??? I
7706 haven't figured out how the calling convention macros effect this,
7707 but it's likely that the source and/or destination addresses in
7708 the block copy will need updating in machine specific ways. */
7709 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7710 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7711 gen_rtx (MEM, BLKmode, incoming_args),
7713 PARM_BOUNDARY / BITS_PER_UNIT);
7715 /* Refer to the argument block. */
7717 arguments = gen_rtx (MEM, BLKmode, arguments);
7719 /* Walk past the arg-pointer and structure value address. */
7720 size = GET_MODE_SIZE (Pmode);
7721 if (struct_value_rtx)
7722 size += GET_MODE_SIZE (Pmode);
7724 /* Restore each of the registers previously saved. Make USE insns
7725 for each of these registers for use in making the call. */
7726 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7727 if ((mode = apply_args_mode[regno]) != VOIDmode)
7729 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7730 if (size % align != 0)
7731 size = CEIL (size, align) * align;
7732 reg = gen_rtx (REG, mode, regno);
7733 emit_move_insn (reg,
7734 change_address (arguments, mode,
7735 plus_constant (XEXP (arguments, 0),
7738 push_to_sequence (use_insns);
7739 emit_insn (gen_rtx (USE, VOIDmode, reg));
7740 use_insns = get_insns ();
7742 size += GET_MODE_SIZE (mode);
7745 /* Restore the structure value address unless this is passed as an
7746 "invisible" first argument. */
7747 size = GET_MODE_SIZE (Pmode);
7748 if (struct_value_rtx)
7750 rtx value = gen_reg_rtx (Pmode);
7751 emit_move_insn (value,
7752 change_address (arguments, Pmode,
7753 plus_constant (XEXP (arguments, 0),
7755 emit_move_insn (struct_value_rtx, value);
7756 if (GET_CODE (struct_value_rtx) == REG)
7758 push_to_sequence (use_insns);
7759 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7760 use_insns = get_insns ();
7763 size += GET_MODE_SIZE (Pmode);
7766 /* All arguments and registers used for the call are set up by now! */
7767 function = prepare_call_address (function, NULL_TREE, &use_insns);
7769 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7770 and we don't want to load it into a register as an optimization,
7771 because prepare_call_address already did it if it should be done. */
7772 if (GET_CODE (function) != SYMBOL_REF)
7773 function = memory_address (FUNCTION_MODE, function);
7775 /* Generate the actual call instruction and save the return value. */
7776 #ifdef HAVE_untyped_call
7777 if (HAVE_untyped_call)
7778 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7779 result, result_vector (1, result)));
7782 #ifdef HAVE_call_value
7783 if (HAVE_call_value)
7787 /* Locate the unique return register. It is not possible to
7788 express a call that sets more than one return register using
7789 call_value; use untyped_call for that. In fact, untyped_call
7790 only needs to save the return registers in the given block. */
7791 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7792 if ((mode = apply_result_mode[regno]) != VOIDmode)
7795 abort (); /* HAVE_untyped_call required. */
7796 valreg = gen_rtx (REG, mode, regno);
7799 emit_call_insn (gen_call_value (valreg,
7800 gen_rtx (MEM, FUNCTION_MODE, function),
7801 const0_rtx, NULL_RTX, const0_rtx));
7803 emit_move_insn (change_address (result, GET_MODE (valreg),
7811 /* Find the CALL insn we just emitted and write the USE insns before it. */
7812 for (call_insn = get_last_insn ();
7813 call_insn && GET_CODE (call_insn) != CALL_INSN;
7814 call_insn = PREV_INSN (call_insn))
7820 /* Put the USE insns before the CALL. */
7821 emit_insns_before (use_insns, call_insn);
7823 /* Restore the stack. */
7824 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7826 /* Return the address of the result block. */
7827 return copy_addr_to_reg (XEXP (result, 0));
7830 /* Perform an untyped return. */
7833 expand_builtin_return (result)
7836 int size, align, regno;
7837 enum machine_mode mode;
7841 apply_result_size ();
7842 result = gen_rtx (MEM, BLKmode, result);
7844 #ifdef HAVE_untyped_return
7845 if (HAVE_untyped_return)
7847 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7853 /* Restore the return value and note that each value is used. */
7855 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7856 if ((mode = apply_result_mode[regno]) != VOIDmode)
7858 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7859 if (size % align != 0)
7860 size = CEIL (size, align) * align;
7861 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7862 emit_move_insn (reg,
7863 change_address (result, mode,
7864 plus_constant (XEXP (result, 0),
7867 push_to_sequence (use_insns);
7868 emit_insn (gen_rtx (USE, VOIDmode, reg));
7869 use_insns = get_insns ();
7871 size += GET_MODE_SIZE (mode);
7874 /* Put the USE insns before the return. */
7875 emit_insns (use_insns);
7877 /* Return whatever values was restored by jumping directly to the end
7879 expand_null_return ();
7882 /* Expand code for a post- or pre- increment or decrement
7883 and return the RTX for the result.
7884 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7887 expand_increment (exp, post)
7891 register rtx op0, op1;
7892 register rtx temp, value;
7893 register tree incremented = TREE_OPERAND (exp, 0);
7894 optab this_optab = add_optab;
7896 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7897 int op0_is_copy = 0;
7898 int single_insn = 0;
7899 /* 1 means we can't store into OP0 directly,
7900 because it is a subreg narrower than a word,
7901 and we don't dare clobber the rest of the word. */
7904 if (output_bytecode)
7906 bc_expand_expr (exp);
7910 /* Stabilize any component ref that might need to be
7911 evaluated more than once below. */
7913 || TREE_CODE (incremented) == BIT_FIELD_REF
7914 || (TREE_CODE (incremented) == COMPONENT_REF
7915 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7916 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7917 incremented = stabilize_reference (incremented);
7918 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7919 ones into save exprs so that they don't accidentally get evaluated
7920 more than once by the code below. */
7921 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7922 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7923 incremented = save_expr (incremented);
7925 /* Compute the operands as RTX.
7926 Note whether OP0 is the actual lvalue or a copy of it:
7927 I believe it is a copy iff it is a register or subreg
7928 and insns were generated in computing it. */
7930 temp = get_last_insn ();
7931 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7933 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7934 in place but intead must do sign- or zero-extension during assignment,
7935 so we copy it into a new register and let the code below use it as
7938 Note that we can safely modify this SUBREG since it is know not to be
7939 shared (it was made by the expand_expr call above). */
7941 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7942 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7943 else if (GET_CODE (op0) == SUBREG
7944 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7947 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7948 && temp != get_last_insn ());
7949 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7951 /* Decide whether incrementing or decrementing. */
7952 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7953 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7954 this_optab = sub_optab;
7956 /* Convert decrement by a constant into a negative increment. */
7957 if (this_optab == sub_optab
7958 && GET_CODE (op1) == CONST_INT)
7960 op1 = GEN_INT (- INTVAL (op1));
7961 this_optab = add_optab;
7964 /* For a preincrement, see if we can do this with a single instruction. */
7967 icode = (int) this_optab->handlers[(int) mode].insn_code;
7968 if (icode != (int) CODE_FOR_nothing
7969 /* Make sure that OP0 is valid for operands 0 and 1
7970 of the insn we want to queue. */
7971 && (*insn_operand_predicate[icode][0]) (op0, mode)
7972 && (*insn_operand_predicate[icode][1]) (op0, mode)
7973 && (*insn_operand_predicate[icode][2]) (op1, mode))
7977 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7978 then we cannot just increment OP0. We must therefore contrive to
7979 increment the original value. Then, for postincrement, we can return
7980 OP0 since it is a copy of the old value. For preincrement, expand here
7981 unless we can do it with a single insn.
7983 Likewise if storing directly into OP0 would clobber high bits
7984 we need to preserve (bad_subreg). */
7985 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7987 /* This is the easiest way to increment the value wherever it is.
7988 Problems with multiple evaluation of INCREMENTED are prevented
7989 because either (1) it is a component_ref or preincrement,
7990 in which case it was stabilized above, or (2) it is an array_ref
7991 with constant index in an array in a register, which is
7992 safe to reevaluate. */
7993 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7994 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7995 ? MINUS_EXPR : PLUS_EXPR),
7998 TREE_OPERAND (exp, 1));
7999 temp = expand_assignment (incremented, newexp, ! post, 0);
8000 return post ? op0 : temp;
8005 /* We have a true reference to the value in OP0.
8006 If there is an insn to add or subtract in this mode, queue it.
8007 Queueing the increment insn avoids the register shuffling
8008 that often results if we must increment now and first save
8009 the old value for subsequent use. */
8011 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8012 op0 = stabilize (op0);
8015 icode = (int) this_optab->handlers[(int) mode].insn_code;
8016 if (icode != (int) CODE_FOR_nothing
8017 /* Make sure that OP0 is valid for operands 0 and 1
8018 of the insn we want to queue. */
8019 && (*insn_operand_predicate[icode][0]) (op0, mode)
8020 && (*insn_operand_predicate[icode][1]) (op0, mode))
8022 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8023 op1 = force_reg (mode, op1);
8025 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8029 /* Preincrement, or we can't increment with one simple insn. */
8031 /* Save a copy of the value before inc or dec, to return it later. */
8032 temp = value = copy_to_reg (op0);
8034 /* Arrange to return the incremented value. */
8035 /* Copy the rtx because expand_binop will protect from the queue,
8036 and the results of that would be invalid for us to return
8037 if our caller does emit_queue before using our result. */
8038 temp = copy_rtx (value = op0);
8040 /* Increment however we can. */
8041 op1 = expand_binop (mode, this_optab, value, op1, op0,
8042 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8043 /* Make sure the value is stored into OP0. */
8045 emit_move_insn (op0, op1);
8050 /* Expand all function calls contained within EXP, innermost ones first.
8051 But don't look within expressions that have sequence points.
8052 For each CALL_EXPR, record the rtx for its value
8053 in the CALL_EXPR_RTL field. */
8056 preexpand_calls (exp)
8059 register int nops, i;
8060 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8062 if (! do_preexpand_calls)
8065 /* Only expressions and references can contain calls. */
8067 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8070 switch (TREE_CODE (exp))
8073 /* Do nothing if already expanded. */
8074 if (CALL_EXPR_RTL (exp) != 0)
8077 /* Do nothing to built-in functions. */
8078 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8079 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8080 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8081 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8086 case TRUTH_ANDIF_EXPR:
8087 case TRUTH_ORIF_EXPR:
8088 /* If we find one of these, then we can be sure
8089 the adjust will be done for it (since it makes jumps).
8090 Do it now, so that if this is inside an argument
8091 of a function, we don't get the stack adjustment
8092 after some other args have already been pushed. */
8093 do_pending_stack_adjust ();
8098 case WITH_CLEANUP_EXPR:
8102 if (SAVE_EXPR_RTL (exp) != 0)
8106 nops = tree_code_length[(int) TREE_CODE (exp)];
8107 for (i = 0; i < nops; i++)
8108 if (TREE_OPERAND (exp, i) != 0)
8110 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8111 if (type == 'e' || type == '<' || type == '1' || type == '2'
8113 preexpand_calls (TREE_OPERAND (exp, i));
8117 /* At the start of a function, record that we have no previously-pushed
8118 arguments waiting to be popped. */
8121 init_pending_stack_adjust ()
8123 pending_stack_adjust = 0;
8126 /* When exiting from function, if safe, clear out any pending stack adjust
8127 so the adjustment won't get done. */
8130 clear_pending_stack_adjust ()
8132 #ifdef EXIT_IGNORE_STACK
8133 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8134 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8135 && ! flag_inline_functions)
8136 pending_stack_adjust = 0;
8140 /* Pop any previously-pushed arguments that have not been popped yet. */
8143 do_pending_stack_adjust ()
8145 if (inhibit_defer_pop == 0)
8147 if (pending_stack_adjust != 0)
8148 adjust_stack (GEN_INT (pending_stack_adjust));
8149 pending_stack_adjust = 0;
8153 /* Expand all cleanups up to OLD_CLEANUPS.
8154 Needed here, and also for language-dependent calls. */
8157 expand_cleanups_to (old_cleanups)
8160 while (cleanups_this_call != old_cleanups)
8162 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8163 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8167 /* Expand conditional expressions. */
8169 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8170 LABEL is an rtx of code CODE_LABEL, in this function and all the
8174 jumpifnot (exp, label)
8178 do_jump (exp, label, NULL_RTX);
8181 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8188 do_jump (exp, NULL_RTX, label);
8191 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8192 the result is zero, or IF_TRUE_LABEL if the result is one.
8193 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8194 meaning fall through in that case.
8196 do_jump always does any pending stack adjust except when it does not
8197 actually perform a jump. An example where there is no jump
8198 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8200 This function is responsible for optimizing cases such as
8201 &&, || and comparison operators in EXP. */
8204 do_jump (exp, if_false_label, if_true_label)
8206 rtx if_false_label, if_true_label;
8208 register enum tree_code code = TREE_CODE (exp);
8209 /* Some cases need to create a label to jump to
8210 in order to properly fall through.
8211 These cases set DROP_THROUGH_LABEL nonzero. */
8212 rtx drop_through_label = 0;
8226 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8232 /* This is not true with #pragma weak */
8234 /* The address of something can never be zero. */
8236 emit_jump (if_true_label);
8241 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8242 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8243 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8246 /* If we are narrowing the operand, we have to do the compare in the
8248 if ((TYPE_PRECISION (TREE_TYPE (exp))
8249 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8251 case NON_LVALUE_EXPR:
8252 case REFERENCE_EXPR:
8257 /* These cannot change zero->non-zero or vice versa. */
8258 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8262 /* This is never less insns than evaluating the PLUS_EXPR followed by
8263 a test and can be longer if the test is eliminated. */
8265 /* Reduce to minus. */
8266 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8267 TREE_OPERAND (exp, 0),
8268 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8269 TREE_OPERAND (exp, 1))));
8270 /* Process as MINUS. */
8274 /* Non-zero iff operands of minus differ. */
8275 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8276 TREE_OPERAND (exp, 0),
8277 TREE_OPERAND (exp, 1)),
8282 /* If we are AND'ing with a small constant, do this comparison in the
8283 smallest type that fits. If the machine doesn't have comparisons
8284 that small, it will be converted back to the wider comparison.
8285 This helps if we are testing the sign bit of a narrower object.
8286 combine can't do this for us because it can't know whether a
8287 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8289 if (! SLOW_BYTE_ACCESS
8290 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8291 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8292 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8293 && (type = type_for_size (i + 1, 1)) != 0
8294 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8295 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8296 != CODE_FOR_nothing))
8298 do_jump (convert (type, exp), if_false_label, if_true_label);
8303 case TRUTH_NOT_EXPR:
8304 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8307 case TRUTH_ANDIF_EXPR:
8308 if (if_false_label == 0)
8309 if_false_label = drop_through_label = gen_label_rtx ();
8310 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8311 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8314 case TRUTH_ORIF_EXPR:
8315 if (if_true_label == 0)
8316 if_true_label = drop_through_label = gen_label_rtx ();
8317 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8318 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8323 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8327 do_pending_stack_adjust ();
8328 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8335 int bitsize, bitpos, unsignedp;
8336 enum machine_mode mode;
8341 /* Get description of this reference. We don't actually care
8342 about the underlying object here. */
8343 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8344 &mode, &unsignedp, &volatilep);
8346 type = type_for_size (bitsize, unsignedp);
8347 if (! SLOW_BYTE_ACCESS
8348 && type != 0 && bitsize >= 0
8349 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8350 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8351 != CODE_FOR_nothing))
8353 do_jump (convert (type, exp), if_false_label, if_true_label);
8360 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8361 if (integer_onep (TREE_OPERAND (exp, 1))
8362 && integer_zerop (TREE_OPERAND (exp, 2)))
8363 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8365 else if (integer_zerop (TREE_OPERAND (exp, 1))
8366 && integer_onep (TREE_OPERAND (exp, 2)))
8367 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8371 register rtx label1 = gen_label_rtx ();
8372 drop_through_label = gen_label_rtx ();
8373 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8374 /* Now the THEN-expression. */
8375 do_jump (TREE_OPERAND (exp, 1),
8376 if_false_label ? if_false_label : drop_through_label,
8377 if_true_label ? if_true_label : drop_through_label);
8378 /* In case the do_jump just above never jumps. */
8379 do_pending_stack_adjust ();
8380 emit_label (label1);
8381 /* Now the ELSE-expression. */
8382 do_jump (TREE_OPERAND (exp, 2),
8383 if_false_label ? if_false_label : drop_through_label,
8384 if_true_label ? if_true_label : drop_through_label);
8389 if (integer_zerop (TREE_OPERAND (exp, 1)))
8390 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8391 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8394 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8395 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8396 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8397 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8399 comparison = compare (exp, EQ, EQ);
8403 if (integer_zerop (TREE_OPERAND (exp, 1)))
8404 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8405 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8408 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8409 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8410 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8411 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8413 comparison = compare (exp, NE, NE);
8417 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8419 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8420 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8422 comparison = compare (exp, LT, LTU);
8426 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8428 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8429 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8431 comparison = compare (exp, LE, LEU);
8435 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8437 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8438 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8440 comparison = compare (exp, GT, GTU);
8444 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8446 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8447 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8449 comparison = compare (exp, GE, GEU);
8454 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8456 /* This is not needed any more and causes poor code since it causes
8457 comparisons and tests from non-SI objects to have different code
8459 /* Copy to register to avoid generating bad insns by cse
8460 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8461 if (!cse_not_expected && GET_CODE (temp) == MEM)
8462 temp = copy_to_reg (temp);
8464 do_pending_stack_adjust ();
8465 if (GET_CODE (temp) == CONST_INT)
8466 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8467 else if (GET_CODE (temp) == LABEL_REF)
8468 comparison = const_true_rtx;
8469 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8470 && !can_compare_p (GET_MODE (temp)))
8471 /* Note swapping the labels gives us not-equal. */
8472 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8473 else if (GET_MODE (temp) != VOIDmode)
8474 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8475 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8476 GET_MODE (temp), NULL_RTX, 0);
8481 /* Do any postincrements in the expression that was tested. */
8484 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8485 straight into a conditional jump instruction as the jump condition.
8486 Otherwise, all the work has been done already. */
8488 if (comparison == const_true_rtx)
8491 emit_jump (if_true_label);
8493 else if (comparison == const0_rtx)
8496 emit_jump (if_false_label);
8498 else if (comparison)
8499 do_jump_for_compare (comparison, if_false_label, if_true_label);
8501 if (drop_through_label)
8503 /* If do_jump produces code that might be jumped around,
8504 do any stack adjusts from that code, before the place
8505 where control merges in. */
8506 do_pending_stack_adjust ();
8507 emit_label (drop_through_label);
8511 /* Given a comparison expression EXP for values too wide to be compared
8512 with one insn, test the comparison and jump to the appropriate label.
8513 The code of EXP is ignored; we always test GT if SWAP is 0,
8514 and LT if SWAP is 1. */
8517 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8520 rtx if_false_label, if_true_label;
8522 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8523 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8524 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8525 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8526 rtx drop_through_label = 0;
8527 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8530 if (! if_true_label || ! if_false_label)
8531 drop_through_label = gen_label_rtx ();
8532 if (! if_true_label)
8533 if_true_label = drop_through_label;
8534 if (! if_false_label)
8535 if_false_label = drop_through_label;
8537 /* Compare a word at a time, high order first. */
8538 for (i = 0; i < nwords; i++)
8541 rtx op0_word, op1_word;
8543 if (WORDS_BIG_ENDIAN)
8545 op0_word = operand_subword_force (op0, i, mode);
8546 op1_word = operand_subword_force (op1, i, mode);
8550 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8551 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8554 /* All but high-order word must be compared as unsigned. */
8555 comp = compare_from_rtx (op0_word, op1_word,
8556 (unsignedp || i > 0) ? GTU : GT,
8557 unsignedp, word_mode, NULL_RTX, 0);
8558 if (comp == const_true_rtx)
8559 emit_jump (if_true_label);
8560 else if (comp != const0_rtx)
8561 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8563 /* Consider lower words only if these are equal. */
8564 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8566 if (comp == const_true_rtx)
8567 emit_jump (if_false_label);
8568 else if (comp != const0_rtx)
8569 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8573 emit_jump (if_false_label);
8574 if (drop_through_label)
8575 emit_label (drop_through_label);
8578 /* Compare OP0 with OP1, word at a time, in mode MODE.
8579 UNSIGNEDP says to do unsigned comparison.
8580 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8583 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8584 enum machine_mode mode;
8587 rtx if_false_label, if_true_label;
8589 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8590 rtx drop_through_label = 0;
8593 if (! if_true_label || ! if_false_label)
8594 drop_through_label = gen_label_rtx ();
8595 if (! if_true_label)
8596 if_true_label = drop_through_label;
8597 if (! if_false_label)
8598 if_false_label = drop_through_label;
8600 /* Compare a word at a time, high order first. */
8601 for (i = 0; i < nwords; i++)
8604 rtx op0_word, op1_word;
8606 if (WORDS_BIG_ENDIAN)
8608 op0_word = operand_subword_force (op0, i, mode);
8609 op1_word = operand_subword_force (op1, i, mode);
8613 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8614 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8617 /* All but high-order word must be compared as unsigned. */
8618 comp = compare_from_rtx (op0_word, op1_word,
8619 (unsignedp || i > 0) ? GTU : GT,
8620 unsignedp, word_mode, NULL_RTX, 0);
8621 if (comp == const_true_rtx)
8622 emit_jump (if_true_label);
8623 else if (comp != const0_rtx)
8624 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8626 /* Consider lower words only if these are equal. */
8627 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8629 if (comp == const_true_rtx)
8630 emit_jump (if_false_label);
8631 else if (comp != const0_rtx)
8632 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8636 emit_jump (if_false_label);
8637 if (drop_through_label)
8638 emit_label (drop_through_label);
8641 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8642 with one insn, test the comparison and jump to the appropriate label. */
8645 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8647 rtx if_false_label, if_true_label;
8649 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8650 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8651 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8652 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8654 rtx drop_through_label = 0;
8656 if (! if_false_label)
8657 drop_through_label = if_false_label = gen_label_rtx ();
8659 for (i = 0; i < nwords; i++)
8661 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8662 operand_subword_force (op1, i, mode),
8663 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8664 word_mode, NULL_RTX, 0);
8665 if (comp == const_true_rtx)
8666 emit_jump (if_false_label);
8667 else if (comp != const0_rtx)
8668 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8672 emit_jump (if_true_label);
8673 if (drop_through_label)
8674 emit_label (drop_through_label);
8677 /* Jump according to whether OP0 is 0.
8678 We assume that OP0 has an integer mode that is too wide
8679 for the available compare insns. */
8682 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8684 rtx if_false_label, if_true_label;
8686 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8688 rtx drop_through_label = 0;
8690 if (! if_false_label)
8691 drop_through_label = if_false_label = gen_label_rtx ();
8693 for (i = 0; i < nwords; i++)
8695 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8697 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8698 if (comp == const_true_rtx)
8699 emit_jump (if_false_label);
8700 else if (comp != const0_rtx)
8701 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8705 emit_jump (if_true_label);
8706 if (drop_through_label)
8707 emit_label (drop_through_label);
8710 /* Given a comparison expression in rtl form, output conditional branches to
8711 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8714 do_jump_for_compare (comparison, if_false_label, if_true_label)
8715 rtx comparison, if_false_label, if_true_label;
8719 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8720 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8725 emit_jump (if_false_label);
8727 else if (if_false_label)
8730 rtx prev = get_last_insn ();
8734 prev = PREV_INSN (prev);
8736 /* Output the branch with the opposite condition. Then try to invert
8737 what is generated. If more than one insn is a branch, or if the
8738 branch is not the last insn written, abort. If we can't invert
8739 the branch, emit make a true label, redirect this jump to that,
8740 emit a jump to the false label and define the true label. */
8742 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8743 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8747 /* Here we get the insn before what was just emitted.
8748 On some machines, emitting the branch can discard
8749 the previous compare insn and emit a replacement. */
8751 /* If there's only one preceding insn... */
8752 insn = get_insns ();
8754 insn = NEXT_INSN (prev);
8756 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8757 if (GET_CODE (insn) == JUMP_INSN)
8764 if (branch != get_last_insn ())
8767 if (! invert_jump (branch, if_false_label))
8769 if_true_label = gen_label_rtx ();
8770 redirect_jump (branch, if_true_label);
8771 emit_jump (if_false_label);
8772 emit_label (if_true_label);
8777 /* Generate code for a comparison expression EXP
8778 (including code to compute the values to be compared)
8779 and set (CC0) according to the result.
8780 SIGNED_CODE should be the rtx operation for this comparison for
8781 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8783 We force a stack adjustment unless there are currently
8784 things pushed on the stack that aren't yet used. */
8787 compare (exp, signed_code, unsigned_code)
8789 enum rtx_code signed_code, unsigned_code;
8792 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8794 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8795 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8796 register enum machine_mode mode = TYPE_MODE (type);
8797 int unsignedp = TREE_UNSIGNED (type);
8798 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8800 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8802 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8803 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8806 /* Like compare but expects the values to compare as two rtx's.
8807 The decision as to signed or unsigned comparison must be made by the caller.
8809 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8812 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8813 size of MODE should be used. */
8816 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8817 register rtx op0, op1;
8820 enum machine_mode mode;
8826 /* If one operand is constant, make it the second one. Only do this
8827 if the other operand is not constant as well. */
8829 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8830 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8835 code = swap_condition (code);
8840 op0 = force_not_mem (op0);
8841 op1 = force_not_mem (op1);
8844 do_pending_stack_adjust ();
8846 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8847 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8851 /* There's no need to do this now that combine.c can eliminate lots of
8852 sign extensions. This can be less efficient in certain cases on other
8855 /* If this is a signed equality comparison, we can do it as an
8856 unsigned comparison since zero-extension is cheaper than sign
8857 extension and comparisons with zero are done as unsigned. This is
8858 the case even on machines that can do fast sign extension, since
8859 zero-extension is easier to combine with other operations than
8860 sign-extension is. If we are comparing against a constant, we must
8861 convert it to what it would look like unsigned. */
8862 if ((code == EQ || code == NE) && ! unsignedp
8863 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8865 if (GET_CODE (op1) == CONST_INT
8866 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8867 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8872 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8874 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
8877 /* Generate code to calculate EXP using a store-flag instruction
8878 and return an rtx for the result. EXP is either a comparison
8879 or a TRUTH_NOT_EXPR whose operand is a comparison.
8881 If TARGET is nonzero, store the result there if convenient.
8883 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8886 Return zero if there is no suitable set-flag instruction
8887 available on this machine.
8889 Once expand_expr has been called on the arguments of the comparison,
8890 we are committed to doing the store flag, since it is not safe to
8891 re-evaluate the expression. We emit the store-flag insn by calling
8892 emit_store_flag, but only expand the arguments if we have a reason
8893 to believe that emit_store_flag will be successful. If we think that
8894 it will, but it isn't, we have to simulate the store-flag with a
8895 set/jump/set sequence. */
8898 do_store_flag (exp, target, mode, only_cheap)
8901 enum machine_mode mode;
8905 tree arg0, arg1, type;
8907 enum machine_mode operand_mode;
8911 enum insn_code icode;
8912 rtx subtarget = target;
8913 rtx result, label, pattern, jump_pat;
8915 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8916 result at the end. We can't simply invert the test since it would
8917 have already been inverted if it were valid. This case occurs for
8918 some floating-point comparisons. */
8920 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8921 invert = 1, exp = TREE_OPERAND (exp, 0);
8923 arg0 = TREE_OPERAND (exp, 0);
8924 arg1 = TREE_OPERAND (exp, 1);
8925 type = TREE_TYPE (arg0);
8926 operand_mode = TYPE_MODE (type);
8927 unsignedp = TREE_UNSIGNED (type);
8929 /* We won't bother with BLKmode store-flag operations because it would mean
8930 passing a lot of information to emit_store_flag. */
8931 if (operand_mode == BLKmode)
8937 /* Get the rtx comparison code to use. We know that EXP is a comparison
8938 operation of some type. Some comparisons against 1 and -1 can be
8939 converted to comparisons with zero. Do so here so that the tests
8940 below will be aware that we have a comparison with zero. These
8941 tests will not catch constants in the first operand, but constants
8942 are rarely passed as the first operand. */
8944 switch (TREE_CODE (exp))
8953 if (integer_onep (arg1))
8954 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8956 code = unsignedp ? LTU : LT;
8959 if (! unsignedp && integer_all_onesp (arg1))
8960 arg1 = integer_zero_node, code = LT;
8962 code = unsignedp ? LEU : LE;
8965 if (! unsignedp && integer_all_onesp (arg1))
8966 arg1 = integer_zero_node, code = GE;
8968 code = unsignedp ? GTU : GT;
8971 if (integer_onep (arg1))
8972 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8974 code = unsignedp ? GEU : GE;
8980 /* Put a constant second. */
8981 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8983 tem = arg0; arg0 = arg1; arg1 = tem;
8984 code = swap_condition (code);
8987 /* If this is an equality or inequality test of a single bit, we can
8988 do this by shifting the bit being tested to the low-order bit and
8989 masking the result with the constant 1. If the condition was EQ,
8990 we xor it with 1. This does not require an scc insn and is faster
8991 than an scc insn even if we have it. */
8993 if ((code == NE || code == EQ)
8994 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8995 && integer_pow2p (TREE_OPERAND (arg0, 1))
8996 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8998 tree inner = TREE_OPERAND (arg0, 0);
8999 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9000 NULL_RTX, VOIDmode, 0)));
9003 /* If INNER is a right shift of a constant and it plus BITNUM does
9004 not overflow, adjust BITNUM and INNER. */
9006 if (TREE_CODE (inner) == RSHIFT_EXPR
9007 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9008 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9009 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9010 < TYPE_PRECISION (type)))
9012 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9013 inner = TREE_OPERAND (inner, 0);
9016 /* If we are going to be able to omit the AND below, we must do our
9017 operations as unsigned. If we must use the AND, we have a choice.
9018 Normally unsigned is faster, but for some machines signed is. */
9019 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9020 #ifdef LOAD_EXTEND_OP
9021 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9027 if (subtarget == 0 || GET_CODE (subtarget) != REG
9028 || GET_MODE (subtarget) != operand_mode
9029 || ! safe_from_p (subtarget, inner))
9032 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9035 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9036 size_int (bitnum), subtarget, ops_unsignedp);
9038 if (GET_MODE (op0) != mode)
9039 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9041 if ((code == EQ && ! invert) || (code == NE && invert))
9042 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9043 ops_unsignedp, OPTAB_LIB_WIDEN);
9045 /* Put the AND last so it can combine with more things. */
9046 if (bitnum != TYPE_PRECISION (type) - 1)
9047 op0 = expand_and (op0, const1_rtx, subtarget);
9052 /* Now see if we are likely to be able to do this. Return if not. */
9053 if (! can_compare_p (operand_mode))
9055 icode = setcc_gen_code[(int) code];
9056 if (icode == CODE_FOR_nothing
9057 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9059 /* We can only do this if it is one of the special cases that
9060 can be handled without an scc insn. */
9061 if ((code == LT && integer_zerop (arg1))
9062 || (! only_cheap && code == GE && integer_zerop (arg1)))
9064 else if (BRANCH_COST >= 0
9065 && ! only_cheap && (code == NE || code == EQ)
9066 && TREE_CODE (type) != REAL_TYPE
9067 && ((abs_optab->handlers[(int) operand_mode].insn_code
9068 != CODE_FOR_nothing)
9069 || (ffs_optab->handlers[(int) operand_mode].insn_code
9070 != CODE_FOR_nothing)))
9076 preexpand_calls (exp);
9077 if (subtarget == 0 || GET_CODE (subtarget) != REG
9078 || GET_MODE (subtarget) != operand_mode
9079 || ! safe_from_p (subtarget, arg1))
9082 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9083 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9086 target = gen_reg_rtx (mode);
9088 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9089 because, if the emit_store_flag does anything it will succeed and
9090 OP0 and OP1 will not be used subsequently. */
9092 result = emit_store_flag (target, code,
9093 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9094 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9095 operand_mode, unsignedp, 1);
9100 result = expand_binop (mode, xor_optab, result, const1_rtx,
9101 result, 0, OPTAB_LIB_WIDEN);
9105 /* If this failed, we have to do this with set/compare/jump/set code. */
9106 if (target == 0 || GET_CODE (target) != REG
9107 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9108 target = gen_reg_rtx (GET_MODE (target));
9110 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9111 result = compare_from_rtx (op0, op1, code, unsignedp,
9112 operand_mode, NULL_RTX, 0);
9113 if (GET_CODE (result) == CONST_INT)
9114 return (((result == const0_rtx && ! invert)
9115 || (result != const0_rtx && invert))
9116 ? const0_rtx : const1_rtx);
9118 label = gen_label_rtx ();
9119 if (bcc_gen_fctn[(int) code] == 0)
9122 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9123 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9129 /* Generate a tablejump instruction (used for switch statements). */
9131 #ifdef HAVE_tablejump
9133 /* INDEX is the value being switched on, with the lowest value
9134 in the table already subtracted.
9135 MODE is its expected mode (needed if INDEX is constant).
9136 RANGE is the length of the jump table.
9137 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9139 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9140 index value is out of range. */
9143 do_tablejump (index, mode, range, table_label, default_label)
9144 rtx index, range, table_label, default_label;
9145 enum machine_mode mode;
9147 register rtx temp, vector;
9149 /* Do an unsigned comparison (in the proper mode) between the index
9150 expression and the value which represents the length of the range.
9151 Since we just finished subtracting the lower bound of the range
9152 from the index expression, this comparison allows us to simultaneously
9153 check that the original index expression value is both greater than
9154 or equal to the minimum value of the range and less than or equal to
9155 the maximum value of the range. */
9157 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9158 emit_jump_insn (gen_bgtu (default_label));
9160 /* If index is in range, it must fit in Pmode.
9161 Convert to Pmode so we can index with it. */
9163 index = convert_to_mode (Pmode, index, 1);
9165 /* Don't let a MEM slip thru, because then INDEX that comes
9166 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9167 and break_out_memory_refs will go to work on it and mess it up. */
9168 #ifdef PIC_CASE_VECTOR_ADDRESS
9169 if (flag_pic && GET_CODE (index) != REG)
9170 index = copy_to_mode_reg (Pmode, index);
9173 /* If flag_force_addr were to affect this address
9174 it could interfere with the tricky assumptions made
9175 about addresses that contain label-refs,
9176 which may be valid only very near the tablejump itself. */
9177 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9178 GET_MODE_SIZE, because this indicates how large insns are. The other
9179 uses should all be Pmode, because they are addresses. This code
9180 could fail if addresses and insns are not the same size. */
9181 index = gen_rtx (PLUS, Pmode,
9182 gen_rtx (MULT, Pmode, index,
9183 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9184 gen_rtx (LABEL_REF, Pmode, table_label));
9185 #ifdef PIC_CASE_VECTOR_ADDRESS
9187 index = PIC_CASE_VECTOR_ADDRESS (index);
9190 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9191 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9192 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9193 RTX_UNCHANGING_P (vector) = 1;
9194 convert_move (temp, vector, 0);
9196 emit_jump_insn (gen_tablejump (temp, table_label));
9198 #ifndef CASE_VECTOR_PC_RELATIVE
9199 /* If we are generating PIC code or if the table is PC-relative, the
9200 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9206 #endif /* HAVE_tablejump */
9209 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9210 to that value is on the top of the stack. The resulting type is TYPE, and
9211 the source declaration is DECL. */
9214 bc_load_memory (type, decl)
9217 enum bytecode_opcode opcode;
9220 /* Bit fields are special. We only know about signed and
9221 unsigned ints, and enums. The latter are treated as
9224 if (DECL_BIT_FIELD (decl))
9225 if (TREE_CODE (type) == ENUMERAL_TYPE
9226 || TREE_CODE (type) == INTEGER_TYPE)
9227 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9231 /* See corresponding comment in bc_store_memory(). */
9232 if (TYPE_MODE (type) == BLKmode
9233 || TYPE_MODE (type) == VOIDmode)
9236 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9238 if (opcode == neverneverland)
9241 bc_emit_bytecode (opcode);
9243 #ifdef DEBUG_PRINT_CODE
9244 fputc ('\n', stderr);
9249 /* Store the contents of the second stack slot to the address in the
9250 top stack slot. DECL is the declaration of the destination and is used
9251 to determine whether we're dealing with a bitfield. */
9254 bc_store_memory (type, decl)
9257 enum bytecode_opcode opcode;
9260 if (DECL_BIT_FIELD (decl))
9262 if (TREE_CODE (type) == ENUMERAL_TYPE
9263 || TREE_CODE (type) == INTEGER_TYPE)
9269 if (TYPE_MODE (type) == BLKmode)
9271 /* Copy structure. This expands to a block copy instruction, storeBLK.
9272 In addition to the arguments expected by the other store instructions,
9273 it also expects a type size (SImode) on top of the stack, which is the
9274 structure size in size units (usually bytes). The two first arguments
9275 are already on the stack; so we just put the size on level 1. For some
9276 other languages, the size may be variable, this is why we don't encode
9277 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9279 bc_expand_expr (TYPE_SIZE (type));
9283 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9285 if (opcode == neverneverland)
9288 bc_emit_bytecode (opcode);
9290 #ifdef DEBUG_PRINT_CODE
9291 fputc ('\n', stderr);
9296 /* Allocate local stack space sufficient to hold a value of the given
9297 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9298 integral power of 2. A special case is locals of type VOID, which
9299 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9300 remapped into the corresponding attribute of SI. */
9303 bc_allocate_local (size, alignment)
9304 int size, alignment;
9312 /* Normalize size and alignment */
9314 size = UNITS_PER_WORD;
9316 if (alignment < BITS_PER_UNIT)
9317 byte_alignment = 1 << (INT_ALIGN - 1);
9320 byte_alignment = alignment / BITS_PER_UNIT;
9322 if (local_vars_size & (byte_alignment - 1))
9323 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9325 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9326 local_vars_size += size;
9332 /* Allocate variable-sized local array. Variable-sized arrays are
9333 actually pointers to the address in memory where they are stored. */
9336 bc_allocate_variable_array (size)
9340 const int ptralign = (1 << (PTR_ALIGN - 1));
9343 if (local_vars_size & ptralign)
9344 local_vars_size += ptralign - (local_vars_size & ptralign);
9346 /* Note down local space needed: pointer to block; also return
9349 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9350 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9355 /* Push the machine address for the given external variable offset. */
9357 bc_load_externaddr (externaddr)
9360 bc_emit_bytecode (constP);
9361 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9362 BYTECODE_BC_LABEL (externaddr)->offset);
9364 #ifdef DEBUG_PRINT_CODE
9365 fputc ('\n', stderr);
9374 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9380 /* Like above, but expects an IDENTIFIER. */
9382 bc_load_externaddr_id (id, offset)
9386 if (!IDENTIFIER_POINTER (id))
9389 bc_emit_bytecode (constP);
9390 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9392 #ifdef DEBUG_PRINT_CODE
9393 fputc ('\n', stderr);
9398 /* Push the machine address for the given local variable offset. */
9400 bc_load_localaddr (localaddr)
9403 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9407 /* Push the machine address for the given parameter offset.
9408 NOTE: offset is in bits. */
9410 bc_load_parmaddr (parmaddr)
9413 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9418 /* Convert a[i] into *(a + i). */
9420 bc_canonicalize_array_ref (exp)
9423 tree type = TREE_TYPE (exp);
9424 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9425 TREE_OPERAND (exp, 0));
9426 tree index = TREE_OPERAND (exp, 1);
9429 /* Convert the integer argument to a type the same size as a pointer
9430 so the multiply won't overflow spuriously. */
9432 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9433 index = convert (type_for_size (POINTER_SIZE, 0), index);
9435 /* The array address isn't volatile even if the array is.
9436 (Of course this isn't terribly relevant since the bytecode
9437 translator treats nearly everything as volatile anyway.) */
9438 TREE_THIS_VOLATILE (array_adr) = 0;
9440 return build1 (INDIRECT_REF, type,
9441 fold (build (PLUS_EXPR,
9442 TYPE_POINTER_TO (type),
9444 fold (build (MULT_EXPR,
9445 TYPE_POINTER_TO (type),
9447 size_in_bytes (type))))));
9451 /* Load the address of the component referenced by the given
9452 COMPONENT_REF expression.
9454 Returns innermost lvalue. */
9457 bc_expand_component_address (exp)
9461 enum machine_mode mode;
9463 HOST_WIDE_INT SIval;
9466 tem = TREE_OPERAND (exp, 1);
9467 mode = DECL_MODE (tem);
9470 /* Compute cumulative bit offset for nested component refs
9471 and array refs, and find the ultimate containing object. */
9473 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
9475 if (TREE_CODE (tem) == COMPONENT_REF)
9476 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9478 if (TREE_CODE (tem) == ARRAY_REF
9479 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9482 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9483 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9484 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9489 bc_expand_expr (tem);
9492 /* For bitfields also push their offset and size */
9493 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9494 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9496 if (SIval = bitpos / BITS_PER_UNIT)
9497 bc_emit_instruction (addconstPSI, SIval);
9499 return (TREE_OPERAND (exp, 1));
9503 /* Emit code to push two SI constants */
9505 bc_push_offset_and_size (offset, size)
9506 HOST_WIDE_INT offset, size;
9508 bc_emit_instruction (constSI, offset);
9509 bc_emit_instruction (constSI, size);
9513 /* Emit byte code to push the address of the given lvalue expression to
9514 the stack. If it's a bit field, we also push offset and size info.
9516 Returns innermost component, which allows us to determine not only
9517 its type, but also whether it's a bitfield. */
9520 bc_expand_address (exp)
9524 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9528 switch (TREE_CODE (exp))
9532 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
9536 return (bc_expand_component_address (exp));
9540 bc_expand_expr (TREE_OPERAND (exp, 0));
9542 /* For variable-sized types: retrieve pointer. Sometimes the
9543 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9544 also make sure we have an operand, just in case... */
9546 if (TREE_OPERAND (exp, 0)
9547 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9548 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9549 bc_emit_instruction (loadP);
9551 /* If packed, also return offset and size */
9552 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9554 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9555 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9557 return (TREE_OPERAND (exp, 0));
9561 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9562 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
9567 bc_load_parmaddr (DECL_RTL (exp));
9569 /* For variable-sized types: retrieve pointer */
9570 if (TYPE_SIZE (TREE_TYPE (exp))
9571 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9572 bc_emit_instruction (loadP);
9574 /* If packed, also return offset and size */
9575 if (DECL_BIT_FIELD (exp))
9576 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9577 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9583 bc_emit_instruction (returnP);
9589 if (BYTECODE_LABEL (DECL_RTL (exp)))
9590 bc_load_externaddr (DECL_RTL (exp));
9593 if (DECL_EXTERNAL (exp))
9594 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9595 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9597 bc_load_localaddr (DECL_RTL (exp));
9599 /* For variable-sized types: retrieve pointer */
9600 if (TYPE_SIZE (TREE_TYPE (exp))
9601 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9602 bc_emit_instruction (loadP);
9604 /* If packed, also return offset and size */
9605 if (DECL_BIT_FIELD (exp))
9606 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9607 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9615 bc_emit_bytecode (constP);
9616 r = output_constant_def (exp);
9617 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
9619 #ifdef DEBUG_PRINT_CODE
9620 fputc ('\n', stderr);
9631 /* Most lvalues don't have components. */
9636 /* Emit a type code to be used by the runtime support in handling
9637 parameter passing. The type code consists of the machine mode
9638 plus the minimal alignment shifted left 8 bits. */
9641 bc_runtime_type_code (type)
9646 switch (TREE_CODE (type))
9656 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
9668 return build_int_2 (val, 0);
9672 /* Generate constructor label */
9674 bc_gen_constr_label ()
9676 static int label_counter;
9677 static char label[20];
9679 sprintf (label, "*LR%d", label_counter++);
9681 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9685 /* Evaluate constructor CONSTR and return pointer to it on level one. We
9686 expand the constructor data as static data, and push a pointer to it.
9687 The pointer is put in the pointer table and is retrieved by a constP
9688 bytecode instruction. We then loop and store each constructor member in
9689 the corresponding component. Finally, we return the original pointer on
9693 bc_expand_constructor (constr)
9697 HOST_WIDE_INT ptroffs;
9701 /* Literal constructors are handled as constants, whereas
9702 non-literals are evaluated and stored element by element
9703 into the data segment. */
9705 /* Allocate space in proper segment and push pointer to space on stack.
9708 l = bc_gen_constr_label ();
9710 if (TREE_CONSTANT (constr))
9714 bc_emit_const_labeldef (l);
9715 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
9721 bc_emit_data_labeldef (l);
9722 bc_output_data_constructor (constr);
9726 /* Add reference to pointer table and recall pointer to stack;
9727 this code is common for both types of constructors: literals
9728 and non-literals. */
9730 ptroffs = bc_define_pointer (l);
9731 bc_emit_instruction (constP, ptroffs);
9733 /* This is all that has to be done if it's a literal. */
9734 if (TREE_CONSTANT (constr))
9738 /* At this point, we have the pointer to the structure on top of the stack.
9739 Generate sequences of store_memory calls for the constructor. */
9741 /* constructor type is structure */
9742 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
9746 /* If the constructor has fewer fields than the structure,
9747 clear the whole structure first. */
9749 if (list_length (CONSTRUCTOR_ELTS (constr))
9750 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9752 bc_emit_instruction (duplicate);
9753 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9754 bc_emit_instruction (clearBLK);
9757 /* Store each element of the constructor into the corresponding
9760 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9762 register tree field = TREE_PURPOSE (elt);
9763 register enum machine_mode mode;
9768 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9769 mode = DECL_MODE (field);
9770 unsignedp = TREE_UNSIGNED (field);
9772 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9774 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9775 /* The alignment of TARGET is
9776 at least what its type requires. */
9778 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9779 int_size_in_bytes (TREE_TYPE (constr)));
9784 /* Constructor type is array */
9785 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9789 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9790 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9791 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9792 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9794 /* If the constructor has fewer fields than the structure,
9795 clear the whole structure first. */
9797 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9799 bc_emit_instruction (duplicate);
9800 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9801 bc_emit_instruction (clearBLK);
9805 /* Store each element of the constructor into the corresponding
9806 element of TARGET, determined by counting the elements. */
9808 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9810 elt = TREE_CHAIN (elt), i++)
9812 register enum machine_mode mode;
9817 mode = TYPE_MODE (elttype);
9818 bitsize = GET_MODE_BITSIZE (mode);
9819 unsignedp = TREE_UNSIGNED (elttype);
9821 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9822 /* * TYPE_SIZE_UNIT (elttype) */ );
9824 bc_store_field (elt, bitsize, bitpos, mode,
9825 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9826 /* The alignment of TARGET is
9827 at least what its type requires. */
9829 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9830 int_size_in_bytes (TREE_TYPE (constr)));
9837 /* Store the value of EXP (an expression tree) into member FIELD of
9838 structure at address on stack, which has type TYPE, mode MODE and
9839 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9842 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9843 TOTAL_SIZE is its size in bytes, or -1 if variable. */
9846 bc_store_field (field, bitsize, bitpos, mode, exp, type,
9847 value_mode, unsignedp, align, total_size)
9848 int bitsize, bitpos;
9849 enum machine_mode mode;
9850 tree field, exp, type;
9851 enum machine_mode value_mode;
9857 /* Expand expression and copy pointer */
9858 bc_expand_expr (exp);
9859 bc_emit_instruction (over);
9862 /* If the component is a bit field, we cannot use addressing to access
9863 it. Use bit-field techniques to store in it. */
9865 if (DECL_BIT_FIELD (field))
9867 bc_store_bit_field (bitpos, bitsize, unsignedp);
9873 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9875 /* Advance pointer to the desired member */
9877 bc_emit_instruction (addconstPSI, offset);
9880 bc_store_memory (type, field);
9885 /* Store SI/SU in bitfield */
9887 bc_store_bit_field (offset, size, unsignedp)
9888 int offset, size, unsignedp;
9890 /* Push bitfield offset and size */
9891 bc_push_offset_and_size (offset, size);
9894 bc_emit_instruction (sstoreBI);
9898 /* Load SI/SU from bitfield */
9900 bc_load_bit_field (offset, size, unsignedp)
9901 int offset, size, unsignedp;
9903 /* Push bitfield offset and size */
9904 bc_push_offset_and_size (offset, size);
9906 /* Load: sign-extend if signed, else zero-extend */
9907 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
9911 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9912 (adjust stack pointer upwards), negative means add that number of
9913 levels (adjust the stack pointer downwards). Only positive values
9914 normally make sense. */
9917 bc_adjust_stack (nlevels)
9926 bc_emit_instruction (drop);
9929 bc_emit_instruction (drop);
9934 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9935 stack_depth -= nlevels;
9938 #if defined (VALIDATE_STACK_FOR_BC)
9939 VALIDATE_STACK_FOR_BC ();