1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
/* CEIL(x,y): integer division of X by Y, rounded toward +infinity.
   Intended for nonnegative X and positive Y.  X is evaluated once
   but Y twice, so Y must be side-effect free.  */
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
/* Defined (valueless) when arguments must be pushed last-to-first.  */
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
/* Default the push addressing mode from the stack growth direction
   when the target does not supply STACK_PUSH_CODE itself.  */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
69 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
84 /* Nonzero to generate code for all the subroutines within an
85 expression before generating the upper levels of the expression.
86 Nowadays this is never zero. */
87 int do_preexpand_calls = 1;
89 /* Number of units that we should eventually pop off the stack.
90 These are the arguments to function calls that have already returned. */
91 int pending_stack_adjust;
93 /* Nonzero means stack pops must not be deferred, and deferred stack
94 pops must not be output. It is nonzero inside a function call,
95 inside a conditional expression, inside a statement expression,
96 and in other cases as well. */
97 int inhibit_defer_pop;
99 /* Nonzero means __builtin_saveregs has already been done in this function.
100 The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
102 static rtx saveregs_value;
104 /* Similarly for __builtin_apply_args. */
105 static rtx apply_args_value;
107 /* Nonzero if the machine description has been fixed to accept
108 CONSTANT_P_RTX patterns. We will emit a warning and continue
109 if we find we must actually use such a beast. */
110 static int can_handle_constant_p;
112 /* Don't check memory usage, since code is being emitted to check a memory
113 usage. Used when current_function_check_memory_usage is true, to avoid
114 infinite recursion. */
115 static int in_check_memory_usage;
117 /* Postincrements that still need to be expanded. */
/* Head of the chain of QUEUED rtxs; built by enqueue_insn and
   flushed by emit_queue (both defined below).  */
118 static rtx pending_chain;
120 /* This structure is used by move_by_pieces to describe the move to
   be performed.  */
122 struct move_by_pieces
/* NOTE(review): most fields of this struct are elided in this excerpt;
   only explicit_inc_from is visible.  */
132 int explicit_inc_from;
139 /* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */
142 struct clear_by_pieces
/* Shared state owned by other compilation units.  */
154 extern struct obstack permanent_obstack;
155 extern rtx arg_pointer_save_area;
/* Forward declarations for the static helpers defined later in this
   file.  PROTO is the K&R/ANSI prototype compatibility macro.  */
157 static rtx get_push_address PROTO ((int));
159 static rtx enqueue_insn PROTO((rtx, rtx));
160 static int queued_subexp_p PROTO((rtx));
161 static void init_queue PROTO((void));
162 static int move_by_pieces_ninsns PROTO((unsigned int, int));
163 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
164 struct move_by_pieces *));
165 static void clear_by_pieces PROTO((rtx, int, int));
166 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
167 struct clear_by_pieces *));
168 static int is_zeros_p PROTO((tree));
169 static int mostly_zeros_p PROTO((tree));
170 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int,
176 static enum memory_use_mode
177 get_memory_usage_from_modifier PROTO((enum expand_modifier));
178 static tree save_noncopied_parts PROTO((tree, tree));
179 static tree init_noncopied_parts PROTO((tree, tree));
180 static int safe_from_p PROTO((rtx, tree, int));
181 static int fixed_type_p PROTO((tree));
182 static rtx var_rtx PROTO((tree));
183 static int get_pointer_alignment PROTO((tree, unsigned));
184 static tree string_constant PROTO((tree, tree *));
185 static tree c_strlen PROTO((tree));
186 static rtx get_memory_rtx PROTO((tree));
187 static rtx expand_builtin PROTO((tree, rtx, rtx,
188 enum machine_mode, int));
189 static int apply_args_size PROTO((void));
190 static int apply_result_size PROTO((void));
191 static rtx result_vector PROTO((int, rtx));
192 static rtx expand_builtin_apply_args PROTO((void));
193 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
194 static void expand_builtin_return PROTO((rtx));
195 static rtx expand_increment PROTO((tree, int, int));
196 static void preexpand_calls PROTO((tree));
197 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
198 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
199 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
200 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
201 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
203 /* Record for each mode whether we can move a register directly to or
204 from an object of that mode in memory. If we can't, we won't try
205 to use that mode directly when accessing a field of that mode. */
/* Both tables are filled in once per compilation by init_expr_once
   below; indexed by (int) machine_mode.  */
207 static char direct_load[NUM_MACHINE_MODES];
208 static char direct_store[NUM_MACHINE_MODES];
210 /* If a memory-to-memory move would take MOVE_RATIO or more simple
211 move-instruction sequences, we will do a movstr or libcall instead. */
214 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
217 /* If we are optimizing for space (-Os), cut down the default move ratio */
218 #define MOVE_RATIO (optimize_size ? 3 : 15)
222 /* This array records the insn_code of insns to perform block moves. */
223 enum insn_code movstr_optab[NUM_MACHINE_MODES];
225 /* This array records the insn_code of insns to perform block clears. */
226 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
228 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
230 #ifndef SLOW_UNALIGNED_ACCESS
231 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
234 /* Register mappings for target machines without register windows. */
/* On such machines the incoming and outgoing register numbers are
   identical, so default both maps to the identity.  */
235 #ifndef INCOMING_REGNO
236 #define INCOMING_REGNO(OUT) (OUT)
238 #ifndef OUTGOING_REGNO
239 #define OUTGOING_REGNO(IN) (IN)
242 /* This is run once per compilation to set up which modes can be used
243 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header and many interior lines are elided
   in this excerpt; the comments below describe only the visible code.
   The visible body probes, for every machine mode, whether a plain
   (set (reg) (mem)) / (set (mem) (reg)) insn is recognized, recording
   the results in direct_load[] / direct_store[], and finally tests
   whether the target recognizes CONSTANT_P_RTX.  */
249 enum machine_mode mode;
256 /* Since we are on the permanent obstack, we must be sure we save this
257 spot AFTER we call start_sequence, since it will reuse the rtl it
   makes.  */
259 free_point = (char *) oballoc (0);
261 /* Try indexing by frame ptr and try by stack ptr.
262 It is known that on the Convex the stack ptr isn't a valid index.
263 With luck, one or the other is valid on any machine. */
264 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
265 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
/* A dummy SET insn whose operands are patched below and re-recognized
   for each mode/register combination.  */
267 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX))
268 pat = PATTERN (insn);
270 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
271 mode = (enum machine_mode) ((int) mode + 1))
276 direct_load[(int) mode] = direct_store[(int) mode] = 0;
277 PUT_MODE (mem, mode);
278 PUT_MODE (mem1, mode);
280 /* See if there is some register that can be used in this mode and
281 directly loaded or stored from memory. */
283 if (mode != VOIDmode && mode != BLKmode)
284 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
285 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
288 if (! HARD_REGNO_MODE_OK (regno, mode))
291 reg = gen_rtx_REG (mode, regno);
/* reg <- mem (stack-pointer based): a recognized insn means this
   mode can be loaded directly.  */
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
/* reg <- mem1 (frame-pointer based).  */
298 SET_SRC (pat) = mem1;
299 SET_DEST (pat) = reg;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_load[(int) mode] = 1;
/* mem <- reg: direct store via the stack pointer.  */
304 SET_DEST (pat) = mem;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
/* mem1 <- reg: direct store via the frame pointer.  */
309 SET_DEST (pat) = mem1;
310 if (recog (pat, insn, &num_clobbers) >= 0)
311 direct_store[(int) mode] = 1;
315 /* Find out if CONSTANT_P_RTX is accepted. */
316 SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
317 FIRST_PSEUDO_REGISTER);
318 SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
320 if (recog (pat, insn, &num_clobbers) >= 0)
321 can_handle_constant_p = 1;
327 /* This is run at the start of compiling a function. */
/* NOTE(review): the function header is elided here; the visible body
   resets the per-function expansion state declared above.  */
334 pending_stack_adjust = 0;
335 inhibit_defer_pop = 0;
337 apply_args_value = 0;
341 /* Save all variables describing the current status into the structure *P.
342 This is used before starting a nested function. */
/* NOTE(review): function header elided.  The visible body copies the
   per-function state into *P, then resets it for the nested function;
   restore_expr_status below performs the inverse.  */
348 p->pending_chain = pending_chain;
349 p->pending_stack_adjust = pending_stack_adjust;
350 p->inhibit_defer_pop = inhibit_defer_pop;
351 p->saveregs_value = saveregs_value;
352 p->apply_args_value = apply_args_value;
353 p->forced_labels = forced_labels;
/* Start the nested function with a clean slate.  */
355 pending_chain = NULL_RTX;
356 pending_stack_adjust = 0;
357 inhibit_defer_pop = 0;
359 apply_args_value = 0;
363 /* Restore all variables describing the current status from the structure *P.
364 This is used after a nested function. */
/* Inverse of save_expr_status above: copies every saved field back
   into the file-scope state.  */
367 restore_expr_status (p)
370 pending_chain = p->pending_chain;
371 pending_stack_adjust = p->pending_stack_adjust;
372 inhibit_defer_pop = p->inhibit_defer_pop;
373 saveregs_value = p->saveregs_value;
374 apply_args_value = p->apply_args_value;
375 forced_labels = p->forced_labels;
378 /* Manage the queue of increment instructions to be output
379 for POSTINCREMENT_EXPR expressions, etc. */
381 /* Queue up to increment (or change) VAR later. BODY says how:
382 BODY should be the same thing you would pass to emit_insn
383 to increment right away. It will go to emit_insn later on.
385 The value is a QUEUED expression to be used in place of VAR
386 where you want to guarantee the pre-incrementation value of VAR. */
389 enqueue_insn (var, body)
/* Push a new QUEUED rtx onto the head of pending_chain; the NULL_RTX
   slots (QUEUED_INSN, QUEUED_COPY) are filled in lazily by emit_queue
   and protect_from_queue.  */
392 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
393 var, NULL_RTX, NULL_RTX, body,
395 return pending_chain;
398 /* Use protect_from_queue to convert a QUEUED expression
399 into something that you can put immediately into an instruction.
400 If the queued incrementation has not happened yet,
401 protect_from_queue returns the variable itself.
402 If the incrementation has happened, protect_from_queue returns a temp
403 that contains a copy of the old value of the variable.
405 Any time an rtx which might possibly be a QUEUED is to be put
406 into an instruction, it must be passed through protect_from_queue first.
407 QUEUED expressions are not meaningful in instructions.
409 Do not pass a value through protect_from_queue and then hold
410 on to it for a while before putting it in an instruction!
411 If the queue is flushed in between, incorrect code will result. */
/* NOTE(review): several interior lines of this function are elided in
   this excerpt; the annotations below cover only the visible code.  */
414 protect_from_queue (x, modify)
418 register RTX_CODE code = GET_CODE (x);
420 #if 0 /* A QUEUED can hang around after the queue is forced out. */
421 /* Shortcut for most common case. */
422 if (pending_chain == 0)
428 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
429 use of autoincrement. Make a copy of the contents of the memory
430 location rather than a copy of the address, but not if the value is
431 of mode BLKmode. Don't modify X in place since it might be
   shared.  */
433 if (code == MEM && GET_MODE (x) != BLKmode
434 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
436 register rtx y = XEXP (x, 0);
437 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
/* Carry the original MEM's attribute bits over to the copy.  */
439 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
440 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
441 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
442 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
446 register rtx temp = gen_reg_rtx (GET_MODE (new));
447 emit_insn_before (gen_move_insn (temp, new),
453 /* Otherwise, recursively protect the subexpressions of all
454 the kinds of rtx's that can contain a QUEUED. */
457 rtx tem = protect_from_queue (XEXP (x, 0), 0);
458 if (tem != XEXP (x, 0))
464 else if (code == PLUS || code == MULT)
466 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
467 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
468 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
477 /* If the increment has not happened, use the variable itself. */
478 if (QUEUED_INSN (x) == 0)
479 return QUEUED_VAR (x);
480 /* If the increment has happened and a pre-increment copy exists,
   use that copy.  */
482 if (QUEUED_COPY (x) != 0)
483 return QUEUED_COPY (x);
484 /* The increment has happened but we haven't set up a pre-increment copy.
485 Set one up now, and use it. */
486 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
487 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
489 return QUEUED_COPY (x);
492 /* Return nonzero if X contains a QUEUED expression:
493 if it contains anything that will be altered by a queued increment.
494 We handle only combinations of MEM, PLUS, MINUS and MULT operators
495 since memory addresses generally contain only those. */
/* NOTE(review): the function header and switch skeleton are elided;
   only the recursive cases are visible below.  */
501 register enum rtx_code code = GET_CODE (x);
/* Unary case (MEM): look inside the address.  */
507 return queued_subexp_p (XEXP (x, 0));
/* Binary cases (PLUS/MINUS/MULT): either operand may be queued.  */
511 return (queued_subexp_p (XEXP (x, 0))
512 || queued_subexp_p (XEXP (x, 1)));
518 /* Perform all the pending incrementations. */
/* Walks pending_chain, emitting each queued body and recording the
   first emitted insn in QUEUED_INSN so that protect_from_queue can
   later tell that the increment has happened.  */
524 while ((p = pending_chain))
526 rtx body = QUEUED_BODY (p);
528 if (GET_CODE (body) == SEQUENCE)
/* For a SEQUENCE the representative insn is its first element.  */
530 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
531 emit_insn (QUEUED_BODY (p));
534 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
535 pending_chain = QUEUED_NEXT (p);
546 /* Copy data from FROM to TO, where the machine modes are not the same.
547 Both modes may be integer, or both may be floating.
548 UNSIGNEDP should be nonzero if FROM is an unsigned type.
549 This causes zero-extension instead of sign-extension. */
/* NOTE(review): a large number of interior lines of this function are
   elided in this excerpt (closing braces, abort calls, the libcall
   selection switch, local declarations).  The comments added below
   describe only the code that is visible.  */
552 convert_move (to, from, unsignedp)
553 register rtx to, from;
556 enum machine_mode to_mode = GET_MODE (to);
557 enum machine_mode from_mode = GET_MODE (from);
558 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
559 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
563 /* rtx code for making an equivalent value. */
564 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
566 to = protect_from_queue (to, 1);
567 from = protect_from_queue (from, 0);
/* Mixed float/integer conversion is not handled here (the consequent
   is elided — presumably an abort; confirm against full source).  */
569 if (to_real != from_real)
572 /* If FROM is a SUBREG that indicates that we have already done at least
573 the required extension, strip it. We don't handle such SUBREGs as
   destinations.  */
576 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
577 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
578 >= GET_MODE_SIZE (to_mode))
579 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
580 from = gen_lowpart (to_mode, from), from_mode = to_mode;
582 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Same-mode (or constant-to-any-mode) copy: a plain move suffices.  */
585 if (to_mode == from_mode
586 || (from_mode == VOIDmode && CONSTANT_P (from)))
588 emit_move_insn (to, from);
/* --- Floating-point conversions (visible fragment). --- */
596 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
598 /* Try converting directly if the insn is supported. */
599 if ((code = can_extend_p (to_mode, from_mode, 0))
602 emit_unop_insn (code, to, from, UNKNOWN);
/* Target-specific float truncation insns, tried one
   (from_mode, to_mode) pair at a time.  */
607 #ifdef HAVE_trunchfqf2
608 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
610 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
614 #ifdef HAVE_trunctqfqf2
615 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
617 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
621 #ifdef HAVE_truncsfqf2
622 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
624 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncdfqf2
629 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
631 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncxfqf2
636 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
638 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
642 #ifdef HAVE_trunctfqf2
643 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
645 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
650 #ifdef HAVE_trunctqfhf2
651 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
653 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
657 #ifdef HAVE_truncsfhf2
658 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
660 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
664 #ifdef HAVE_truncdfhf2
665 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
667 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
671 #ifdef HAVE_truncxfhf2
672 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
674 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
678 #ifdef HAVE_trunctfhf2
679 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
681 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
686 #ifdef HAVE_truncsftqf2
687 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
689 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
693 #ifdef HAVE_truncdftqf2
694 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
696 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
700 #ifdef HAVE_truncxftqf2
701 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
703 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
707 #ifdef HAVE_trunctftqf2
708 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
710 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
715 #ifdef HAVE_truncdfsf2
716 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
718 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
722 #ifdef HAVE_truncxfsf2
723 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
725 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
729 #ifdef HAVE_trunctfsf2
730 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
732 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
736 #ifdef HAVE_truncxfdf2
737 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
739 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
743 #ifdef HAVE_trunctfdf2
744 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
746 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No direct insn: fall back to a library call.  The selecting switch
   is elided here; presumably it keys on (from_mode, to_mode) — confirm
   against the full source.  */
758 libcall = extendsfdf2_libfunc;
762 libcall = extendsfxf2_libfunc;
766 libcall = extendsftf2_libfunc;
778 libcall = truncdfsf2_libfunc;
782 libcall = extenddfxf2_libfunc;
786 libcall = extenddftf2_libfunc;
798 libcall = truncxfsf2_libfunc;
802 libcall = truncxfdf2_libfunc;
814 libcall = trunctfsf2_libfunc;
818 libcall = trunctfdf2_libfunc;
830 if (libcall == (rtx) 0)
831 /* This conversion is not implemented yet. */
834 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
836 emit_move_insn (to, value);
840 /* Now both modes are integers. */
842 /* Handle expanding beyond a word. */
843 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
844 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
851 enum machine_mode lowpart_mode;
852 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
854 /* Try converting directly if the insn is supported. */
855 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
858 /* If FROM is a SUBREG, put it into a register. Do this
859 so that we always generate the same set of insns for
860 better cse'ing; if an intermediate assignment occurred,
861 we won't be doing the operation directly on the SUBREG. */
862 if (optimize > 0 && GET_CODE (from) == SUBREG)
863 from = force_reg (from_mode, from);
864 emit_unop_insn (code, to, from, equiv_code);
867 /* Next, try converting via full word. */
868 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
869 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
870 != CODE_FOR_nothing))
872 if (GET_CODE (to) == REG)
873 emit_insn (gen_rtx_CLOBBER (VOIDmode, to))
874 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
875 emit_unop_insn (code, to,
876 gen_lowpart (word_mode, to), equiv_code);
880 /* No special multiword conversion insn; do it by hand. */
883 /* Since we will turn this into a no conflict block, we must ensure
884 that the source does not overlap the target. */
886 if (reg_overlap_mentioned_p (to, from))
887 from = force_reg (from_mode, from);
889 /* Get a copy of FROM widened to a word, if necessary. */
890 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
891 lowpart_mode = word_mode;
893 lowpart_mode = from_mode;
895 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
897 lowpart = gen_lowpart (lowpart_mode, to);
898 emit_move_insn (lowpart, lowfrom);
900 /* Compute the value to put in each remaining word. */
/* Unsigned: upper words are simply zero.  */
902 fill_value = const0_rtx;
/* Signed: try a store-flag insn (slt with STORE_FLAG_VALUE == -1
   yields all-ones exactly when lowfrom is negative).  */
907 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
908 && STORE_FLAG_VALUE == -1)
910 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
912 fill_value = gen_reg_rtx (word_mode);
913 emit_insn (gen_slt (fill_value));
/* Otherwise replicate the sign bit with an arithmetic right shift.  */
919 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
920 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
922 fill_value = convert_to_mode (word_mode, fill_value, 1);
926 /* Fill the remaining words. */
927 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
929 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
930 rtx subword = operand_subword (to, index, 1, to_mode);
935 if (fill_value != subword)
936 emit_move_insn (subword, fill_value);
939 insns = get_insns ();
942 emit_no_conflict_block (insns, to, from, NULL_RTX,
943 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
947 /* Truncating multi-word to a word or less. */
948 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
949 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
/* gen_lowpart below requires FROM to be a safe operand: a non-volatile
   directly-loadable MEM, a REG, or a SUBREG.  Force anything else
   into a register first.  */
951 if (!((GET_CODE (from) == MEM
952 && ! MEM_VOLATILE_P (from)
953 && direct_load[(int) to_mode]
954 && ! mode_dependent_address_p (XEXP (from, 0)))
955 || GET_CODE (from) == REG
956 || GET_CODE (from) == SUBREG))
957 from = force_reg (from_mode, from);
958 convert_move (to, gen_lowpart (word_mode, from), 0);
962 /* Handle pointer conversion */ /* SPEE 900220 */
/* Each partial-integer pointer mode (PQI/PSI/PDI) is first normalized
   through its full-width counterpart, then converted with the special
   trunc/extend insn when the target provides one.  */
963 if (to_mode == PQImode)
965 if (from_mode != QImode)
966 from = convert_to_mode (QImode, from, unsignedp);
968 #ifdef HAVE_truncqipqi2
969 if (HAVE_truncqipqi2)
971 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
974 #endif /* HAVE_truncqipqi2 */
978 if (from_mode == PQImode)
980 if (to_mode != QImode)
982 from = convert_to_mode (QImode, from, unsignedp);
987 #ifdef HAVE_extendpqiqi2
988 if (HAVE_extendpqiqi2)
990 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
993 #endif /* HAVE_extendpqiqi2 */
998 if (to_mode == PSImode)
1000 if (from_mode != SImode)
1001 from = convert_to_mode (SImode, from, unsignedp);
1003 #ifdef HAVE_truncsipsi2
1004 if (HAVE_truncsipsi2)
1006 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1009 #endif /* HAVE_truncsipsi2 */
1013 if (from_mode == PSImode)
1015 if (to_mode != SImode)
1017 from = convert_to_mode (SImode, from, unsignedp);
1022 #ifdef HAVE_extendpsisi2
1023 if (HAVE_extendpsisi2)
1025 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1028 #endif /* HAVE_extendpsisi2 */
1033 if (to_mode == PDImode)
1035 if (from_mode != DImode)
1036 from = convert_to_mode (DImode, from, unsignedp);
1038 #ifdef HAVE_truncdipdi2
1039 if (HAVE_truncdipdi2)
1041 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1044 #endif /* HAVE_truncdipdi2 */
1048 if (from_mode == PDImode)
1050 if (to_mode != DImode)
1052 from = convert_to_mode (DImode, from, unsignedp);
1057 #ifdef HAVE_extendpdidi2
1058 if (HAVE_extendpdidi2)
1060 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1063 #endif /* HAVE_extendpdidi2 */
1068 /* Now follow all the conversions between integers
1069 no more than a word long. */
1071 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1072 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (from_mode)))
1076 if (!((GET_CODE (from) == MEM
1077 && ! MEM_VOLATILE_P (from)
1078 && direct_load[(int) to_mode]
1079 && ! mode_dependent_address_p (XEXP (from, 0)))
1080 || GET_CODE (from) == REG
1081 || GET_CODE (from) == SUBREG))
1082 from = force_reg (from_mode, from);
/* A hard register that cannot hold to_mode must be copied to a
   pseudo before the lowpart reference.  */
1083 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1084 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1085 from = copy_to_reg (from);
1086 emit_move_insn (to, gen_lowpart (to_mode, from));
1090 /* Handle extension. */
1091 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1093 /* Convert directly if that works. */
1094 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1095 != CODE_FOR_nothing)
1097 emit_unop_insn (code, to, from, equiv_code);
1102 enum machine_mode intermediate;
1106 /* Search for a mode to convert via. */
1107 for (intermediate = from_mode; intermediate != VOIDmode;
1108 intermediate = GET_MODE_WIDER_MODE (intermediate))
1109 if (((can_extend_p (to_mode, intermediate, unsignedp)
1110 != CODE_FOR_nothing)
1111 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1112 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1113 && (can_extend_p (intermediate, from_mode, unsignedp)
1114 != CODE_FOR_nothing))
1116 convert_move (to, convert_to_mode (intermediate, from,
1117 unsignedp), unsignedp);
1121 /* No suitable intermediate mode.
1122 Generate what we need with shifts. */
1123 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1124 - GET_MODE_BITSIZE (from_mode), 0);
1125 from = gen_lowpart (to_mode, force_reg (from_mode, from));
/* Shift left to the top, then arithmetic/logical shift back down:
   the pair performs the sign- or zero-extension.  */
1126 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1128 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1131 emit_move_insn (to, tmp);
1136 /* Support special truncate insns for certain modes. */
1138 if (from_mode == DImode && to_mode == SImode)
1140 #ifdef HAVE_truncdisi2
1141 if (HAVE_truncdisi2)
1143 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1151 if (from_mode == DImode && to_mode == HImode)
1153 #ifdef HAVE_truncdihi2
1154 if (HAVE_truncdihi2)
1156 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1164 if (from_mode == DImode && to_mode == QImode)
1166 #ifdef HAVE_truncdiqi2
1167 if (HAVE_truncdiqi2)
1169 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1177 if (from_mode == SImode && to_mode == HImode)
1179 #ifdef HAVE_truncsihi2
1180 if (HAVE_truncsihi2)
1182 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 if (from_mode == SImode && to_mode == QImode)
1192 #ifdef HAVE_truncsiqi2
1193 if (HAVE_truncsiqi2)
1195 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 if (from_mode == HImode && to_mode == QImode)
1205 #ifdef HAVE_trunchiqi2
1206 if (HAVE_trunchiqi2)
1208 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 if (from_mode == TImode && to_mode == DImode)
1218 #ifdef HAVE_trunctidi2
1219 if (HAVE_trunctidi2)
1221 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 if (from_mode == TImode && to_mode == SImode)
1231 #ifdef HAVE_trunctisi2
1232 if (HAVE_trunctisi2)
1234 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 if (from_mode == TImode && to_mode == HImode)
1244 #ifdef HAVE_trunctihi2
1245 if (HAVE_trunctihi2)
1247 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1251 convert_move (to, force_reg (from_mode, from), unsignedp);
1255 if (from_mode == TImode && to_mode == QImode)
1257 #ifdef HAVE_trunctiqi2
1258 if (HAVE_trunctiqi2)
1260 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1264 convert_move (to, force_reg (from_mode, from), unsignedp);
1268 /* Handle truncation of volatile memrefs, and so on;
1269 the things that couldn't be truncated directly,
1270 and for which there was no special instruction. */
1271 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1273 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1274 emit_move_insn (to, temp);
1278 /* Mode combination is not recognized. */
1282 /* Return an rtx for a value that would result
1283 from converting X to mode MODE.
1284 Both X and MODE may be floating, or both integer.
1285 UNSIGNEDP is nonzero if X is an unsigned value.
1286 This can be done by referring to a part of X in place
1287 or by copying to a new temporary with conversion.
1289 This function *must not* call protect_from_queue
1290 except when putting X into an insn (in which case convert_move does it). */
/* Thin wrapper: delegates to convert_modes with OLDMODE = VOIDmode,
   i.e. it requires X to carry a nonvoid mode of its own.  */
1293 convert_to_mode (mode, x, unsignedp)
1294 enum machine_mode mode;
1298 return convert_modes (mode, VOIDmode, x, unsignedp);
1301 /* Return an rtx for a value that would result
1302 from converting X from mode OLDMODE to mode MODE.
1303 Both modes may be floating, or both integer.
1304 UNSIGNEDP is nonzero if X is an unsigned value.
1306 This can be done by referring to a part of X in place
1307 or by copying to a new temporary with conversion.
1309 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1311 This function *must not* call protect_from_queue
1312 except when putting X into an insn (in which case convert_move does it). */
/* NOTE(review): some interior lines (early returns, the final return of
   the temporary) are elided in this excerpt; comments cover visible
   code only.  */
1315 convert_modes (mode, oldmode, x, unsignedp)
1316 enum machine_mode mode, oldmode;
1322 /* If FROM is a SUBREG that indicates that we have already done at least
1323 the required extension, strip it. */
1325 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1326 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1327 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1328 x = gen_lowpart (mode, x);
/* Prefer X's own mode over the caller-supplied OLDMODE.  */
1330 if (GET_MODE (x) != VOIDmode)
1331 oldmode = GET_MODE (x);
1333 if (mode == oldmode)
1336 /* There is one case that we must handle specially: If we are converting
1337 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1338 we are to interpret the constant as unsigned, gen_lowpart will do
1339 the wrong if the constant appears negative. What we want to do is
1340 make the high-order word of the constant zero, not all ones. */
1342 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1343 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1344 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1346 HOST_WIDE_INT val = INTVAL (x);
1348 if (oldmode != VOIDmode
1349 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1351 int width = GET_MODE_BITSIZE (oldmode);
1353 /* We need to zero extend VAL. */
1354 val &= ((HOST_WIDE_INT) 1 << width) - 1;
/* Build the double-word constant with an explicitly zero high word.  */
1357 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1360 /* We can do this with a gen_lowpart if both desired and current modes
1361 are integer, and this is either a constant integer, a register, or a
1362 non-volatile MEM. Except for the constant case where MODE is no
1363 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1365 if ((GET_CODE (x) == CONST_INT
1366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1367 || (GET_MODE_CLASS (mode) == MODE_INT
1368 && GET_MODE_CLASS (oldmode) == MODE_INT
1369 && (GET_CODE (x) == CONST_DOUBLE
1370 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1371 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1372 && direct_load[(int) mode])
1373 || (GET_CODE (x) == REG
1374 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1375 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1377 /* ?? If we don't know OLDMODE, we have to assume here that
1378 X does not need sign- or zero-extension. This may not be
1379 the case, but it's the best we can do. */
1380 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1381 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1383 HOST_WIDE_INT val = INTVAL (x);
1384 int width = GET_MODE_BITSIZE (oldmode);
1386 /* We must sign or zero-extend in this case. Start by
1387 zero-extending, then sign extend if we need to. */
1388 val &= ((HOST_WIDE_INT) 1 << width) - 1;
/* Sign-extend when the (elided) condition requires it and the old
   sign bit is set.  */
1390 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1391 val |= (HOST_WIDE_INT) (-1) << width;
1393 return GEN_INT (val);
1396 return gen_lowpart (mode, x);
/* General case: copy into a fresh pseudo with a full conversion.  */
1399 temp = gen_reg_rtx (mode);
1400 convert_move (temp, x, unsignedp);
1404 /* Generate several move instructions to copy LEN bytes
1405 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1406 The caller must pass FROM and TO
1407 through protect_from_queue before calling.
1408 ALIGN (in bytes) is maximum alignment we can assume. */
/* Copy LEN bytes from block FROM to block TO with a sequence of scalar
   moves (see the comment block above).  Sets up a move_by_pieces
   descriptor (auto-increment detection, address-register copies when
   more than two insns are needed), then drives move_by_pieces_1 from
   the widest usable integer mode down to the narrowest.
   NOTE(review): several field assignments (e.g. data.offset, data.len)
   and braces are missing from this extracted view.  */
1411 move_by_pieces (to, from, len, align)
1415   struct move_by_pieces data;
1416   rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1417   int max_size = MOVE_MAX + 1;
1420   data.to_addr = to_addr;
1421   data.from_addr = from_addr;
1425     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1426        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1428     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1429        || GET_CODE (from_addr) == POST_INC
1430        || GET_CODE (from_addr) == POST_DEC);
1432   data.explicit_inc_from = 0;
1433   data.explicit_inc_to = 0;
1435     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1436   if (data.reverse) data.offset = len;
1439   data.to_struct = MEM_IN_STRUCT_P (to);
1440   data.from_struct = MEM_IN_STRUCT_P (from);
1442   /* If copying requires more than two move insns,
1443      copy addresses to registers (to make displacements shorter)
1444      and use post-increment if available.  */
1445   if (!(data.autinc_from && data.autinc_to)
1446       && move_by_pieces_ninsns (len, align) > 2)
1448 #ifdef HAVE_PRE_DECREMENT
1449       if (data.reverse && ! data.autinc_from)
1451           data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1452           data.autinc_from = 1;
1453           data.explicit_inc_from = -1;
1456 #ifdef HAVE_POST_INCREMENT
1457       if (! data.autinc_from)
1459           data.from_addr = copy_addr_to_reg (from_addr);
1460           data.autinc_from = 1;
1461           data.explicit_inc_from = 1;
1464       if (!data.autinc_from && CONSTANT_P (from_addr))
1465         data.from_addr = copy_addr_to_reg (from_addr);
1466 #ifdef HAVE_PRE_DECREMENT
1467       if (data.reverse && ! data.autinc_to)
1469           data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1471           data.explicit_inc_to = -1;
1474 #ifdef HAVE_POST_INCREMENT
1475       if (! data.reverse && ! data.autinc_to)
1477           data.to_addr = copy_addr_to_reg (to_addr);
1479           data.explicit_inc_to = 1;
1482       if (!data.autinc_to && CONSTANT_P (to_addr))
1483         data.to_addr = copy_addr_to_reg (to_addr);
/* Unaligned access allowed (or sufficiently aligned): presumably align is
   widened here so the widest modes qualify below -- the modified line is
   missing from this view.  */
1486   if (! SLOW_UNALIGNED_ACCESS
1487       || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1490   /* First move what we can in the largest integer mode, then go to
1491      successively smaller modes.  */
1493   while (max_size > 1)
1495       enum machine_mode mode = VOIDmode, tmode;
1496       enum insn_code icode;
1498       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499            tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500         if (GET_MODE_SIZE (tmode) < max_size)
1503       if (mode == VOIDmode)
1506       icode = mov_optab->handlers[(int) mode].insn_code;
1507       if (icode != CODE_FOR_nothing
1508           && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1509                            GET_MODE_SIZE (mode)))
1510         move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1512       max_size = GET_MODE_SIZE (mode);
1515   /* The code above should have handled everything.  */
1520 /* Return number of insns required to move L bytes by pieces.
1521 ALIGN (in bytes) is maximum alignment we can assume. */
/* Return the number of move insns move_by_pieces would need for L bytes
   at alignment ALIGN: mirrors the widest-mode-first loop of
   move_by_pieces, accumulating l / size per usable mode.
   NOTE(review): return type, remaining params and the final return
   statement are missing from this extracted view.  */
1524 move_by_pieces_ninsns (l, align)
1528   register int n_insns = 0;
1529   int max_size = MOVE_MAX + 1;
1531   if (! SLOW_UNALIGNED_ACCESS
1532       || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1535   while (max_size > 1)
1537       enum machine_mode mode = VOIDmode, tmode;
1538       enum insn_code icode;
1540       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1541            tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1542         if (GET_MODE_SIZE (tmode) < max_size)
1545       if (mode == VOIDmode)
1548       icode = mov_optab->handlers[(int) mode].insn_code;
1549       if (icode != CODE_FOR_nothing
1550           && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1551                            GET_MODE_SIZE (mode)))
1552         n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1554       max_size = GET_MODE_SIZE (mode);
1560 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1561 with move instructions for mode MODE. GENFUN is the gen_... function
1562 to make a move insn for that mode. DATA has all the other info. */
/* Inner loop of move_by_pieces: emit as many MODE-sized moves as fit in
   DATA->len, honoring auto-increment addressing (explicit pre-decrement
   before / post-increment after each move) and reverse copying.
   NOTE(review): the decrement of data->len at the loop bottom and some
   braces are missing from this extracted view.  */
1565 move_by_pieces_1 (genfun, mode, data)
1566      rtx (*genfun) PROTO ((rtx, ...));
1567      enum machine_mode mode;
1568      struct move_by_pieces *data;
1570   register int size = GET_MODE_SIZE (mode);
1571   register rtx to1, from1;
1573   while (data->len >= size)
1575       if (data->reverse) data->offset -= size;
1577       to1 = (data->autinc_to
1578              ? gen_rtx_MEM (mode, data->to_addr)
1579              : copy_rtx (change_address (data->to, mode,
1580                                          plus_constant (data->to_addr,
1582       MEM_IN_STRUCT_P (to1) = data->to_struct;
1585         = (data->autinc_from
1586            ? gen_rtx_MEM (mode, data->from_addr)
1587            : copy_rtx (change_address (data->from, mode,
1588                                        plus_constant (data->from_addr,
1590       MEM_IN_STRUCT_P (from1) = data->from_struct;
1592 #ifdef HAVE_PRE_DECREMENT
1593       if (data->explicit_inc_to < 0)
1594         emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1595       if (data->explicit_inc_from < 0)
1596         emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1599       emit_insn ((*genfun) (to1, from1));
1600 #ifdef HAVE_POST_INCREMENT
1601       if (data->explicit_inc_to > 0)
1602         emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1603       if (data->explicit_inc_from > 0)
1604         emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1607       if (! data->reverse) data->offset += size;
1613 /* Emit code to move a block Y to a block X.
1614 This may be done with string-move instructions,
1615 with multiple scalar move instructions, or with a library call.
1617 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1619 SIZE is an rtx that says how long they are.
1620 ALIGN is the maximum alignment we can assume they have,
1623 Return the address of the new block, if memcpy is called and returns it,
/* Emit code to copy block Y to block X (see comment block above).
   Strategy, in order: (1) move_by_pieces for small constant sizes,
   (2) a target movstrM pattern, narrowest usable mode first,
   (3) a real call to memcpy (TARGET_MEM_FUNCTIONS) built as a CALL_EXPR
   so normal call conventions apply, else a bcopy libcall.
   NOTE(review): many original lines (abort()s, #else/#endif, the pat
   emission and return) are missing from this extracted view.  */
1627 emit_block_move (x, y, size, align)
1633 #ifdef TARGET_MEM_FUNCTIONS
1635   tree call_expr, arg_list;
1638   if (GET_MODE (x) != BLKmode)
1641   if (GET_MODE (y) != BLKmode)
1644   x = protect_from_queue (x, 1);
1645   y = protect_from_queue (y, 0);
1646   size = protect_from_queue (size, 0);
1648   if (GET_CODE (x) != MEM)
1650   if (GET_CODE (y) != MEM)
1655   if (GET_CODE (size) == CONST_INT
1656       && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1657     move_by_pieces (x, y, INTVAL (size), align);
1660       /* Try the most limited insn first, because there's no point
1661          including more than one in the machine description unless
1662          the more limited one has some advantage.  */
1664       rtx opalign = GEN_INT (align);
1665       enum machine_mode mode;
1667       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1668            mode = GET_MODE_WIDER_MODE (mode))
1670           enum insn_code code = movstr_optab[(int) mode];
1672           if (code != CODE_FOR_nothing
1673               /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1674                  here because if SIZE is less than the mode mask, as it is
1675                  returned by the macro, it will definitely be less than the
1676                  actual mode mask.  */
1677               && ((GET_CODE (size) == CONST_INT
1678                    && ((unsigned HOST_WIDE_INT) INTVAL (size)
1679                        <= (GET_MODE_MASK (mode) >> 1)))
1680                   || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1681               && (insn_operand_predicate[(int) code][0] == 0
1682                   || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1683               && (insn_operand_predicate[(int) code][1] == 0
1684                   || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1685               && (insn_operand_predicate[(int) code][3] == 0
1686                   || (*insn_operand_predicate[(int) code][3]) (opalign,
1690               rtx last = get_last_insn ();
1693               op2 = convert_to_mode (mode, size, 1);
1694               if (insn_operand_predicate[(int) code][2] != 0
1695                   && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1696                 op2 = copy_to_mode_reg (mode, op2);
1698               pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1705                 delete_insns_since (last);
1709 #ifdef TARGET_MEM_FUNCTIONS
1710       /* It is incorrect to use the libcall calling conventions to call
1711          memcpy in this context.
1713          This could be a user call to memcpy and the user may wish to
1714          examine the return value from memcpy.
1716          For targets where libcalls and normal calls have different conventions
1717          for returning pointers, we could end up generating incorrect code.
1719          So instead of using a libcall sequence we build up a suitable
1720          CALL_EXPR and expand the call in the normal fashion.  */
1721       if (fn == NULL_TREE)
1725           /* This was copied from except.c, I don't know if all this is
1726              necessary in this context or not.  */
1727           fn = get_identifier ("memcpy");
1728           push_obstacks_nochange ();
1729           end_temporary_allocation ();
1730           fntype = build_pointer_type (void_type_node);
1731           fntype = build_function_type (fntype, NULL_TREE);
1732           fn = build_decl (FUNCTION_DECL, fn, fntype);
1733           DECL_EXTERNAL (fn) = 1;
1734           TREE_PUBLIC (fn) = 1;
1735           DECL_ARTIFICIAL (fn) = 1;
1736           make_decl_rtl (fn, NULL_PTR, 1);
1737           assemble_external (fn);
1741       /* We need to make an argument list for the function call.
1743          memcpy has three arguments, the first two are void * addresses and
1744          the last is a size_t byte count for the copy.  */
1746         = build_tree_list (NULL_TREE,
1747                            make_tree (build_pointer_type (void_type_node),
1749       TREE_CHAIN (arg_list)
1750         = build_tree_list (NULL_TREE,
1751                            make_tree (build_pointer_type (void_type_node),
1753       TREE_CHAIN (TREE_CHAIN (arg_list))
1754         = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1755       TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1757       /* Now we have to build up the CALL_EXPR itself.  */
1758       call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1759       call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1760                          call_expr, arg_list, NULL_TREE);
1761       TREE_SIDE_EFFECTS (call_expr) = 1;
1763       retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1765       emit_library_call (bcopy_libfunc, 0,
1766                          VOIDmode, 3, XEXP (y, 0), Pmode,
1768                          convert_to_mode (TYPE_MODE (integer_type_node), size,
1769                                           TREE_UNSIGNED (integer_type_node)),
1770                          TYPE_MODE (integer_type_node));
1777 /* Copy all or part of a value X into registers starting at REGNO.
1778 The number of registers to be filled is NREGS. */
/* Copy NREGS words of value X (mode MODE) into hard registers starting
   at REGNO.  Tries a target load_multiple pattern first, else emits one
   word-mode move per register via operand_subword_force.
   NOTE(review): local declarations, the nregs==0 early return and the
   load_multiple success/failure handling are missing from this view.  */
1781 move_block_to_reg (regno, x, nregs, mode)
1785      enum machine_mode mode;
1788 #ifdef HAVE_load_multiple
1796   if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1797     x = validize_mem (force_const_mem (mode, x));
1799   /* See if the machine can do this with a load multiple insn.  */
1800 #ifdef HAVE_load_multiple
1801   if (HAVE_load_multiple)
1803       last = get_last_insn ();
1804       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1812         delete_insns_since (last);
1816   for (i = 0; i < nregs; i++)
1817     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1818                     operand_subword_force (x, i, mode));
1821 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1822 The number of registers to be filled is NREGS. SIZE indicates the number
1823 of bytes in the object X. */
/* Copy a BLKmode value out of NREGS hard registers starting at REGNO
   into memory X; SIZE is the byte count.  Special cases: a single
   sub-word store when SIZE fits an integer mode, left-shift alignment
   of sub-word blocks on big-endian targets, and a store_multiple
   pattern when available; otherwise one word-mode move per register.
   NOTE(review): declarations, returns and some braces are missing from
   this extracted view.  */
1827 move_block_from_reg (regno, x, nregs, size)
1834 #ifdef HAVE_store_multiple
1838   enum machine_mode mode;
1840   /* If SIZE is that of a mode no bigger than a word, just use that
1841      mode's store operation.  */
1842   if (size <= UNITS_PER_WORD
1843       && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1845       emit_move_insn (change_address (x, mode, NULL),
1846                       gen_rtx_REG (mode, regno));
1850   /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1851      to the left before storing to memory.  Note that the previous test
1852      doesn't handle all cases (e.g. SIZE == 3).  */
1853   if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1855       rtx tem = operand_subword (x, 0, 1, BLKmode);
1861       shift = expand_shift (LSHIFT_EXPR, word_mode,
1862                             gen_rtx_REG (word_mode, regno),
1863                             build_int_2 ((UNITS_PER_WORD - size)
1864                                          * BITS_PER_UNIT, 0), NULL_RTX, 0);
1865       emit_move_insn (tem, shift);
1869   /* See if the machine can do this with a store multiple insn.  */
1870 #ifdef HAVE_store_multiple
1871   if (HAVE_store_multiple)
1873       last = get_last_insn ();
1874       pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1882         delete_insns_since (last);
1886   for (i = 0; i < nregs; i++)
1888       rtx tem = operand_subword (x, i, 1, BLKmode);
1893       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1897 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1898 registers represented by a PARALLEL. SSIZE represents the total size of
1899 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1901 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1902 the balance will be in what would be the low-order memory addresses, i.e.
1903 left justified for big endian, right justified for little endian. This
1904 happens to be true for the targets currently using this support. If this
1905 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
/* Load block ORIG_SRC into DST, a PARALLEL of non-consecutive registers
   (see comment block above; SSIZE is total size or -1, ALIGN the known
   alignment).  Extracts each piece into a temporary first, then copies
   all temporaries into the hard regs, so overlapping reads cannot
   clobber partially-written destinations.
   NOTE(review): the `start` initialization, abort()s and some braces
   are missing from this extracted view.  */
1909 emit_group_load (dst, orig_src, ssize, align)
1916   if (GET_CODE (dst) != PARALLEL)
1919   /* Check for a NULL entry, used to indicate that the parameter goes
1920      both on the stack and in registers.  */
1921   if (XEXP (XVECEXP (dst, 0, 0), 0))
1926   tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1928   /* If we won't be loading directly from memory, protect the real source
1929      from strange tricks we might play.  */
1931   if (GET_CODE (src) != MEM)
1933       src = gen_reg_rtx (GET_MODE (orig_src));
1934       emit_move_insn (src, orig_src);
1937   /* Process the pieces.  */
1938   for (i = start; i < XVECLEN (dst, 0); i++)
1940       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1941       int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1942       int bytelen = GET_MODE_SIZE (mode);
1945       /* Handle trailing fragments that run over the size of the struct.  */
1946       if (ssize >= 0 && bytepos + bytelen > ssize)
1948           shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1949           bytelen = ssize - bytepos;
1954       /* Optimize the access just a bit.  */
1955       if (GET_CODE (src) == MEM
1956           && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1957           && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1958           && bytelen == GET_MODE_SIZE (mode))
1960           tmps[i] = gen_reg_rtx (mode);
1961           emit_move_insn (tmps[i],
1962                           change_address (src, mode,
1963                                           plus_constant (XEXP (src, 0),
1968         tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1969                                      bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1970                                      mode, mode, align, ssize);
1973       if (BYTES_BIG_ENDIAN && shift)
1975         expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1976                       tmps[i], 0, OPTAB_WIDEN);
1981   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1982   for (i = start; i < XVECLEN (dst, 0); i++)
1983     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1986 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1987 registers represented by a PARALLEL. SSIZE represents the total size of
1988 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
/* Store SRC, a PARALLEL of non-consecutive registers, into block
   ORIG_DST (see comment block above).  Copies the hard regs into
   pseudos first, then stores each piece; handles a PARALLEL dst (via a
   stack temporary round-trip), a non-MEM dst (fresh pseudo), and sets
   MEM_IN_STRUCT_P as store_bit_field requires.
   NOTE(review): the `start` initialization, abort()s and some braces
   are missing from this extracted view.  */
1991 emit_group_store (orig_dst, src, ssize, align)
1998   if (GET_CODE (src) != PARALLEL)
2001   /* Check for a NULL entry, used to indicate that the parameter goes
2002      both on the stack and in registers.  */
2003   if (XEXP (XVECEXP (src, 0, 0), 0))
2008   tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
2010   /* Copy the (probable) hard regs into pseudos.  */
2011   for (i = start; i < XVECLEN (src, 0); i++)
2013       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2014       tmps[i] = gen_reg_rtx (GET_MODE (reg));
2015       emit_move_insn (tmps[i], reg);
2019   /* If we won't be storing directly into memory, protect the real destination
2020      from strange tricks we might play.  */
2022   if (GET_CODE (dst) == PARALLEL)
2026       /* We can get a PARALLEL dst if there is a conditional expression in
2027          a return statement.  In that case, the dst and src are the same,
2028          so no action is necessary.  */
2029       if (rtx_equal_p (dst, src))
2032       /* It is unclear if we can ever reach here, but we may as well handle
2033          it.  Allocate a temporary, and split this into a store/load to/from
2036       temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2037       emit_group_store (temp, src, ssize, align);
2038       emit_group_load (dst, temp, ssize, align);
2041   else if (GET_CODE (dst) != MEM)
2043       dst = gen_reg_rtx (GET_MODE (orig_dst));
2044       /* Make life a bit easier for combine.  */
2045       emit_move_insn (dst, const0_rtx);
2047   else if (! MEM_IN_STRUCT_P (dst))
2049       /* store_bit_field requires that memory operations have
2050          mem_in_struct_p set; we might not.  */
2052       dst = copy_rtx (orig_dst);
2053       MEM_IN_STRUCT_P (dst) = 1;
2056   /* Process the pieces.  */
2057   for (i = start; i < XVECLEN (src, 0); i++)
2059       int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2060       enum machine_mode mode = GET_MODE (tmps[i]);
2061       int bytelen = GET_MODE_SIZE (mode);
2063       /* Handle trailing fragments that run over the size of the struct.  */
2064       if (ssize >= 0 && bytepos + bytelen > ssize)
2066           if (BYTES_BIG_ENDIAN)
2068               int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2069               expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2070                             tmps[i], 0, OPTAB_WIDEN);
2072           bytelen = ssize - bytepos;
2075       /* Optimize the access just a bit.  */
2076       if (GET_CODE (dst) == MEM
2077           && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2078           && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2079           && bytelen == GET_MODE_SIZE (mode))
2081         emit_move_insn (change_address (dst, mode,
2082                                         plus_constant (XEXP (dst, 0),
2088         store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2089                          mode, tmps[i], align, ssize);
2094   /* Copy from the pseudo into the (probable) hard reg.  */
2095   if (GET_CODE (dst) == REG)
2096     emit_move_insn (orig_dst, dst);
2099 /* Generate code to copy a BLKmode object of TYPE out of a
2100 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2101 is null, a stack temporary is created. TGTBLK is returned.
2103 The primary purpose of this routine is to handle functions
2104 that return BLKmode structures in registers. Some machines
2105 (the PA for example) want to return all small structures
2106 in registers regardless of the structure's alignment.
/* Copy a BLKmode value of TYPE out of registers SRCREG into TGTBLK
   (allocating a stack temporary when TGTBLK is null); see the comment
   block above.  Copies BITSIZE bits at a time with
   extract_bit_field/store_bit_field, applying a big-endian correction
   so right-justified register contents land left-justified in memory.
   NOTE(review): declarations, the `if (tgtblk == 0)` header and the
   final return are missing from this extracted view.  */
2110 copy_blkmode_from_reg(tgtblk,srcreg,type)
2115   int bytes = int_size_in_bytes (type);
2116   rtx src = NULL, dst = NULL;
2117   int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2118   int bitpos, xbitpos, big_endian_correction = 0;
2122       tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2123       MEM_IN_STRUCT_P (tgtblk) = AGGREGATE_TYPE_P (type);
2124       preserve_temp_slots (tgtblk);
2127   /* This code assumes srcreg is at least a full word.  If it isn't,
2128      copy it into a new pseudo which is a full word.  */
2129   if (GET_MODE (srcreg) != BLKmode
2130       && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2131     srcreg = convert_to_mode (word_mode, srcreg,
2132                               TREE_UNSIGNED (type));
2134   /* Structures whose size is not a multiple of a word are aligned
2135      to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2136      machine, this means we must skip the empty high order bytes when
2137      calculating the bit offset.  */
2138   if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2139     big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2142   /* Copy the structure BITSIZE bites at a time.
2144      We could probably emit more efficient code for machines
2145      which do not use strict alignment, but it doesn't seem
2146      worth the effort at the current time.  */
2147   for (bitpos = 0, xbitpos = big_endian_correction;
2148        bitpos < bytes * BITS_PER_UNIT;
2149        bitpos += bitsize, xbitpos += bitsize)
2152       /* We need a new source operand each time xbitpos is on a
2153          word boundary and when xbitpos == big_endian_correction
2154          (the first time through).  */
2155       if (xbitpos % BITS_PER_WORD == 0
2156           || xbitpos == big_endian_correction)
2157         src = operand_subword_force (srcreg,
2158                                      xbitpos / BITS_PER_WORD,
2161       /* We need a new destination operand each time bitpos is on
2163       if (bitpos % BITS_PER_WORD == 0)
2164         dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2166       /* Use xbitpos for the source extraction (right justified) and
2167          xbitpos for the destination store (left justified).  */
2168       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2169                        extract_bit_field (src, bitsize,
2170                                           xbitpos % BITS_PER_WORD, 1,
2171                                           NULL_RTX, word_mode,
2173                                           bitsize / BITS_PER_UNIT,
2175                        bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2181 /* Add a USE expression for REG to the (possibly empty) list pointed
2182 to by CALL_FUSAGE. REG must denote a hard register. */
/* Prepend a (use REG) expression to the *CALL_FUSAGE list; REG must be
   a hard register (see comment block above).
   NOTE(review): the abort() body of the validity check and the
   `*call_fusage` assignment target line are missing from this view.  */
2185 use_reg (call_fusage, reg)
2186      rtx *call_fusage, reg;
2188   if (GET_CODE (reg) != REG
2189       || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2193     = gen_rtx_EXPR_LIST (VOIDmode,
2194                          gen_rtx_USE (VOIDmode, reg), *call_fusage);
2197 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2198 starting at REGNO. All of these registers must be hard registers. */
/* Add USE expressions for NREGS consecutive hard registers starting at
   REGNO (see comment block above); delegates each to use_reg.
   NOTE(review): parameter declarations and the abort() for the range
   check are missing from this extracted view.  */
2201 use_regs (call_fusage, regno, nregs)
2208   if (regno + nregs > FIRST_PSEUDO_REGISTER)
2211   for (i = 0; i < nregs; i++)
2212     use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2215 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2216 PARALLEL REGS. This is for calls that pass values in multiple
2217 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* Add USE expressions for each REG inside the PARALLEL REGS, skipping
   NULL and MEM entries (stack-passed portions); see comment block
   above.  */
2220 use_group_regs (call_fusage, regs)
2226   for (i = 0; i < XVECLEN (regs, 0); i++)
2228       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2230       /* A NULL entry means the parameter goes both on the stack and in
2231          registers.  This can also be a MEM for targets that pass values
2232          partially on the stack and partially in registers.  */
2233       if (reg != 0 && GET_CODE (reg) == REG)
2234         use_reg (call_fusage, reg);
2238 /* Generate several move instructions to clear LEN bytes of block TO.
2239 (A MEM rtx with BLKmode). The caller must pass TO through
2240 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
/* Clear LEN bytes of block TO with scalar stores of zero; the
   destination-side analogue of move_by_pieces (see comment block
   above).  Sets up a clear_by_pieces descriptor, then drives
   clear_by_pieces_1 from the widest usable integer mode downward.
   NOTE(review): several field assignments (data.offset, data.len,
   data.autinc_to) and braces are missing from this extracted view.  */
2244 clear_by_pieces (to, len, align)
2248   struct clear_by_pieces data;
2249   rtx to_addr = XEXP (to, 0);
2250   int max_size = MOVE_MAX + 1;
2253   data.to_addr = to_addr;
2256     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2257        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2259   data.explicit_inc_to = 0;
2261     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2262   if (data.reverse) data.offset = len;
2265   data.to_struct = MEM_IN_STRUCT_P (to);
2267   /* If copying requires more than two move insns,
2268      copy addresses to registers (to make displacements shorter)
2269      and use post-increment if available.  */
2271       && move_by_pieces_ninsns (len, align) > 2)
2273 #ifdef HAVE_PRE_DECREMENT
2274       if (data.reverse && ! data.autinc_to)
2276           data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2278           data.explicit_inc_to = -1;
2281 #ifdef HAVE_POST_INCREMENT
2282       if (! data.reverse && ! data.autinc_to)
2284           data.to_addr = copy_addr_to_reg (to_addr);
2286           data.explicit_inc_to = 1;
2289       if (!data.autinc_to && CONSTANT_P (to_addr))
2290         data.to_addr = copy_addr_to_reg (to_addr);
2293   if (! SLOW_UNALIGNED_ACCESS
2294       || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2297   /* First move what we can in the largest integer mode, then go to
2298      successively smaller modes.  */
2300   while (max_size > 1)
2302       enum machine_mode mode = VOIDmode, tmode;
2303       enum insn_code icode;
2305       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2306            tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2307         if (GET_MODE_SIZE (tmode) < max_size)
2310       if (mode == VOIDmode)
2313       icode = mov_optab->handlers[(int) mode].insn_code;
2314       if (icode != CODE_FOR_nothing
2315           && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2316                            GET_MODE_SIZE (mode)))
2317         clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2319       max_size = GET_MODE_SIZE (mode);
2322   /* The code above should have handled everything.  */
2327 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2328 with move instructions for mode MODE. GENFUN is the gen_... function
2329 to make a move insn for that mode. DATA has all the other info. */
/* Inner loop of clear_by_pieces: emit MODE-sized stores of const0_rtx
   while DATA->len holds at least one, handling explicit pre-decrement /
   post-increment address updates and reverse order.
   NOTE(review): the to1 declaration, the data->len decrement and some
   braces are missing from this extracted view.  */
2332 clear_by_pieces_1 (genfun, mode, data)
2333      rtx (*genfun) PROTO ((rtx, ...));
2334      enum machine_mode mode;
2335      struct clear_by_pieces *data;
2337   register int size = GET_MODE_SIZE (mode);
2340   while (data->len >= size)
2342       if (data->reverse) data->offset -= size;
2344       to1 = (data->autinc_to
2345              ? gen_rtx_MEM (mode, data->to_addr)
2346              : copy_rtx (change_address (data->to, mode,
2347                                          plus_constant (data->to_addr,
2349       MEM_IN_STRUCT_P (to1) = data->to_struct;
2351 #ifdef HAVE_PRE_DECREMENT
2352       if (data->explicit_inc_to < 0)
2353         emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2356       emit_insn ((*genfun) (to1, const0_rtx));
2357 #ifdef HAVE_POST_INCREMENT
2358       if (data->explicit_inc_to > 0)
2359         emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2362       if (! data->reverse) data->offset += size;
2368 /* Write zeros through the storage of OBJECT.
2369 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2370 the maximum alignment we can is has, measured in bytes.
2372 If we call a function that returns the length of the block, return it. */
/* Write zeros through OBJECT (see comment block above).  For BLKmode:
   (1) clear_by_pieces for small constant sizes, (2) a target clrstrM
   pattern, (3) a real memset call built as a CALL_EXPR
   (TARGET_MEM_FUNCTIONS) or a bzero libcall.  Non-BLKmode objects get
   a single move of CONST0_RTX.  Mirrors the structure of
   emit_block_move above.
   NOTE(review): abort()s, #else/#endif arms, pat emission and returns
   are missing from this extracted view.  */
2375 clear_storage (object, size, align)
2380 #ifdef TARGET_MEM_FUNCTIONS
2382   tree call_expr, arg_list;
2386   if (GET_MODE (object) == BLKmode)
2388       object = protect_from_queue (object, 1);
2389       size = protect_from_queue (size, 0);
2391       if (GET_CODE (size) == CONST_INT
2392           && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2393         clear_by_pieces (object, INTVAL (size), align);
2397           /* Try the most limited insn first, because there's no point
2398              including more than one in the machine description unless
2399              the more limited one has some advantage.  */
2401           rtx opalign = GEN_INT (align);
2402           enum machine_mode mode;
2404           for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2405                mode = GET_MODE_WIDER_MODE (mode))
2407               enum insn_code code = clrstr_optab[(int) mode];
2409               if (code != CODE_FOR_nothing
2410                   /* We don't need MODE to be narrower than
2411                      BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2412                      the mode mask, as it is returned by the macro, it will
2413                      definitely be less than the actual mode mask.  */
2414                   && ((GET_CODE (size) == CONST_INT
2415                        && ((unsigned HOST_WIDE_INT) INTVAL (size)
2416                            <= (GET_MODE_MASK (mode) >> 1)))
2417                       || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2418                   && (insn_operand_predicate[(int) code][0] == 0
2419                       || (*insn_operand_predicate[(int) code][0]) (object,
2421                   && (insn_operand_predicate[(int) code][2] == 0
2422                       || (*insn_operand_predicate[(int) code][2]) (opalign,
2426                   rtx last = get_last_insn ();
2429                   op1 = convert_to_mode (mode, size, 1);
2430                   if (insn_operand_predicate[(int) code][1] != 0
2431                       && ! (*insn_operand_predicate[(int) code][1]) (op1,
2433                     op1 = copy_to_mode_reg (mode, op1);
2435                   pat = GEN_FCN ((int) code) (object, op1, opalign);
2442                     delete_insns_since (last);
2447 #ifdef TARGET_MEM_FUNCTIONS
2448           /* It is incorrect to use the libcall calling conventions to call
2449              memset in this context.
2451              This could be a user call to memset and the user may wish to
2452              examine the return value from memset.
2454              For targets where libcalls and normal calls have different conventions
2455              for returning pointers, we could end up generating incorrect code.
2457              So instead of using a libcall sequence we build up a suitable
2458              CALL_EXPR and expand the call in the normal fashion.  */
2459           if (fn == NULL_TREE)
2463               /* This was copied from except.c, I don't know if all this is
2464                  necessary in this context or not.  */
2465               fn = get_identifier ("memset");
2466               push_obstacks_nochange ();
2467               end_temporary_allocation ();
2468               fntype = build_pointer_type (void_type_node);
2469               fntype = build_function_type (fntype, NULL_TREE);
2470               fn = build_decl (FUNCTION_DECL, fn, fntype);
2471               DECL_EXTERNAL (fn) = 1;
2472               TREE_PUBLIC (fn) = 1;
2473               DECL_ARTIFICIAL (fn) = 1;
2474               make_decl_rtl (fn, NULL_PTR, 1);
2475               assemble_external (fn);
2479           /* We need to make an argument list for the function call.
2481              memset has three arguments, the first is a void * addresses, the
2482              second a integer with the initialization value, the last is a size_t
2483              byte count for the copy.  */
2485             = build_tree_list (NULL_TREE,
2486                                make_tree (build_pointer_type (void_type_node),
2488           TREE_CHAIN (arg_list)
2489             = build_tree_list (NULL_TREE,
2490                                make_tree (integer_type_node, const0_rtx));
2491           TREE_CHAIN (TREE_CHAIN (arg_list))
2492             = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2493           TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2495           /* Now we have to build up the CALL_EXPR itself.  */
2496           call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2497           call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2498                              call_expr, arg_list, NULL_TREE);
2499           TREE_SIDE_EFFECTS (call_expr) = 1;
2501           retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2503           emit_library_call (bzero_libfunc, 0,
2505                              XEXP (object, 0), Pmode,
2507                              (TYPE_MODE (integer_type_node), size,
2508                               TREE_UNSIGNED (integer_type_node)),
2509                              TYPE_MODE (integer_type_node));
2514     emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2519 /* Generate code to copy Y into X.
2520 Both Y and X must have the same mode, except that
2521 Y can be a constant with VOIDmode.
2522 This mode cannot be BLKmode; use emit_block_move for that.
2524 Return the last instruction emitted. */
/* Generate code to copy Y into X (see comment block above).  Forces
   illegitimate constants to memory, re-legitimizes bad MEM addresses on
   both sides, then hands off to emit_move_insn_1, whose last emitted
   insn is returned.
   NOTE(review): abort() bodies for the mode checks and some condition
   lines are missing from this extracted view.  */
2527 emit_move_insn (x, y)
2530   enum machine_mode mode = GET_MODE (x);
2532   x = protect_from_queue (x, 1);
2533   y = protect_from_queue (y, 0);
2535   if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2538   if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2539     y = force_const_mem (mode, y);
2541   /* If X or Y are memory references, verify that their addresses are valid
2543   if (GET_CODE (x) == MEM
2544       && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2545            && ! push_operand (x, GET_MODE (x)))
2547               && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2548     x = change_address (x, VOIDmode, XEXP (x, 0));
2550   if (GET_CODE (y) == MEM
2551       && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2553               && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2554     y = change_address (y, VOIDmode, XEXP (y, 0));
2556   if (mode == BLKmode)
2559   return emit_move_insn_1 (x, y);
2562 /* Low level part of emit_move_insn.
2563 Called just like emit_move_insn, but assumes X and Y
2564 are basically valid. */
/* Low-level worker for emit_move_insn.  Assumes X and Y are basically
   valid and tries, in order: a direct movM pattern for the mode, a
   split of complex modes into real/imaginary halves, and finally a
   word-by-word copy for multi-word modes lacking a move pattern.
   NOTE(review): this extraction omits many original lines (braces,
   declarations, loop headers, the #else/#endif structure around the
   STACK_GROWS_DOWNWARD block), so the lines below are not contiguous.  */
2567 emit_move_insn_1 (x, y)
2570 enum machine_mode mode = GET_MODE (x);
2571 enum machine_mode submode;
2572 enum mode_class class = GET_MODE_CLASS (mode);
/* Fast path: the target defines a move pattern for this mode.  */
2575 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2577 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2579 /* Expand complex moves by moving real part and imag part, if possible. */
2580 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2581 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2583 (class == MODE_COMPLEX_INT
2584 ? MODE_INT : MODE_FLOAT),
2586 && (mov_optab->handlers[(int) submode].insn_code
2587 != CODE_FOR_nothing))
2589 /* Don't split destination if it is a stack push. */
2590 int stack = push_operand (x, GET_MODE (x));
2592 /* If this is a stack, push the highpart first, so it
2593 will be in the argument order.
2595 In that case, change_address is used only to convert
2596 the mode, not to change the address. */
2599 /* Note that the real part always precedes the imag part in memory
2600 regardless of machine's endianness. */
2601 #ifdef STACK_GROWS_DOWNWARD
/* Downward-growing stack: push imagpart first so realpart ends up at
   the lower address.  */
2602 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2603 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2604 gen_imagpart (submode, y)));
2605 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2606 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2607 gen_realpart (submode, y)));
/* (Upward-growing stack branch: opposite push order; the #else that
   separates these two arms is missing from this extraction.)  */
2609 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2610 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2611 gen_realpart (submode, y)));
2612 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2613 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2614 gen_imagpart (submode, y)));
2619 /* Show the output dies here. */
2621 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
/* Non-push case: move the two halves directly.  */
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_realpart (submode, x), gen_realpart (submode, y)));
2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2626 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2629 return get_last_insn ();
2632 /* This will handle any multi-word mode that lacks a move_insn pattern.
2633 However, you will get better code if you define such patterns,
2634 even if they must turn into multiple assembler instructions. */
2635 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2639 #ifdef PUSH_ROUNDING
2641 /* If X is a push on the stack, do the push now and replace
2642 X with a reference to the stack pointer. */
2643 if (push_operand (x, GET_MODE (x)))
2645 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2646 x = change_address (x, VOIDmode, stack_pointer_rtx);
2650 /* Show the output dies here. */
2652 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
/* Word-by-word copy loop; the for-header's first clause is among the
   lines missing from this extraction.  */
2655 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2658 rtx xpart = operand_subword (x, i, 1, mode);
2659 rtx ypart = operand_subword (y, i, 1, mode);
2661 /* If we can't get a part of Y, put Y into memory if it is a
2662 constant. Otherwise, force it into a register. If we still
2663 can't get a part of Y, abort. */
2664 if (ypart == 0 && CONSTANT_P (y))
2666 y = force_const_mem (mode, y);
2667 ypart = operand_subword (y, i, 1, mode);
2669 else if (ypart == 0)
2670 ypart = operand_subword_force (y, i, mode);
/* Both subwords must exist by now; failure arm (abort) is among the
   missing lines.  */
2672 if (xpart == 0 || ypart == 0)
2675 last_insn = emit_move_insn (xpart, ypart);
2684 /* Pushing data onto the stack. */
2686 /* Push a block of length SIZE (perhaps variable)
2687 and return an rtx to address the beginning of the block.
2688 Note that it is not possible for the value returned to be a QUEUED.
2689 The value may be virtual_outgoing_args_rtx.
2691 EXTRA is the number of bytes of padding to push in addition to SIZE.
2692 BELOW nonzero means this padding comes at low addresses;
2693 otherwise, the padding comes at high addresses. */
/* Push a block of SIZE bytes (plus EXTRA bytes of padding; BELOW says
   whether the padding sits at low addresses) and return an rtx
   addressing the beginning of the block.  First the stack is adjusted,
   then the address is computed from virtual_outgoing_args_rtx or the
   stack pointer depending on stack/args growth direction.
   NOTE(review): lines are missing between the numbered lines below
   (declarations, #else/#endif structure); comments are hedged.  */
2696 push_block (size, extra, below)
/* Normalize SIZE to Pmode before arithmetic on it.  */
2702 size = convert_modes (Pmode, ptr_mode, size, 1);
2703 if (CONSTANT_P (size))
2704 anti_adjust_stack (plus_constant (size, extra));
2705 else if (GET_CODE (size) == REG && extra == 0)
2706 anti_adjust_stack (size);
/* General case: compute SIZE + EXTRA into a register, then adjust.  */
2709 rtx temp = copy_to_mode_reg (Pmode, size);
2711 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2712 temp, 0, OPTAB_LIB_WIDEN);
2713 anti_adjust_stack (temp);
2716 #if defined (STACK_GROWS_DOWNWARD) \
2717 || (defined (ARGS_GROW_DOWNWARD) \
2718 && !defined (ACCUMULATE_OUTGOING_ARGS))
2720 /* Return the lowest stack address when STACK or ARGS grow downward and
2721 we are not accumulating outgoing arguments (the c4x port uses such
2723 temp = virtual_outgoing_args_rtx;
2724 if (extra != 0 && below)
2725 temp = plus_constant (temp, extra);
/* (Upward-growth branch; its #elif/#else markers are among the missing
   lines.)  Address is outgoing-args base minus the pushed size.  */
2727 if (GET_CODE (size) == CONST_INT)
2728 temp = plus_constant (virtual_outgoing_args_rtx,
2729 - INTVAL (size) - (below ? 0 : extra));
2730 else if (extra != 0 && !below)
2731 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2732 negate_rtx (Pmode, plus_constant (size, extra)));
2734 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2735 negate_rtx (Pmode, size));
/* Legitimize the computed address before returning it.  */
2738 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): orphaned return statement -- the function header that
   owns it (by its content, a generator of the STACK_PUSH_CODE push
   operand, i.e. gen_push_operand) is missing from this extraction.
   Do not fold this line into push_block above.  */
2744 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2747 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2748 block of SIZE bytes. */
/* Return an rtx for the address of the start of an as-if-pushed block
   of SIZE bytes.  For post-update push codes the stack pointer has
   already moved past the block, so compensate: POST_DEC -> sp + SIZE,
   POST_INC -> sp - SIZE; otherwise the stack pointer itself is the
   address.  The result is copied to a register before returning.
   NOTE(review): the declarations between the numbered lines below are
   missing from this extraction.  */
2751 get_push_address (size)
2756 if (STACK_PUSH_CODE == POST_DEC)
2757 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2758 else if (STACK_PUSH_CODE == POST_INC)
2759 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2761 temp = stack_pointer_rtx;
2763 return copy_to_reg (temp);
2766 /* Generate code to push X onto the stack, assuming it has mode MODE and
2768 MODE is redundant except when X is a CONST_INT (since they don't
2770 SIZE is an rtx for the size of data to be copied (in bytes),
2771 needed only if X is BLKmode.
2773 ALIGN (in bytes) is maximum alignment we can assume.
2775 If PARTIAL and REG are both nonzero, then copy that many of the first
2776 words of X into registers starting with REG, and push the rest of X.
2777 The amount of space pushed is decreased by PARTIAL words,
2778 rounded *down* to a multiple of PARM_BOUNDARY.
2779 REG must be a hard register in this case.
2780 If REG is zero but PARTIAL is not, take all other actions for an
2781 argument partially in registers, but do not actually load any
2784 EXTRA is the amount in bytes of extra space to leave next to this arg.
2785 This is ignored if an argument block has already been allocated.
2787 On a machine that lacks real push insns, ARGS_ADDR is the address of
2788 the bottom of the argument block for this call. We use indexing off there
2789 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2790 argument block has not been preallocated.
2792 ARGS_SO_FAR is the size of args previously pushed for this call.
2794 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2795 for arguments passed in registers. If nonzero, it will be the number
2796 of bytes required. */
/* Generate code to push X (mode MODE, tree type TYPE) onto the stack,
   or store it into a preallocated argument block at ARGS_ADDR +
   ARGS_SO_FAR.  PARTIAL words may instead go into registers starting
   at REG; EXTRA is padding; ALIGN is the alignment we may assume.
   Three major cases below: BLKmode data, scalars partly in registers,
   and plain scalars.
   NOTE(review): this extraction omits many original lines throughout
   (parameter declarations, braces, #else arms, parts of conditions);
   the numbered lines are NOT contiguous code and added comments are
   hedged accordingly.  */
2799 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2800 args_addr, args_so_far, reg_parm_stack_space)
2802 enum machine_mode mode;
2811 int reg_parm_stack_space;
2814 enum direction stack_direction
2815 #ifdef STACK_GROWS_DOWNWARD
2821 /* Decide where to pad the argument: `downward' for below,
2822 `upward' for above, or `none' for don't pad it.
2823 Default is below for small data on big-endian machines; else above. */
2824 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2826 /* Invert direction if stack is post-update. */
2827 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2828 if (where_pad != none)
2829 where_pad = (where_pad == downward ? upward : downward);
2831 xinner = x = protect_from_queue (x, 0);
/* ---- Case 1: BLKmode (block of memory) arguments. ---- */
2833 if (mode == BLKmode)
2835 /* Copy a block into the stack, entirely or partially. */
2838 int used = partial * UNITS_PER_WORD;
2839 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2847 /* USED is now the # of bytes we need not copy to the stack
2848 because registers will take care of them. */
2851 xinner = change_address (xinner, BLKmode,
2852 plus_constant (XEXP (xinner, 0), used));
2854 /* If the partial register-part of the arg counts in its stack size,
2855 skip the part of stack space corresponding to the registers.
2856 Otherwise, start copying to the beginning of the stack space,
2857 by setting SKIP to 0. */
2858 skip = (reg_parm_stack_space == 0) ? 0 : used;
2860 #ifdef PUSH_ROUNDING
2861 /* Do it with several push insns if that doesn't take lots of insns
2862 and if there is no difficulty with push insns that skip bytes
2863 on the stack for alignment purposes. */
2865 && GET_CODE (size) == CONST_INT
2867 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2869 /* Here we avoid the case of a structure whose weak alignment
2870 forces many pushes of a small amount of data,
2871 and such small pushes do rounding that causes trouble. */
2872 && ((! SLOW_UNALIGNED_ACCESS)
2873 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2874 || PUSH_ROUNDING (align) == align)
2875 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2877 /* Push padding now if padding above and stack grows down,
2878 or if padding below and stack grows up.
2879 But if space already allocated, this has already been done. */
2880 if (extra && args_addr == 0
2881 && where_pad != none && where_pad != stack_direction)
2882 anti_adjust_stack (GEN_INT (extra));
2884 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2885 INTVAL (size) - used, align);
/* -fcheck-memory-usage instrumentation: record access rights for the
   bytes just pushed via the checker library.  */
2887 if (current_function_check_memory_usage && ! in_check_memory_usage)
2891 in_check_memory_usage = 1;
2892 temp = get_push_address (INTVAL(size) - used);
2893 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2894 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2896 XEXP (xinner, 0), ptr_mode,
2897 GEN_INT (INTVAL(size) - used),
2898 TYPE_MODE (sizetype));
2900 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2902 GEN_INT (INTVAL(size) - used),
2903 TYPE_MODE (sizetype),
2904 GEN_INT (MEMORY_USE_RW),
2905 TYPE_MODE (integer_type_node));
2906 in_check_memory_usage = 0;
2910 #endif /* PUSH_ROUNDING */
2912 /* Otherwise make space on the stack and copy the data
2913 to the address of that space. */
2915 /* Deduct words put into registers from the size we must copy. */
2918 if (GET_CODE (size) == CONST_INT)
2919 size = GEN_INT (INTVAL (size) - used);
2921 size = expand_binop (GET_MODE (size), sub_optab, size,
2922 GEN_INT (used), NULL_RTX, 0,
2926 /* Get the address of the stack space.
2927 In this case, we do not deal with EXTRA separately.
2928 A single stack adjust will do. */
2931 temp = push_block (size, extra, where_pad == downward);
/* Preallocated argument block: address is ARGS_ADDR plus the offset
   pushed so far (constant or computed).  */
2934 else if (GET_CODE (args_so_far) == CONST_INT)
2935 temp = memory_address (BLKmode,
2936 plus_constant (args_addr,
2937 skip + INTVAL (args_so_far)));
2939 temp = memory_address (BLKmode,
2940 plus_constant (gen_rtx_PLUS (Pmode,
2944 if (current_function_check_memory_usage && ! in_check_memory_usage)
2948 in_check_memory_usage = 1;
2949 target = copy_to_reg (temp);
2950 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2951 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2953 XEXP (xinner, 0), ptr_mode,
2954 size, TYPE_MODE (sizetype));
2956 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2958 size, TYPE_MODE (sizetype),
2959 GEN_INT (MEMORY_USE_RW),
2960 TYPE_MODE (integer_type_node));
2961 in_check_memory_usage = 0;
2964 /* TEMP is the address of the block. Copy the data there. */
2965 if (GET_CODE (size) == CONST_INT
2966 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2969 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2970 INTVAL (size), align);
/* Otherwise try the target's movstrM (block move) patterns, narrowest
   integer mode first.  */
2975 rtx opalign = GEN_INT (align);
2976 enum machine_mode mode;
2977 rtx target = gen_rtx_MEM (BLKmode, temp);
2979 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2981 mode = GET_MODE_WIDER_MODE (mode))
2983 enum insn_code code = movstr_optab[(int) mode];
/* A movstr pattern is usable if the size fits its mode (or the mode is
   at least word-wide) and all operand predicates accept our operands.  */
2985 if (code != CODE_FOR_nothing
2986 && ((GET_CODE (size) == CONST_INT
2987 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2988 <= (GET_MODE_MASK (mode) >> 1)))
2989 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2990 && (insn_operand_predicate[(int) code][0] == 0
2991 || ((*insn_operand_predicate[(int) code][0])
2993 && (insn_operand_predicate[(int) code][1] == 0
2994 || ((*insn_operand_predicate[(int) code][1])
2996 && (insn_operand_predicate[(int) code][3] == 0
2997 || ((*insn_operand_predicate[(int) code][3])
2998 (opalign, VOIDmode))))
3000 rtx op2 = convert_to_mode (mode, size, 1);
3001 rtx last = get_last_insn ();
3004 if (insn_operand_predicate[(int) code][2] != 0
3005 && ! ((*insn_operand_predicate[(int) code][2])
3007 op2 = copy_to_mode_reg (mode, op2);
3009 pat = GEN_FCN ((int) code) (target, xinner,
/* If pattern generation failed, discard any insns it emitted.  */
3017 delete_insns_since (last);
/* Last resort: call memcpy/bcopy.  */
3022 #ifndef ACCUMULATE_OUTGOING_ARGS
3023 /* If the source is referenced relative to the stack pointer,
3024 copy it to another register to stabilize it. We do not need
3025 to do this if we know that we won't be changing sp. */
3027 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3028 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3029 temp = copy_to_reg (temp);
3032 /* Make inhibit_defer_pop nonzero around the library call
3033 to force it to pop the bcopy-arguments right away. */
3035 #ifdef TARGET_MEM_FUNCTIONS
3036 emit_library_call (memcpy_libfunc, 0,
3037 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3038 convert_to_mode (TYPE_MODE (sizetype),
3039 size, TREE_UNSIGNED (sizetype)),
3040 TYPE_MODE (sizetype));
3042 emit_library_call (bcopy_libfunc, 0,
3043 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3044 convert_to_mode (TYPE_MODE (integer_type_node),
3046 TREE_UNSIGNED (integer_type_node)),
3047 TYPE_MODE (integer_type_node));
/* ---- Case 2: scalar passed partly in registers. ---- */
3052 else if (partial > 0)
3054 /* Scalar partly in registers. */
3056 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3059 /* # words of start of argument
3060 that we must make space for but need not store. */
3061 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3062 int args_offset = INTVAL (args_so_far);
3065 /* Push padding now if padding above and stack grows down,
3066 or if padding below and stack grows up.
3067 But if space already allocated, this has already been done. */
3068 if (extra && args_addr == 0
3069 && where_pad != none && where_pad != stack_direction)
3070 anti_adjust_stack (GEN_INT (extra));
3072 /* If we make space by pushing it, we might as well push
3073 the real data. Otherwise, we can leave OFFSET nonzero
3074 and leave the space uninitialized. */
3078 /* Now NOT_STACK gets the number of words that we don't need to
3079 allocate on the stack. */
3080 not_stack = partial - offset;
3082 /* If the partial register-part of the arg counts in its stack size,
3083 skip the part of stack space corresponding to the registers.
3084 Otherwise, start copying to the beginning of the stack space,
3085 by setting SKIP to 0. */
3086 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3088 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3089 x = validize_mem (force_const_mem (mode, x));
3091 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3092 SUBREGs of such registers are not allowed. */
3093 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3094 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3095 x = copy_to_reg (x);
3097 /* Loop over all the words allocated on the stack for this arg. */
3098 /* We can do it by words, because any scalar bigger than a word
3099 has a size a multiple of a word. */
3100 #ifndef PUSH_ARGS_REVERSED
3101 for (i = not_stack; i < size; i++)
/* (Reversed-argument variant of the same loop; its #else marker is
   among the missing lines.)  */
3103 for (i = size - 1; i >= not_stack; i--)
3105 if (i >= not_stack + offset)
/* Recurse to push one word of the argument.  */
3106 emit_push_insn (operand_subword_force (x, i, mode),
3107 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3109 GEN_INT (args_offset + ((i - not_stack + skip)
3111 reg_parm_stack_space);
/* ---- Case 3: plain scalar -- a single move/push. ---- */
3116 rtx target = NULL_RTX;
3118 /* Push padding now if padding above and stack grows down,
3119 or if padding below and stack grows up.
3120 But if space already allocated, this has already been done. */
3121 if (extra && args_addr == 0
3122 && where_pad != none && where_pad != stack_direction)
3123 anti_adjust_stack (GEN_INT (extra));
3125 #ifdef PUSH_ROUNDING
3127 addr = gen_push_operand ();
/* No push insn available: compute the destination address inside the
   argument block.  */
3131 if (GET_CODE (args_so_far) == CONST_INT)
3133 = memory_address (mode,
3134 plus_constant (args_addr,
3135 INTVAL (args_so_far)));
3137 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3142 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3144 if (current_function_check_memory_usage && ! in_check_memory_usage)
3146 in_check_memory_usage = 1;
3148 target = get_push_address (GET_MODE_SIZE (mode));
3150 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3151 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3153 XEXP (x, 0), ptr_mode,
3154 GEN_INT (GET_MODE_SIZE (mode)),
3155 TYPE_MODE (sizetype));
3157 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3159 GEN_INT (GET_MODE_SIZE (mode)),
3160 TYPE_MODE (sizetype),
3161 GEN_INT (MEMORY_USE_RW),
3162 TYPE_MODE (integer_type_node));
3163 in_check_memory_usage = 0;
3168 /* If part should go in registers, copy that part
3169 into the appropriate registers. Do this now, at the end,
3170 since mem-to-mem copies above may do function calls. */
3171 if (partial > 0 && reg != 0)
3173 /* Handle calls that pass values in multiple non-contiguous locations.
3174 The Irix 6 ABI has examples of this. */
3175 if (GET_CODE (reg) == PARALLEL)
3176 emit_group_load (reg, x, -1, align); /* ??? size? */
3178 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Trailing padding when it lies in the stack-growth direction.  */
3181 if (extra && args_addr == 0 && where_pad == stack_direction)
3182 anti_adjust_stack (GEN_INT (extra));
3185 /* Expand an assignment that stores the value of FROM into TO.
3186 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3187 (This may contain a QUEUED rtx;
3188 if the value is constant, this rtx is a constant.)
3189 Otherwise, the returned value is NULL_RTX.
3191 SUGGEST_REG is no longer actually used.
3192 It used to mean, copy the value through a register
3193 and return that register, if that is possible.
3194 We now use WANT_VALUE to decide whether to do this. */
/* Expand an assignment storing FROM into TO.  If WANT_VALUE is
   nonzero, return an rtx for the stored value, else NULL_RTX.
   SUGGEST_REG is unused.  Special cases handled in order: erroneous
   lhs, component/bit-field/array-element stores, rhs that is a
   non-aggregate call, struct-return overlap copies, then the
   ordinary store_expr path.
   NOTE(review): this extraction omits many original lines
   (declarations, braces, #else arms, several condition fragments);
   the numbered lines below are NOT contiguous code.  */
3197 expand_assignment (to, from, want_value, suggest_reg)
3202 register rtx to_rtx = 0;
3205 /* Don't crash if the lhs of the assignment was erroneous. */
3207 if (TREE_CODE (to) == ERROR_MARK)
3209 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3210 return want_value ? result : NULL_RTX;
3213 /* Assignment of a structure component needs special treatment
3214 if the structure component's rtx is not simply a MEM.
3215 Assignment of an array element at a constant index, and assignment of
3216 an array element in an unaligned packed structure field, has the same
3219 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3220 || TREE_CODE (to) == ARRAY_REF)
3222 enum machine_mode mode1;
/* Decompose the reference into base object TEM plus bit position,
   size, variable offset, mode, signedness, volatility, alignment.  */
3232 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3233 &unsignedp, &volatilep, &alignment);
3235 /* If we are going to use store_bit_field and extract_bit_field,
3236 make sure to_rtx will be safe for multiple use. */
3238 if (mode1 == VOIDmode && want_value)
3239 tem = stabilize_reference (tem);
3241 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
/* Variable offset part of the reference, if any (the guard for this
   section is among the missing lines).  */
3244 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3246 if (GET_CODE (to_rtx) != MEM)
3249 if (GET_MODE (offset_rtx) != ptr_mode)
3251 #ifdef POINTERS_EXTEND_UNSIGNED
3252 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3254 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
/* Aligned-BLKmode special case: fold a constant bit position into the
   address when the field is naturally aligned for MODE1.  */
3258 if (GET_CODE (to_rtx) == MEM
3259 && GET_MODE (to_rtx) == BLKmode
3261 && (bitpos % bitsize) == 0
3262 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3263 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3265 rtx temp = change_address (to_rtx, mode1,
3266 plus_constant (XEXP (to_rtx, 0),
3269 if (GET_CODE (XEXP (temp, 0)) == REG)
3272 to_rtx = change_address (to_rtx, mode1,
3273 force_reg (GET_MODE (XEXP (temp, 0)),
/* General case: add the variable offset into the address.  */
3278 to_rtx = change_address (to_rtx, VOIDmode,
3279 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3280 force_reg (ptr_mode, offset_rtx)));
/* Volatile handling (the volatilep guard is among the missing
   lines).  */
3284 if (GET_CODE (to_rtx) == MEM)
3286 /* When the offset is zero, to_rtx is the address of the
3287 structure we are storing into, and hence may be shared.
3288 We must make a new MEM before setting the volatile bit. */
3290 to_rtx = copy_rtx (to_rtx);
3292 MEM_VOLATILE_P (to_rtx) = 1;
3294 #if 0 /* This was turned off because, when a field is volatile
3295 in an object which is not volatile, the object may be in a register,
3296 and then we would abort over here. */
/* Read-only field: mark the MEM unchanging.  */
3302 if (TREE_CODE (to) == COMPONENT_REF
3303 && TREE_READONLY (TREE_OPERAND (to, 1)))
3306 to_rtx = copy_rtx (to_rtx);
3308 RTX_UNCHANGING_P (to_rtx) = 1;
3311 /* Check the access. */
3312 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3317 enum machine_mode best_mode;
3319 best_mode = get_best_mode (bitsize, bitpos,
3320 TYPE_ALIGN (TREE_TYPE (tem)),
3322 if (best_mode == VOIDmode)
3325 best_mode_size = GET_MODE_BITSIZE (best_mode);
3326 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3327 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3328 size *= GET_MODE_SIZE (best_mode);
3330 /* Check the access right of the pointer. */
3332 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3334 GEN_INT (size), TYPE_MODE (sizetype),
3335 GEN_INT (MEMORY_USE_WO),
3336 TYPE_MODE (integer_type_node));
/* Perform the actual field store.  */
3339 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3341 /* Spurious cast makes HPUX compiler happy. */
3342 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3345 /* Required alignment of containing datum. */
3347 int_size_in_bytes (TREE_TYPE (tem)),
3348 get_alias_set (to));
3349 preserve_temp_slots (result);
3353 /* If the value is meaningful, convert RESULT to the proper mode.
3354 Otherwise, return nothing. */
3355 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3356 TYPE_MODE (TREE_TYPE (from)),
3358 TREE_UNSIGNED (TREE_TYPE (to)))
3362 /* If the rhs is a function call and its value is not an aggregate,
3363 call the function before we start to compute the lhs.
3364 This is needed for correct code for cases such as
3365 val = setjmp (buf) on machines where reference to val
3366 requires loading up part of an address in a separate insn.
3368 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3369 a promoted variable where the zero- or sign- extension needs to be done.
3370 Handling this in the normal way is safe because no computation is done
3372 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3373 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3374 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3379 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3381 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3383 /* Handle calls that return values in multiple non-contiguous locations.
3384 The Irix 6 ABI has examples of this. */
3385 if (GET_CODE (to_rtx) == PARALLEL)
3386 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3387 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3388 else if (GET_MODE (to_rtx) == BLKmode)
3389 emit_block_move (to_rtx, value, expr_size (from),
3390 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3392 emit_move_insn (to_rtx, value);
3393 preserve_temp_slots (to_rtx);
3396 return want_value ? to_rtx : NULL_RTX;
3399 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3400 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3404 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3405 if (GET_CODE (to_rtx) == MEM)
3406 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3409 /* Don't move directly into a return register. */
3410 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3415 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3416 emit_move_insn (to_rtx, temp);
3417 preserve_temp_slots (to_rtx);
3420 return want_value ? to_rtx : NULL_RTX;
3423 /* In case we are returning the contents of an object which overlaps
3424 the place the value is being stored, use a safe function when copying
3425 a value through a pointer into a structure value return block. */
3426 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3427 && current_function_returns_struct
3428 && !current_function_returns_pcc_struct)
3433 size = expr_size (from);
3434 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3435 EXPAND_MEMORY_USE_DONT);
3437 /* Copy the rights of the bitmap. */
3438 if (current_function_check_memory_usage)
3439 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3440 XEXP (to_rtx, 0), ptr_mode,
3441 XEXP (from_rtx, 0), ptr_mode,
3442 convert_to_mode (TYPE_MODE (sizetype),
3443 size, TREE_UNSIGNED (sizetype)),
3444 TYPE_MODE (sizetype));
3446 #ifdef TARGET_MEM_FUNCTIONS
3447 emit_library_call (memcpy_libfunc, 0,
3448 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3449 XEXP (from_rtx, 0), Pmode,
3450 convert_to_mode (TYPE_MODE (sizetype),
3451 size, TREE_UNSIGNED (sizetype)),
3452 TYPE_MODE (sizetype));
3454 emit_library_call (bcopy_libfunc, 0,
3455 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3456 XEXP (to_rtx, 0), Pmode,
3457 convert_to_mode (TYPE_MODE (integer_type_node),
3458 size, TREE_UNSIGNED (integer_type_node)),
3459 TYPE_MODE (integer_type_node));
3462 preserve_temp_slots (to_rtx);
3465 return want_value ? to_rtx : NULL_RTX;
3468 /* Compute FROM and store the value in the rtx we got. */
3471 result = store_expr (from, to_rtx, want_value);
3472 preserve_temp_slots (result);
3475 return want_value ? result : NULL_RTX;
3478 /* Generate code for computing expression EXP,
3479 and storing the value into TARGET.
3480 TARGET may contain a QUEUED rtx.
3482 If WANT_VALUE is nonzero, return a copy of the value
3483 not in TARGET, so that we can be sure to use the proper
3484 value in a containing expression even if TARGET has something
3485 else stored in it. If possible, we copy the value through a pseudo
3486 and return that pseudo. Or, if the value is constant, we try to
3487 return the constant. In some cases, we return a pseudo
3488 copied *from* TARGET.
3490 If the mode is BLKmode then we may return TARGET itself.
3491 It turns out that in BLKmode it doesn't cause a problem.
3492 because C has no operators that could combine two different
3493 assignments into the same BLKmode object with different values
3494 with no sequence point. Will other languages need this to
3497 If WANT_VALUE is 0, we return NULL, to make sure
3498 to catch quickly any cases where the caller uses the value
3499 and fails to set WANT_VALUE. */
3502 store_expr (exp, target, want_value)
3504 register rtx target;
3508 int dont_return_target = 0;
3510 if (TREE_CODE (exp) == COMPOUND_EXPR)
3512 /* Perform first part of compound expression, then assign from second
3514 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3516 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3518 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3520 /* For conditional expression, get safe form of the target. Then
3521 test the condition, doing the appropriate assignment on either
3522 side. This avoids the creation of unnecessary temporaries.
3523 For non-BLKmode, it is more efficient not to do this. */
3525 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3528 target = protect_from_queue (target, 1);
3530 do_pending_stack_adjust ();
3532 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3533 start_cleanup_deferral ();
3534 store_expr (TREE_OPERAND (exp, 1), target, 0);
3535 end_cleanup_deferral ();
3537 emit_jump_insn (gen_jump (lab2));
3540 start_cleanup_deferral ();
3541 store_expr (TREE_OPERAND (exp, 2), target, 0);
3542 end_cleanup_deferral ();
3547 return want_value ? target : NULL_RTX;
3549 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3550 && GET_MODE (target) != BLKmode)
3551 /* If target is in memory and caller wants value in a register instead,
3552 arrange that. Pass TARGET as target for expand_expr so that,
3553 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3554 We know expand_expr will not use the target in that case.
3555 Don't do this if TARGET is volatile because we are supposed
3556 to write it and then read it. */
3558 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3559 GET_MODE (target), 0);
3560 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3561 temp = copy_to_reg (temp);
3562 dont_return_target = 1;
3564 else if (queued_subexp_p (target))
3565 /* If target contains a postincrement, let's not risk
3566 using it as the place to generate the rhs. */
3568 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3570 /* Expand EXP into a new pseudo. */
3571 temp = gen_reg_rtx (GET_MODE (target));
3572 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3575 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3577 /* If target is volatile, ANSI requires accessing the value
3578 *from* the target, if it is accessed. So make that happen.
3579 In no case return the target itself. */
3580 if (! MEM_VOLATILE_P (target) && want_value)
3581 dont_return_target = 1;
3583 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3584 /* If this is an scalar in a register that is stored in a wider mode
3585 than the declared mode, compute the result into its declared mode
3586 and then convert to the wider mode. Our value is the computed
3589 /* If we don't want a value, we can do the conversion inside EXP,
3590 which will often result in some optimizations. Do the conversion
3591 in two steps: first change the signedness, if needed, then
3592 the extend. But don't do this if the type of EXP is a subtype
3593 of something else since then the conversion might involve
3594 more than just converting modes. */
3595 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3596 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3598 if (TREE_UNSIGNED (TREE_TYPE (exp))
3599 != SUBREG_PROMOTED_UNSIGNED_P (target))
3602 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3606 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3607 SUBREG_PROMOTED_UNSIGNED_P (target)),
3611 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3613 /* If TEMP is a volatile MEM and we want a result value, make
3614 the access now so it gets done only once. Likewise if
3615 it contains TARGET. */
3616 if (GET_CODE (temp) == MEM && want_value
3617 && (MEM_VOLATILE_P (temp)
3618 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3619 temp = copy_to_reg (temp);
3621 /* If TEMP is a VOIDmode constant, use convert_modes to make
3622 sure that we properly convert it. */
3623 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3624 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3625 TYPE_MODE (TREE_TYPE (exp)), temp,
3626 SUBREG_PROMOTED_UNSIGNED_P (target));
3628 convert_move (SUBREG_REG (target), temp,
3629 SUBREG_PROMOTED_UNSIGNED_P (target));
3630 return want_value ? temp : NULL_RTX;
3634 temp = expand_expr (exp, target, GET_MODE (target), 0);
3635 /* Return TARGET if it's a specified hardware register.
3636 If TARGET is a volatile mem ref, either return TARGET
3637 or return a reg copied *from* TARGET; ANSI requires this.
3639 Otherwise, if TEMP is not TARGET, return TEMP
3640 if it is constant (for efficiency),
3641 or if we really want the correct value. */
3642 if (!(target && GET_CODE (target) == REG
3643 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3644 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3645 && ! rtx_equal_p (temp, target)
3646 && (CONSTANT_P (temp) || want_value))
3647 dont_return_target = 1;
3650 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3651 the same as that of TARGET, adjust the constant. This is needed, for
3652 example, in case it is a CONST_DOUBLE and we want only a word-sized
3654 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3655 && TREE_CODE (exp) != ERROR_MARK
3656 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3657 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3658 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3660 if (current_function_check_memory_usage
3661 && GET_CODE (target) == MEM
3662 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3664 if (GET_CODE (temp) == MEM)
3665 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3666 XEXP (target, 0), ptr_mode,
3667 XEXP (temp, 0), ptr_mode,
3668 expr_size (exp), TYPE_MODE (sizetype));
3670 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3671 XEXP (target, 0), ptr_mode,
3672 expr_size (exp), TYPE_MODE (sizetype),
3673 GEN_INT (MEMORY_USE_WO),
3674 TYPE_MODE (integer_type_node));
3677 /* If value was not generated in the target, store it there.
3678 Convert the value to TARGET's type first if nec. */
3679 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3680 one or both of them are volatile memory refs, we have to distinguish
3682 - expand_expr has used TARGET. In this case, we must not generate
3683 another copy. This can be detected by TARGET being equal according
3685 - expand_expr has not used TARGET - that means that the source just
3686 happens to have the same RTX form. Since temp will have been created
3687 by expand_expr, it will compare unequal according to == .
3688 We must generate a copy in this case, to reach the correct number
3689 of volatile memory references. */
3691 if ((! rtx_equal_p (temp, target)
3692 || (temp != target && (side_effects_p (temp)
3693 || side_effects_p (target))))
3694 && TREE_CODE (exp) != ERROR_MARK)
3696 target = protect_from_queue (target, 1);
3697 if (GET_MODE (temp) != GET_MODE (target)
3698 && GET_MODE (temp) != VOIDmode)
3700 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3701 if (dont_return_target)
3703 /* In this case, we will return TEMP,
3704 so make sure it has the proper mode.
3705 But don't forget to store the value into TARGET. */
3706 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3707 emit_move_insn (target, temp);
3710 convert_move (target, temp, unsignedp);
3713 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3715 /* Handle copying a string constant into an array.
3716 The string constant may be shorter than the array.
3717 So copy just the string's actual length, and clear the rest. */
3721 /* Get the size of the data type of the string,
3722 which is actually the size of the target. */
3723 size = expr_size (exp);
3724 if (GET_CODE (size) == CONST_INT
3725 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3726 emit_block_move (target, temp, size,
3727 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3730 /* Compute the size of the data to copy from the string. */
3732 = size_binop (MIN_EXPR,
3733 make_tree (sizetype, size),
3735 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3736 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3740 /* Copy that much. */
3741 emit_block_move (target, temp, copy_size_rtx,
3742 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3744 /* Figure out how much is left in TARGET that we have to clear.
3745 Do all calculations in ptr_mode. */
3747 addr = XEXP (target, 0);
3748 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3750 if (GET_CODE (copy_size_rtx) == CONST_INT)
3752 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3753 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3757 addr = force_reg (ptr_mode, addr);
3758 addr = expand_binop (ptr_mode, add_optab, addr,
3759 copy_size_rtx, NULL_RTX, 0,
3762 size = expand_binop (ptr_mode, sub_optab, size,
3763 copy_size_rtx, NULL_RTX, 0,
3766 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3767 GET_MODE (size), 0, 0);
3768 label = gen_label_rtx ();
3769 emit_jump_insn (gen_blt (label));
3772 if (size != const0_rtx)
3774 /* Be sure we can write on ADDR. */
3775 if (current_function_check_memory_usage)
3776 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3778 size, TYPE_MODE (sizetype),
3779 GEN_INT (MEMORY_USE_WO),
3780 TYPE_MODE (integer_type_node));
3781 #ifdef TARGET_MEM_FUNCTIONS
3782 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3784 const0_rtx, TYPE_MODE (integer_type_node),
3785 convert_to_mode (TYPE_MODE (sizetype),
3787 TREE_UNSIGNED (sizetype)),
3788 TYPE_MODE (sizetype));
3790 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3792 convert_to_mode (TYPE_MODE (integer_type_node),
3794 TREE_UNSIGNED (integer_type_node)),
3795 TYPE_MODE (integer_type_node));
3803 /* Handle calls that return values in multiple non-contiguous locations.
3804 The Irix 6 ABI has examples of this. */
3805 else if (GET_CODE (target) == PARALLEL)
3806 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3807 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3808 else if (GET_MODE (temp) == BLKmode)
3809 emit_block_move (target, temp, expr_size (exp),
3810 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3812 emit_move_insn (target, temp);
3815 /* If we don't want a value, return NULL_RTX. */
3819 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3820 ??? The latter test doesn't seem to make sense. */
3821 else if (dont_return_target && GET_CODE (temp) != MEM)
3824 /* Return TARGET itself if it is a hard register. */
3825 else if (want_value && GET_MODE (target) != BLKmode
3826 && ! (GET_CODE (target) == REG
3827 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3828 return copy_to_reg (target);
3834 /* Return 1 if EXP just contains zeros. */
/* NOTE(review): this listing is elided -- the function header
   (`static int is_zeros_p (exp) tree exp;`), several case labels
   (presumably INTEGER_CST, COMPLEX_CST, REAL_CST, CONSTRUCTOR) and the
   closing default/return lines are not visible here; confirm against the
   full file.  The comments below describe only the visible lines.  */
3842   switch (TREE_CODE (exp))
/* Wrappers that do not change the value: look through them.  */
3846     case NON_LVALUE_EXPR:
3847       return is_zeros_p (TREE_OPERAND (exp, 0));
/* Integer constant: zero iff both halves of the two-word constant are 0.  */
3850       return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
/* Complex constant: zero iff both the real and imaginary parts are zero.  */
3854 	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
/* Real constant: bit-identical comparison against the canonical 0.0.  */
3857       return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
/* A SET_TYPE constructor represents ranges of set bits, so it is all-zero
   exactly when it has no elements at all.  */
3860       if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3861 	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
/* Any other constructor: all-zero iff every element value is all-zero.  */
3862       for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3863 	if (! is_zeros_p (TREE_VALUE (elt)))
3873 /* Return 1 if EXP contains mostly (3/4) zeros. */
/* NOTE(review): listing is elided -- the `static int` line, the parameter
   declaration, braces and the loop body that increments `zeros`/`elts`
   are not all visible here.  */
3876 mostly_zeros_p (exp)
3879   if (TREE_CODE (exp) == CONSTRUCTOR)
3881       int elts = 0, zeros = 0;
3882       tree elt = CONSTRUCTOR_ELTS (exp);
3883       if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3885 	  /* If there are no ranges of true bits, it is all zero. */
3886 	  return elt == NULL_TREE;
/* Count how many elements are (mostly) zero.  RANGE_EXPR indices count as
   a single element here, so the statistic undercounts ranges.  */
3888       for (; elt; elt = TREE_CHAIN (elt))
3890 	  /* We do not handle the case where the index is a RANGE_EXPR,
3891 	     so the statistic will be somewhat inaccurate.
3892 	     We do make a more accurate count in store_constructor itself,
3893 	     so since this function is only used for nested array elements,
3894 	     this should be close enough. */
3895 	  if (mostly_zeros_p (TREE_VALUE (elt)))
/* "Mostly" means at least 3/4 of the counted elements are zero.  */
3900       return 4 * zeros >= 3 * elts;
/* Not a constructor: fall back to the exact all-zeros test.  */
3903   return is_zeros_p (exp);
3906 /* Helper function for store_constructor.
3907    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3908    TYPE is the type of the CONSTRUCTOR, not the element type.
3909    CLEARED is as for store_constructor.
3911    This provides a recursive shortcut back to store_constructor when it isn't
3912    necessary to go through store_field.  This is so that we can pass through
3913    the cleared field to let store_constructor know that we may not have to
3914    clear a substructure if the outer structure has already been cleared. */
/* NOTE(review): elided listing -- the `static void` line and some
   parameter declarations (target, exp, type, cleared) are missing here.  */
3917 store_constructor_field (target, bitsize, bitpos,
3918 		         mode, exp, type, cleared)
3920      int bitsize, bitpos;
3921      enum machine_mode mode;
/* Take the shortcut only for a byte-aligned CONSTRUCTOR; a non-zero
   bit position into a register target is left to store_field's
   bit-field machinery.  */
3925   if (TREE_CODE (exp) == CONSTRUCTOR
3926       && bitpos % BITS_PER_UNIT == 0
3927       /* If we have a non-zero bitpos for a register target, then we just
3928 	 let store_field do the bitfield handling.  This is unlikely to
3929 	 generate unnecessary clear instructions anyways.  */
3930       && (bitpos == 0 || GET_CODE (target) == MEM))
/* Point TARGET at the sub-object (byte offset = bitpos / BITS_PER_UNIT)
   and recurse, forwarding CLEARED so inner clears can be skipped.  */
3933 	target = change_address (target, VOIDmode,
3934 				 plus_constant (XEXP (target, 0),
3935 						bitpos / BITS_PER_UNIT));
3936       store_constructor (exp, target, cleared);
/* General case: delegate to store_field (no value wanted, alignment and
   total size taken from the constructor's type).  */
3939     store_field (target, bitsize, bitpos, mode, exp,
3940 		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3941 		 int_size_in_bytes (type), 0);
3944 /* Store the value of constructor EXP into the rtx TARGET.
3945    TARGET is either a REG or a MEM.
3946    CLEARED is true if TARGET is known to have been zero'd. */
/* NOTE(review): this listing is elided -- braces, `else` arms, several
   declarations (elt, i, bitsize, bitpos, unsignedp, offset_rtx, ...) and
   some statements are missing (the embedded original line numbers jump).
   The added comments describe only what the visible lines establish.
   The function dispatches on the constructor's type: RECORD/UNION/
   QUAL_UNION, then ARRAY_TYPE, then SET_TYPE.  */
3949 store_constructor (exp, target, cleared)
3954   tree type = TREE_TYPE (exp);
3955   rtx exp_size = expr_size (exp);
3957   /* We know our target cannot conflict, since safe_from_p has been called. */
3959   /* Don't try copying piece by piece into a hard register
3960      since that is vulnerable to being clobbered by EXP.
3961      Instead, construct in a pseudo register and then copy it all. */
3962   if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3964       rtx temp = gen_reg_rtx (GET_MODE (target));
3965       store_constructor (exp, temp, 0);
3966       emit_move_insn (target, temp);
/* --- Aggregate (struct/union) constructors --- */
3971   if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3972       || TREE_CODE (type) == QUAL_UNION_TYPE)
3976       /* Inform later passes that the whole union value is dead.  */
3977       if (TREE_CODE (type) == UNION_TYPE
3978 	  || TREE_CODE (type) == QUAL_UNION_TYPE)
3979 	emit_insn (gen_rtx_CLOBBER (VOIDmode, target))
3981       /* If we are building a static constructor into a register,
3982 	 set the initial value as zero so we can fold the value into
3983 	 a constant.  But if more than one register is involved,
3984 	 this probably loses.  */
3985       else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3986 	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3989 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
3994       /* If the constructor has fewer fields than the structure
3995 	 or if we are initializing the structure to mostly zeros,
3996 	 clear the whole structure first.  */
3997       else if ((list_length (CONSTRUCTOR_ELTS (exp))
3998 		!= list_length (TYPE_FIELDS (type)))
3999 	       || mostly_zeros_p (exp))
4002 	  clear_storage (target, expr_size (exp),
4003 			 TYPE_ALIGN (type) / BITS_PER_UNIT);
4008 	/* Inform later passes that the old value is dead.  */
4009 	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4011       /* Store each element of the constructor into
4012 	 the corresponding field of TARGET.  */
4014       for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4016 	  register tree field = TREE_PURPOSE (elt);
4017 	  tree value = TREE_VALUE (elt);
4018 	  register enum machine_mode mode;
4022 	  tree pos, constant = 0, offset = 0;
4023 	  rtx to_rtx = target;
4025 	  /* Just ignore missing fields.
4026 	     We cleared the whole structure, above,
4027 	     if any fields are missing.  */
/* If TARGET was cleared, a field whose initializer is all zeros needs
   no store at all.  */
4031 	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
4034 	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4035 	  unsignedp = TREE_UNSIGNED (field);
4036 	  mode = DECL_MODE (field);
4037 	  if (DECL_BIT_FIELD (field))
/* Split the field position into a constant bit part and a variable
   byte offset part (PLUS_EXPR with constant operand 1).  */
4040 	  pos = DECL_FIELD_BITPOS (field);
4041 	  if (TREE_CODE (pos) == INTEGER_CST)
4043 	  else if (TREE_CODE (pos) == PLUS_EXPR
4044 		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4045 	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4050 	    bitpos = TREE_INT_CST_LOW (constant);
/* Variable offset: wrap PLACEHOLDER_EXPRs so they resolve against this
   object, convert bits to bytes, and add the expanded offset to the
   address.  */
4056 	      if (contains_placeholder_p (offset))
4057 		offset = build (WITH_RECORD_EXPR, sizetype,
4058 				offset, make_tree (TREE_TYPE (exp), target));
4060 	      offset = size_binop (FLOOR_DIV_EXPR, offset,
4061 				   size_int (BITS_PER_UNIT));
4063 	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4064 	      if (GET_CODE (to_rtx) != MEM)
4067 	      if (GET_MODE (offset_rtx) != ptr_mode)
4069 #ifdef POINTERS_EXTEND_UNSIGNED
4070 		  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4072 		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4077 		= change_address (to_rtx, VOIDmode,
4078 				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4079 						force_reg (ptr_mode, offset_rtx)));
/* Mark stores into read-only fields as unchanging; copy the MEM first
   so the flag does not leak onto a shared rtx.  */
4081 	  if (TREE_READONLY (field))
4083 	      if (GET_CODE (to_rtx) == MEM)
4084 		to_rtx = copy_rtx (to_rtx);
4086 	      RTX_UNCHANGING_P (to_rtx) = 1;
4089 #ifdef WORD_REGISTER_OPERATIONS
4090 	  /* If this initializes a field that is smaller than a word, at the
4091 	     start of a word, try to widen it to a full word.
4092 	     This special case allows us to output C++ member function
4093 	     initializations in a form that the optimizers can understand.  */
4095 	      && GET_CODE (target) == REG
4096 	      && bitsize < BITS_PER_WORD
4097 	      && bitpos % BITS_PER_WORD == 0
4098 	      && GET_MODE_CLASS (mode) == MODE_INT
4099 	      && TREE_CODE (value) == INTEGER_CST
4100 	      && GET_CODE (exp_size) == CONST_INT
4101 	      && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4103 	      tree type = TREE_TYPE (value);
4104 	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
4106 		  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4107 		  value = convert (type, value);
/* On big-endian targets the field sits in the high-order bits of the
   word, so pre-shift the constant left.  */
4109 	      if (BYTES_BIG_ENDIAN)
4111 		  = fold (build (LSHIFT_EXPR, type, value,
4112 				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4113 	      bitsize = BITS_PER_WORD;
4117 	  store_constructor_field (to_rtx, bitsize, bitpos,
4118 				   mode, value, type, cleared);
/* --- Array constructors --- */
4121   else if (TREE_CODE (type) == ARRAY_TYPE)
4126       tree domain = TYPE_DOMAIN (type);
4127       HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4128       HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4129       tree elttype = TREE_TYPE (type);
4131       /* If the constructor has fewer elements than the array,
4132 	 clear the whole array first.  Similarly if this is
4133 	 static constructor of a non-BLKmode object.  */
4134       if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4138 	  HOST_WIDE_INT count = 0, zero_count = 0;
4140 	  /* This loop is a more accurate version of the loop in
4141 	     mostly_zeros_p (it handles RANGE_EXPR in an index).
4142 	     It is also needed to check for missing elements.  */
4143 	  for (elt = CONSTRUCTOR_ELTS (exp);
4145 	       elt = TREE_CHAIN (elt))
4147 	      tree index = TREE_PURPOSE (elt);
4148 	      HOST_WIDE_INT this_node_count;
4149 	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4151 		  tree lo_index = TREE_OPERAND (index, 0);
4152 		  tree hi_index = TREE_OPERAND (index, 1);
4153 		  if (TREE_CODE (lo_index) != INTEGER_CST
4154 		      || TREE_CODE (hi_index) != INTEGER_CST)
/* A constant range initializes (hi - lo + 1) elements at once.  */
4159 		  this_node_count = TREE_INT_CST_LOW (hi_index)
4160 		    - TREE_INT_CST_LOW (lo_index) + 1;
4163 		this_node_count = 1;
4164 	      count += this_node_count;
4165 	      if (mostly_zeros_p (TREE_VALUE (elt)))
4166 		zero_count += this_node_count;
4168 	  /* Clear the entire array first if there are any missing elements,
4169 	     or if the incidence of zero elements is >= 75%.  */
4170 	  if (count < maxelt - minelt + 1
4171 	      || 4 * zero_count >= 3 * count)
4177 	  clear_storage (target, expr_size (exp),
4178 			 TYPE_ALIGN (type) / BITS_PER_UNIT);
4182 	/* Inform later passes that the old value is dead.  */
4183 	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4185       /* Store each element of the constructor into
4186 	 the corresponding element of TARGET, determined
4187 	 by counting the elements.  */
4188       for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4190 	   elt = TREE_CHAIN (elt), i++)
4192 	  register enum machine_mode mode;
4196 	  tree value = TREE_VALUE (elt);
4197 	  tree index = TREE_PURPOSE (elt);
4198 	  rtx xtarget = target;
4200 	  if (cleared && is_zeros_p (value))
4203 	  mode = TYPE_MODE (elttype);
4204 	  bitsize = GET_MODE_BITSIZE (mode);
4205 	  unsignedp = TREE_UNSIGNED (elttype);
/* RANGE_EXPR index [lo .. hi]: either unroll (small constant range)
   or emit a run-time loop that stores the value repeatedly.  */
4207 	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4209 	      tree lo_index = TREE_OPERAND (index, 0);
4210 	      tree hi_index = TREE_OPERAND (index, 1);
4211 	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4212 	      struct nesting *loop;
4213 	      HOST_WIDE_INT lo, hi, count;
4216 	      /* If the range is constant and "small", unroll the loop.  */
4217 	      if (TREE_CODE (lo_index) == INTEGER_CST
4218 		  && TREE_CODE (hi_index) == INTEGER_CST
4219 		  && (lo = TREE_INT_CST_LOW (lo_index),
4220 		      hi = TREE_INT_CST_LOW (hi_index),
4221 		      count = hi - lo + 1,
4222 		      (GET_CODE (target) != MEM
4224 		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4225 			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4228 		  lo -= minelt;  hi -= minelt;
4229 		  for (; lo <= hi; lo++)
4231 		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4232 		      store_constructor_field (target, bitsize, bitpos,
4233 					       mode, value, type, cleared);
/* Run-time loop: allocate an index pseudo, initialize it to lo_index,
   and store VALUE at the computed address each iteration.  */
4238 		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4239 		  loop_top = gen_label_rtx ();
4240 		  loop_end = gen_label_rtx ();
4242 		  unsignedp = TREE_UNSIGNED (domain);
4244 		  index = build_decl (VAR_DECL, NULL_TREE, domain);
4246 		  DECL_RTL (index) = index_r
4247 		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4250 		  if (TREE_CODE (value) == SAVE_EXPR
4251 		      && SAVE_EXPR_RTL (value) == 0)
4253 		      /* Make sure value gets expanded once before the
4255 		      expand_expr (value, const0_rtx, VOIDmode, 0);
4258 		  store_expr (lo_index, index_r, 0);
4259 		  loop = expand_start_loop (0);
4261 		  /* Assign value to element index.  */
/* position = (index - domain_min) * (element size in bytes).  */
4262 		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4263 					 size_int (BITS_PER_UNIT));
4264 		  position = size_binop (MULT_EXPR,
4265 					 size_binop (MINUS_EXPR, index,
4266 						     TYPE_MIN_VALUE (domain)),
4268 		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4269 		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4270 		  xtarget = change_address (target, mode, addr);
4271 		  if (TREE_CODE (value) == CONSTRUCTOR)
4272 		    store_constructor (value, xtarget, cleared);
4274 		    store_expr (value, xtarget, 0);
/* Loop control: exit when index >= hi, otherwise pre-increment index.  */
4276 		  expand_exit_loop_if_false (loop,
4277 					     build (LT_EXPR, integer_type_node,
4280 		  expand_increment (build (PREINCREMENT_EXPR,
4282 					   index, integer_one_node), 0, 0);
4284 		  emit_label (loop_end);
4286 		  /* Needed by stupid register allocation. to extend the
4287 		     lifetime of pseudo-regs used by target past the end
4289 		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
/* Variable index or variable-sized element: compute the address at
   run time and store through it.  */
4292 	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4293 		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4299 		index = size_int (i);
4302 		index = size_binop (MINUS_EXPR, index,
4303 				    TYPE_MIN_VALUE (domain));
4304 	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4305 				     size_int (BITS_PER_UNIT));
4306 	      position = size_binop (MULT_EXPR, index, position);
4307 	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4308 	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4309 	      xtarget = change_address (target, mode, addr);
4310 	      store_expr (value, xtarget, 0);
/* Constant index (or none, meaning position i): a compile-time bitpos.  */
4315 		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4316 			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4318 		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4319 	      store_constructor_field (target, bitsize, bitpos,
4320 				       mode, value, type, cleared);
4324   /* set constructor assignments */
/* --- Pascal/Chill-style SET_TYPE constructors: elements are bit ranges
   (TREE_PURPOSE = start bit or NULL, TREE_VALUE = end bit or value). --- */
4325   else if (TREE_CODE (type) == SET_TYPE)
4327       tree elt = CONSTRUCTOR_ELTS (exp);
4328       int nbytes = int_size_in_bytes (type), nbits;
4329       tree domain = TYPE_DOMAIN (type);
4330       tree domain_min, domain_max, bitlength;
4332       /* The default implementation strategy is to extract the constant
4333 	 parts of the constructor, use that to initialize the target,
4334 	 and then "or" in whatever non-constant ranges we need in addition.
4336 	 If a large set is all zero or all ones, it is
4337 	 probably better to set it using memset (if available) or bzero.
4338 	 Also, if a large set has just a single range, it may also be
4339 	 better to first clear the whole set (using
4340 	 bzero/memset), and set the bits we want.  */
4342       /* Check for all zeros.  */
4343       if (elt == NULL_TREE)
4346 	    clear_storage (target, expr_size (exp),
4347 			   TYPE_ALIGN (type) / BITS_PER_UNIT);
/* Number of bits in the set = domain_max - domain_min + 1.  */
4351       domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4352       domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4353       bitlength = size_binop (PLUS_EXPR,
4354 			      size_binop (MINUS_EXPR, domain_max, domain_min),
4357       if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4359       nbits = TREE_INT_CST_LOW (bitlength);
4361       /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4362 	 are "complicated" (more than one range), initialize (the
4363 	 constant parts) by copying from a constant.  */
4364       if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4365 	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4367 	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4368 	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4369 	  char *bit_buffer = (char *) alloca (nbits);
4370 	  HOST_WIDE_INT word = 0;
4373 	  int offset = 0;	/* In bytes from beginning of set.  */
/* Expand the constant part of the set into BIT_BUFFER; ELT is left
   pointing at the remaining (non-constant) elements.  */
4374 	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4377 	      if (bit_buffer[ibit])
4379 		  if (BYTES_BIG_ENDIAN)
4380 		    word |= (1 << (set_word_size - 1 - bit_pos));
4382 		    word |= 1 << bit_pos;
/* Flush the accumulated word when it fills up or at the last bit.  */
4385 	      if (bit_pos >= set_word_size || ibit == nbits)
4387 		  if (word != 0 || ! cleared)
4389 		      rtx datum = GEN_INT (word);
4391 		      /* The assumption here is that it is safe to use
4392 			 XEXP if the set is multi-word, but not if
4393 			 it's single-word.  */
4394 		      if (GET_CODE (target) == MEM)
4396 			  to_rtx = plus_constant (XEXP (target, 0), offset);
4397 			  to_rtx = change_address (target, mode, to_rtx);
4399 		      else if (offset == 0)
4403 		      emit_move_insn (to_rtx, datum);
4409 		  offset += set_word_size / BITS_PER_UNIT;
4415 	  /* Don't bother clearing storage if the set is all ones.  */
4416 	  if (TREE_CHAIN (elt) != NULL_TREE
4417 	      || (TREE_PURPOSE (elt) == NULL_TREE
4419 		  : ( TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4420 		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4421 		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4422 			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4424 	    clear_storage (target, expr_size (exp),
4425 			   TYPE_ALIGN (type) / BITS_PER_UNIT);
/* Remaining (non-constant) ranges: set the bits at run time, via memset
   for byte-aligned constant ranges or the __setbits library routine.  */
4428       for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4430 	  /* start of range of element or NULL */
4431 	  tree startbit = TREE_PURPOSE (elt);
4432 	  /* end of range of element, or element value */
4433 	  tree endbit = TREE_VALUE (elt);
4434 #ifdef TARGET_MEM_FUNCTIONS
4435 	  HOST_WIDE_INT startb, endb;
4437 	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4439 	  bitlength_rtx = expand_expr (bitlength,
4440 			NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4442 	  /* handle non-range tuple element like [ expr ] */
4443 	  if (startbit == NULL_TREE)
4445 	      startbit = save_expr (endbit);
/* Rebase the range so that bit 0 corresponds to domain_min.  */
4448 	  startbit = convert (sizetype, startbit);
4449 	  endbit = convert (sizetype, endbit);
4450 	  if (! integer_zerop (domain_min))
4452 	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4453 	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4455 	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4456 				      EXPAND_CONST_ADDRESS);
4457 	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4458 				    EXPAND_CONST_ADDRESS);
/* The library routines need an addressable object: spill a register
   target to a stack temporary first.  */
4462 	      targetx = assign_stack_temp (GET_MODE (target),
4463 					   GET_MODE_SIZE (GET_MODE (target)),
4465 	      emit_move_insn (targetx, target);
4467 	  else if (GET_CODE (target) == MEM)
4472 #ifdef TARGET_MEM_FUNCTIONS
4473 	  /* Optimization:  If startbit and endbit are
4474 	     constants divisible by BITS_PER_UNIT,
4475 	     call memset instead.  */
4476 	  if (TREE_CODE (startbit) == INTEGER_CST
4477 	      && TREE_CODE (endbit) == INTEGER_CST
4478 	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4479 	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
/* memset the byte range [startb/8, endb/8) to all-ones.  */
4481 	      emit_library_call (memset_libfunc, 0,
4483 				 plus_constant (XEXP (targetx, 0),
4484 						startb / BITS_PER_UNIT),
4486 				 constm1_rtx, TYPE_MODE (integer_type_node),
4487 				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4488 				 TYPE_MODE (sizetype));
/* General case: __setbits (addr, bitlength, startbit, endbit).  */
4493 	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4494 			       0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4495 			       bitlength_rtx, TYPE_MODE (sizetype),
4496 			       startbit_rtx, TYPE_MODE (sizetype),
4497 			       endbit_rtx, TYPE_MODE (sizetype));
/* Copy the result back if we worked in a stack temporary.  */
4500 	    emit_move_insn (target, targetx);
4508 /* Store the value of EXP (an expression tree)
4509    into a subfield of TARGET which has mode MODE and occupies
4510    BITSIZE bits, starting BITPOS bits from the start of TARGET.
4511    If MODE is VOIDmode, it means that we are storing into a bit-field.
4513    If VALUE_MODE is VOIDmode, return nothing in particular.
4514    UNSIGNEDP is not used in this case.
4516    Otherwise, return an rtx for the value stored.  This rtx
4517    has mode VALUE_MODE if that is convenient to do.
4518    In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4520    ALIGN is the alignment that TARGET is known to have, measured in bytes.
4521    TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4523    ALIAS_SET is the alias set for the destination.  This value will
4524    (in general) be different from that for TARGET, since TARGET is a
4525    reference to the containing structure.  */
/* NOTE(review): elided listing -- the `static rtx` line, some parameter
   declarations, braces and several statements are missing here; the
   comments below describe only the visible lines.  */
4528 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4529 	     unsignedp, align, total_size, alias_set)
4531      int bitsize, bitpos;
4532      enum machine_mode mode;
4534      enum machine_mode value_mode;
4540   HOST_WIDE_INT width_mask = 0;
4542   if (TREE_CODE (exp) == ERROR_MARK)
/* Mask of the low BITSIZE bits, used below to truncate a returned value.  */
4545   if (bitsize < HOST_BITS_PER_WIDE_INT)
4546     width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4548   /* If we are storing into an unaligned field of an aligned union that is
4549      in a register, we may have the mode of TARGET being an integer mode but
4550      MODE == BLKmode.  In that case, get an aligned object whose size and
4551      alignment are the same as TARGET and store TARGET into it (we can avoid
4552      the store if the field being stored is the entire width of TARGET).  Then
4553      call ourselves recursively to store the field into a BLKmode version of
4554      that object.  Finally, load from the object into TARGET.  This is not
4555      very efficient in general, but should only be slightly more expensive
4556      than the otherwise-required unaligned accesses.  Perhaps this can be
4557      cleaned up later.  */
4560       && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4562       rtx object = assign_stack_temp (GET_MODE (target),
4563 				      GET_MODE_SIZE (GET_MODE (target)), 0);
/* BLK_OBJECT aliases OBJECT but is viewed in BLKmode for the recursive
   store_field call.  */
4564       rtx blk_object = copy_rtx (object);
4566       MEM_IN_STRUCT_P (object) = 1;
4567       MEM_IN_STRUCT_P (blk_object) = 1;
4568       PUT_MODE (blk_object, BLKmode);
/* Skip the pre-copy when the field covers the whole of TARGET.  */
4570       if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4571 	emit_move_insn (object, target);
4573       store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4574 		   align, total_size, alias_set);
4576       /* Even though we aren't returning target, we need to
4577 	 give it the updated value.  */
4578       emit_move_insn (target, object);
4583   /* If the structure is in a register or if the component
4584      is a bit field, we cannot use addressing to access it.
4585      Use bit-field techniques or SUBREG to store in it.  */
4587   if (mode == VOIDmode
4588       || (mode != BLKmode && ! direct_store[(int) mode])
4589       || GET_CODE (target) == REG
4590       || GET_CODE (target) == SUBREG
4591       /* If the field isn't aligned enough to store as an ordinary memref,
4592 	 store it as a bit field.  */
4593       || (SLOW_UNALIGNED_ACCESS
4594 	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4595       || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4597       rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4599       /* If BITSIZE is narrower than the size of the type of EXP
4600 	 we will be narrowing TEMP.  Normally, what's wanted are the
4601 	 low-order bits.  However, if EXP's type is a record and this is
4602 	 big-endian machine, we want the upper BITSIZE bits.  */
4603       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4604 	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4605 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4606 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4607 			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4611       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4613       if (mode != VOIDmode && mode != BLKmode
4614 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
4615 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4617       /* If the modes of TARGET and TEMP are both BLKmode, both
4618 	 must be in memory and BITPOS must be aligned on a byte
4619 	 boundary.  If so, we simply do a block copy.  */
4620       if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4622 	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4623 	      || bitpos % BITS_PER_UNIT != 0)
4626 	  target = change_address (target, VOIDmode,
4627 				   plus_constant (XEXP (target, 0),
4628 						bitpos / BITS_PER_UNIT));
/* Round the bit count up to whole bytes for the block move.  */
4630 	  emit_block_move (target, temp,
4631 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4635 	  return value_mode == VOIDmode ? const0_rtx : target;
4638       /* Store the value in the bitfield.  */
4639       store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4640       if (value_mode != VOIDmode)
4642 	  /* The caller wants an rtx for the value.  */
4643 	  /* If possible, avoid refetching from the bitfield itself.  */
4645 	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4648 	      enum machine_mode tmode;
/* Unsigned value: mask down to BITSIZE bits.  Signed value: sign-extend
   by a left shift followed by an arithmetic right shift.  */
4651 		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4652 	      tmode = GET_MODE (temp);
4653 	      if (tmode == VOIDmode)
4655 	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4656 	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4657 	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Otherwise read the value back out of the bit-field.  */
4659 	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
4660 				    NULL_RTX, value_mode, 0, align,
/* Addressable, sufficiently aligned case: build a MEM for just the
   component and store through it with store_expr.  */
4667       rtx addr = XEXP (target, 0);
4670       /* If a value is wanted, it must be the lhs;
4671 	 so make the address stable for multiple use.  */
4673       if (value_mode != VOIDmode && GET_CODE (addr) != REG
4674 	  && ! CONSTANT_ADDRESS_P (addr)
4675 	  /* A frame-pointer reference is already stable.  */
4676 	  && ! (GET_CODE (addr) == PLUS
4677 		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
4678 		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
4679 		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4680 	addr = copy_to_reg (addr);
4682       /* Now build a reference to just the desired component.  */
4684       to_rtx = copy_rtx (change_address (target, mode,
4685 					 plus_constant (addr,
4687 							/ BITS_PER_UNIT))));
4688       MEM_IN_STRUCT_P (to_rtx) = 1;
4689       MEM_ALIAS_SET (to_rtx) = alias_set;
4691       return store_expr (exp, to_rtx, value_mode != VOIDmode);
4695 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4696 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4697 ARRAY_REFs and find the ultimate containing object, which we return.
4699 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4700 bit position, and *PUNSIGNEDP to the signedness of the field.
4701 If the position of the field is variable, we store a tree
4702 giving the variable offset (in units) in *POFFSET.
4703 This offset is in addition to the bit position.
4704 If the position is not variable, we store 0 in *POFFSET.
4705 We set *PALIGNMENT to the alignment in bytes of the address that will be
4706 computed. This is the alignment of the thing we return if *POFFSET
4707 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4709 If any of the extraction expressions is volatile,
4710 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4712 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4713 is a mode that can be used to access the field. In that case, *PBITSIZE
4716 If the field describes a variable-sized object, *PMODE is set to
4717 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4718 this case, but the address of the object can be found. */
/* NOTE(review): this excerpt elides some original lines; the comments added
   below describe only what is visible here.  The full contract of the output
   parameters is documented in the block comment above.  */
4721 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4722 punsignedp, pvolatilep, palignment)
4727 enum machine_mode *pmode;
/* Remember the outermost expression so the WITH_RECORD_EXPR built at the
   end can refer back to it.  */
4732 tree orig_exp = exp;
4734 enum machine_mode mode = VOIDmode;
/* Accumulated variable byte offset; starts at zero.  */
4735 tree offset = integer_zero_node;
/* Alignment starts at the maximum and is only ever reduced below.  */
4736 unsigned int alignment = BIGGEST_ALIGNMENT;
/* Determine the size tree and, when possible, the mode and signedness of
   the outermost reference.  */
4738 if (TREE_CODE (exp) == COMPONENT_REF)
4740 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
/* A non-bit-field component can be accessed in the field decl's own mode;
   for a bit-field MODE stays VOIDmode per the contract above.  */
4741 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4742 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4743 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4745 else if (TREE_CODE (exp) == BIT_FIELD_REF)
/* For BIT_FIELD_REF operand 1 is the size in bits.  */
4747 size_tree = TREE_OPERAND (exp, 1);
4748 *punsignedp = TREE_UNSIGNED (exp);
4752 mode = TYPE_MODE (TREE_TYPE (exp));
4753 *pbitsize = GET_MODE_BITSIZE (mode);
4754 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* A variable-sized field gets BLKmode and bitsize -1, as documented in
   the block comment above.  */
4759 if (TREE_CODE (size_tree) != INTEGER_CST)
4760 mode = BLKmode, *pbitsize = -1;
4762 *pbitsize = TREE_INT_CST_LOW (size_tree);
4765 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4766 and find the ultimate containing object. */
4772 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
/* POS is the field's bit position: DECL_FIELD_BITPOS for a component,
   operand 2 for a BIT_FIELD_REF.  */
4774 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4775 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4776 : TREE_OPERAND (exp, 2));
4777 tree constant = integer_zero_node, var = pos;
4779 /* If this field hasn't been filled in yet, don't go
4780 past it. This should only happen when folding expressions
4781 made during type construction. */
4785 /* Assume here that the offset is a multiple of a unit.
4786 If not, there should be an explicitly added constant. */
/* Split POS into a constant bit part and a variable part.  */
4787 if (TREE_CODE (pos) == PLUS_EXPR
4788 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4789 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4790 else if (TREE_CODE (pos) == INTEGER_CST)
4791 constant = pos, var = integer_zero_node;
/* The constant part accumulates into *PBITPOS (bits); the variable part
   is converted from bits to units and folded into OFFSET.  */
4793 *pbitpos += TREE_INT_CST_LOW (constant);
4794 offset = size_binop (PLUS_EXPR, offset,
4795 size_binop (EXACT_DIV_EXPR, var,
4796 size_int (BITS_PER_UNIT)));
4799 else if (TREE_CODE (exp) == ARRAY_REF)
4801 /* This code is based on the code in case ARRAY_REF in expand_expr
4802 below. We assume here that the size of an array element is
4803 always an integral multiple of BITS_PER_UNIT. */
4805 tree index = TREE_OPERAND (exp, 1);
4806 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4808 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4809 tree index_type = TREE_TYPE (index);
/* Widen/narrow the index to sizetype's precision before arithmetic.  */
4812 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4814 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4816 index_type = TREE_TYPE (index);
4819 /* Optimize the special-case of a zero lower bound.
4821 We convert the low_bound to sizetype to avoid some problems
4822 with constant folding. (E.g. suppose the lower bound is 1,
4823 and its mode is QI. Without the conversion, (ARRAY
4824 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4825 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4827 But sizetype isn't quite right either (especially if
4828 the lowbound is negative). FIXME */
4830 if (! integer_zerop (low_bound))
4831 index = fold (build (MINUS_EXPR, index_type, index,
4832 convert (sizetype, low_bound)));
/* For a constant index, try computing the bit offset directly in
   signed bitsize type.  */
4834 if (TREE_CODE (index) == INTEGER_CST)
4836 index = convert (sbitsizetype, index);
4837 index_type = TREE_TYPE (index);
4840 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4841 convert (sbitsizetype,
4842 TYPE_SIZE (TREE_TYPE (exp)))));
/* Only use the constant bit offset if it fits in a single word
   (high part of the double-int is zero).  */
4844 if (TREE_CODE (xindex) == INTEGER_CST
4845 && TREE_INT_CST_HIGH (xindex) == 0)
4846 *pbitpos += TREE_INT_CST_LOW (xindex);
4849 /* Either the bit offset calculated above is not constant, or
4850 it overflowed. In either case, redo the multiplication
4851 against the size in units. This is especially important
4852 in the non-constant case to avoid a division at runtime. */
4853 xindex = fold (build (MULT_EXPR, ssizetype, index,
4855 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
/* Offsets mentioning a PLACEHOLDER_EXPR must be wrapped so they can be
   evaluated relative to this object later.  */
4857 if (contains_placeholder_p (xindex))
4858 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4860 offset = size_binop (PLUS_EXPR, offset, xindex);
/* NOTE(review): this condition continues across elided lines; it appears
   to terminate the walk except for NON_LVALUE_EXPR wrappers and
   mode-preserving NOP/CONVERT conversions.  */
4863 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4864 && ! ((TREE_CODE (exp) == NOP_EXPR
4865 || TREE_CODE (exp) == CONVERT_EXPR)
4866 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4867 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4869 && (TYPE_MODE (TREE_TYPE (exp))
4870 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4873 /* If any reference in the chain is volatile, the effect is volatile. */
4874 if (TREE_THIS_VOLATILE (exp))
4877 /* If the offset is non-constant already, then we can't assume any
4878 alignment more than the alignment here. */
4879 if (! integer_zerop (offset))
4880 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
/* Descend one level into the reference chain.  */
4882 exp = TREE_OPERAND (exp, 0)
4885 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4886 alignment = MIN (alignment, DECL_ALIGN (exp));
4887 else if (TREE_TYPE (exp) != 0)
4888 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4890 if (integer_zerop (offset))
4893 if (offset != 0 && contains_placeholder_p (offset))
4894 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
/* Report the alignment in bytes, not bits.  */
4898 *palignment = alignment / BITS_PER_UNIT;
4902 /* Subroutine of expand_expr: compute memory_usage from modifier. */
/* Map an expand_modifier value onto the memory_use_mode reported to the
   memory-checking instrumentation (see the chkr_check_addr_libfunc call
   in expand_expr below).  */
4903 static enum memory_use_mode
4904 get_memory_usage_from_modifier (modifier)
4905 enum expand_modifier modifier;
/* NOTE(review): the switch header and some case labels are elided from
   this excerpt.  */
4911 return MEMORY_USE_RO;
4913 case EXPAND_MEMORY_USE_WO:
4914 return MEMORY_USE_WO;
4916 case EXPAND_MEMORY_USE_RW:
4917 return MEMORY_USE_RW;
4919 case EXPAND_MEMORY_USE_DONT:
4920 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4921 MEMORY_USE_DONT, because they are modifiers to a call of
4922 expand_expr in the ADDR_EXPR case of expand_expr. */
4923 case EXPAND_CONST_ADDRESS:
4924 case EXPAND_INITIALIZER:
4925 return MEMORY_USE_DONT;
4926 case EXPAND_MEMORY_USE_BAD:
4932 /* Given an rtx VALUE that may contain additions and multiplications,
4933 return an equivalent value that just refers to a register or memory.
4934 This is done by generating instructions to perform the arithmetic
4935 and returning a pseudo-register containing the value.
4937 The returned value may be a REG, SUBREG, MEM or constant. */
/* NOTE(review): lines are elided in this excerpt.  See the block comment
   above: VALUE may contain PLUS/MINUS/MULT; we emit insns to perform the
   arithmetic and return a REG, SUBREG, MEM or constant.  */
4940 force_operand (value, target)
4943 register optab binoptab = 0;
4944 /* Use a temporary to force order of execution of calls to
4948 /* Use subtarget as the target for operand 0 of a binary operation. */
4949 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4951 /* Check for a PIC address load. */
/* A (pic_offset_table_rtx +/- SYMBOL_REF/LABEL_REF/CONST) address must be
   kept whole, so move the whole value into a fresh pseudo rather than
   decomposing it.  */
4953 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4954 && XEXP (value, 0) == pic_offset_table_rtx
4955 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4956 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4957 || GET_CODE (XEXP (value, 1)) == CONST))
4960 subtarget = gen_reg_rtx (GET_MODE (value));
4961 emit_move_insn (subtarget, value);
/* Select the optab matching the top-level arithmetic code.  */
4965 if (GET_CODE (value) == PLUS)
4966 binoptab = add_optab;
4967 else if (GET_CODE (value) == MINUS)
4968 binoptab = sub_optab;
4969 else if (GET_CODE (value) == MULT)
/* Multiplication: force both operands and let expand_mult emit it.  */
4971 op2 = XEXP (value, 1);
4972 if (!CONSTANT_P (op2)
4973 && !(GET_CODE (op2) == REG && op2 != subtarget))
4975 tmp = force_operand (XEXP (value, 0), subtarget);
4976 return expand_mult (GET_MODE (value), tmp,
4977 force_operand (op2, NULL_RTX),
4983 op2 = XEXP (value, 1);
4984 if (!CONSTANT_P (op2)
4985 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Subtracting a constant is rewritten as adding its negation.  */
4987 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4989 binoptab = add_optab;
4990 op2 = negate_rtx (GET_MODE (value), op2);
4993 /* Check for an addition with OP2 a constant integer and our first
4994 operand a PLUS of a virtual register and something else. In that
4995 case, we want to emit the sum of the virtual register and the
4996 constant first and then add the other value. This allows virtual
4997 register instantiation to simply modify the constant rather than
4998 creating another one around this addition. */
4999 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5000 && GET_CODE (XEXP (value, 0)) == PLUS
5001 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5002 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5003 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5005 rtx temp = expand_binop (GET_MODE (value), binoptab,
5006 XEXP (XEXP (value, 0), 0), op2,
5007 subtarget, 0, OPTAB_LIB_WIDEN)
5008 return expand_binop (GET_MODE (value), binoptab, temp,
5009 force_operand (XEXP (XEXP (value, 0), 1), 0),
5010 target, 0, OPTAB_LIB_WIDEN);
/* General PLUS/MINUS: recursively force each operand, then expand.  */
5013 tmp = force_operand (XEXP (value, 0), subtarget);
5014 return expand_binop (GET_MODE (value), binoptab, tmp,
5015 force_operand (op2, NULL_RTX),
5016 target, 0, OPTAB_LIB_WIDEN);
5017 /* We give UNSIGNEDP = 0 to expand_binop
5018 because the only operations we are expanding here are signed ones. */
5023 /* Subroutine of expand_expr:
5024 save the non-copied parts (LIST) of an expr (LHS), and return a list
5025 which can restore these values to their previous values,
5026 should something modify their storage. */
/* NOTE(review): lines are elided in this excerpt.  Per the comment above,
   this saves the non-copied parts (LIST) of LHS into temporaries and
   returns a list that can restore their previous values.  */
5029 save_noncopied_parts (lhs, list)
5036 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* A nested TREE_LIST is processed recursively and its result chained on.  */
5037 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5038 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5041 tree part = TREE_VALUE (tail);
5042 tree part_type = TREE_TYPE (part);
/* Build a reference to this field of LHS and a temporary to hold it.  */
5043 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5044 rtx target = assign_temp (part_type, 0, 1, 1);
/* Make sure the temporary's address is valid for this mode.  */
5045 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5046 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
/* Record the (field-ref, RTL_EXPR holding the saved value) pair, then
   actually store the field's current value into the temporary.  */
5047 parts = tree_cons (to_be_saved,
5048 build (RTL_EXPR, part_type, NULL_TREE,
5051 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5056 /* Subroutine of expand_expr:
5057 record the non-copied parts (LIST) of an expr (LHS), and return a list
5058 which specifies the initial values of these parts. */
/* NOTE(review): lines are elided in this excerpt.  Per the comment above,
   this records the non-copied parts (LIST) of LHS and returns a list that
   specifies their initial values.  */
5061 init_noncopied_parts (lhs, list)
5068 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* A nested TREE_LIST is processed recursively and its result chained on.  */
5069 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5070 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5073 tree part = TREE_VALUE (tail);
5074 tree part_type = TREE_TYPE (part);
/* Pair each initial value (TREE_PURPOSE) with a reference to the field
   of LHS that should receive it.  */
5075 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5076 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5081 /* Subroutine of expand_expr: return nonzero iff there is no way that
5082 EXP can reference X, which is being modified. TOP_P is nonzero if this
5083 call is going to be used to determine whether we need a temporary
5084 for EXP, as opposed to a recursive call to this function.
5086 It is always safe for this routine to return zero since it merely
5087 searches for optimization opportunities. */
/* NOTE(review): lines are elided in this excerpt.  Per the comment above,
   returns nonzero iff there is no way EXP can reference X (which is being
   modified); returning zero is always safe.  */
5090 safe_from_p (x, exp, top_p)
/* Static state used to temporarily rewrite SAVE_EXPRs to ERROR_MARK so
   repeated SAVE_EXPR nodes are not re-examined (see the long comment in
   the SAVE_EXPR case below).  Only valid within one top-level call.  */
5097 static int save_expr_count;
5098 static int save_expr_size = 0;
5099 static tree *save_expr_rewritten;
5100 static tree save_expr_trees[256];
5103 /* If EXP has varying size, we MUST use a target since we currently
5104 have no way of allocating temporaries of variable size
5105 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5106 So we assume here that something at a higher level has prevented a
5107 clash. This is somewhat bogus, but the best we can do. Only
5108 do this when X is BLKmode and when we are at the top level. */
5109 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5110 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5111 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5112 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5113 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5115 && GET_MODE (x) == BLKmode))
/* At the top level, set up the SAVE_EXPR rewrite buffer, recurse, and
   then restore every SAVE_EXPR we turned into an ERROR_MARK.  */
5118 if (top_p && save_expr_size == 0)
5122 save_expr_count = 0;
5123 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5124 save_expr_rewritten = &save_expr_trees[0];
5126 rtn = safe_from_p (x, exp, 1);
5128 for (i = 0; i < save_expr_count; ++i)
5130 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5132 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5140 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5141 find the underlying pseudo. */
5142 if (GET_CODE (x) == SUBREG)
5145 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5149 /* If X is a location in the outgoing argument area, it is always safe. */
5150 if (GET_CODE (x) == MEM
5151 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5152 || (GET_CODE (XEXP (x, 0)) == PLUS
5153 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* Dispatch first on the tree code class ('d', 'x', '1', '2', etc.),
   then on specific codes below.  */
5156 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5159 exp_rtl = DECL_RTL (exp);
/* A TREE_LIST is safe iff its value and its chain are both safe.  */
5166 if (TREE_CODE (exp) == TREE_LIST)
5167 return ((TREE_VALUE (exp) == 0
5168 || safe_from_p (x, TREE_VALUE (exp), 0))
5169 && (TREE_CHAIN (exp) == 0
5170 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5171 else if (TREE_CODE (exp) == ERROR_MARK)
5172 return 1; /* An already-visited SAVE_EXPR? */
5177 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5181 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5182 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5186 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5187 the expression. If it is set, we conflict iff we are that rtx or
5188 both are in memory. Otherwise, we check all operands of the
5189 expression recursively. */
5191 switch (TREE_CODE (exp))
/* Address-of is safe when the operand is static or itself safe.  */
5194 return (staticp (TREE_OPERAND (exp, 0))
5195 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5196 || TREE_STATIC (exp));
5199 if (GET_CODE (x) == MEM)
5204 exp_rtl = CALL_EXPR_RTL (exp);
5207 /* Assume that the call will clobber all hard registers and
5209 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5210 || GET_CODE (x) == MEM)
5217 /* If a sequence exists, we would have to scan every instruction
5218 in the sequence to see if it was safe. This is probably not
5220 if (RTL_EXPR_SEQUENCE (exp))
5223 exp_rtl = RTL_EXPR_RTL (exp);
5226 case WITH_CLEANUP_EXPR:
5227 exp_rtl = RTL_EXPR_RTL (exp);
5230 case CLEANUP_POINT_EXPR:
5231 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5234 exp_rtl = SAVE_EXPR_RTL (exp);
5238 /* This SAVE_EXPR might appear many times in the top-level
5239 safe_from_p() expression, and if it has a complex
5240 subexpression, examining it multiple times could result
5241 in a combinatorial explosion. E.g. on an Alpha
5242 running at least 200MHz, a Fortran test case compiled with
5243 optimization took about 28 minutes to compile -- even though
5244 it was only a few lines long, and the complicated line causing
5245 so much time to be spent in the earlier version of safe_from_p()
5246 had only 293 or so unique nodes.
5248 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5249 where it is so we can turn it back in the top-level safe_from_p()
5252 /* For now, don't bother re-sizing the array. */
5253 if (save_expr_count >= save_expr_size)
5255 save_expr_rewritten[save_expr_count++] = exp;
5257 nops = tree_code_length[(int) SAVE_EXPR];
5258 for (i = 0; i < nops; i++)
5260 tree operand = TREE_OPERAND (exp, i);
5261 if (operand == NULL_TREE)
/* Mark EXP visited while examining its operands, restoring the real
   code around each recursive check.  */
5263 TREE_SET_CODE (exp, ERROR_MARK);
5264 if (!safe_from_p (x, operand, 0))
5266 TREE_SET_CODE (exp, SAVE_EXPR);
5268 TREE_SET_CODE (exp, ERROR_MARK);
5272 /* The only operand we look at is operand 1. The rest aren't
5273 part of the expression. */
5274 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5276 case METHOD_CALL_EXPR:
5277 /* This takes a rtx argument, but shouldn't appear here. */
5284 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recursively check every operand of EXP.  */
5288 nops = tree_code_length[(int) TREE_CODE (exp)];
5289 for (i = 0; i < nops; i++)
5290 if (TREE_OPERAND (exp, i) != 0
5291 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5295 /* If we have an rtl, find any enclosed object. Then see if we conflict
5299 if (GET_CODE (exp_rtl) == SUBREG)
5301 exp_rtl = SUBREG_REG (exp_rtl);
5302 if (GET_CODE (exp_rtl) == REG
5303 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5307 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5308 are memory and EXP is not readonly. */
5309 return ! (rtx_equal_p (x, exp_rtl)
5310 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5311 && ! TREE_READONLY (exp)));
5314 /* If we reach here, it is safe. */
5318 /* Subroutine of expand_expr: return nonzero iff EXP is an
5319 expression whose type is statically determinable. */
/* NOTE(review): the function header line is elided from this excerpt;
   only the classifying condition of the body is visible.  */
5325 if (TREE_CODE (exp) == PARM_DECL
5326 || TREE_CODE (exp) == VAR_DECL
5327 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5328 || TREE_CODE (exp) == COMPONENT_REF
5329 || TREE_CODE (exp) == ARRAY_REF)
5334 /* Subroutine of expand_expr: return rtx if EXP is a
5335 variable or parameter; else return 0. */
/* NOTE(review): the function header and case labels are elided from this
   excerpt; the visible return presumably serves the VAR_DECL/PARM_DECL
   cases — confirm against the full source.  */
5342 switch (TREE_CODE (exp))
5346 return DECL_RTL (exp);
5352 #ifdef MAX_INTEGER_COMPUTATION_MODE
/* Abort compilation (via fatal) if EXP performs integer arithmetic in a
   mode wider than the target's MAX_INTEGER_COMPUTATION_MODE.  Checks the
   result type and, for unary/binary/relational ops, each operand type.  */
5354 check_max_integer_computation_mode (exp)
5357 enum tree_code code = TREE_CODE (exp);
5358 enum machine_mode mode;
5360 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5361 if (code == NOP_EXPR
5362 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5365 /* First check the type of the overall operation. We need only look at
5366 unary, binary and relational operations. */
5367 if (TREE_CODE_CLASS (code) == '1'
5368 || TREE_CODE_CLASS (code) == '2'
5369 || TREE_CODE_CLASS (code) == '<')
5371 mode = TYPE_MODE (TREE_TYPE (exp));
5372 if (GET_MODE_CLASS (mode) == MODE_INT
5373 && mode > MAX_INTEGER_COMPUTATION_MODE)
5374 fatal ("unsupported wide integer operation");
5377 /* Check operand of a unary op. */
5378 if (TREE_CODE_CLASS (code) == '1')
5380 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5381 if (GET_MODE_CLASS (mode) == MODE_INT
5382 && mode > MAX_INTEGER_COMPUTATION_MODE)
5383 fatal ("unsupported wide integer operation");
5386 /* Check operands of a binary/comparison op. */
5387 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5389 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5390 if (GET_MODE_CLASS (mode) == MODE_INT
5391 && mode > MAX_INTEGER_COMPUTATION_MODE)
5392 fatal ("unsupported wide integer operation");
5394 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5395 if (GET_MODE_CLASS (mode) == MODE_INT
5396 && mode > MAX_INTEGER_COMPUTATION_MODE)
5397 fatal ("unsupported wide integer operation");
5403 /* expand_expr: generate code for computing expression EXP.
5404 An rtx for the computed value is returned. The value is never null.
5405 In the case of a void EXP, const0_rtx is returned.
5407 The value may be stored in TARGET if TARGET is nonzero.
5408 TARGET is just a suggestion; callers must assume that
5409 the rtx returned may not be the same as TARGET.
5411 If TARGET is CONST0_RTX, it means that the value will be ignored.
5413 If TMODE is not VOIDmode, it suggests generating the
5414 result in mode TMODE. But this is done only when convenient.
5415 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5416 TMODE is just a suggestion; callers must assume that
5417 the rtx returned may not have mode TMODE.
5419 Note that TARGET may have neither TMODE nor MODE. In that case, it
5420 probably will not be used.
5422 If MODIFIER is EXPAND_SUM then when EXP is an addition
5423 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5424 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5425 products as above, or REG or MEM, or constant.
5426 Ordinarily in such cases we would output mul or add instructions
5427 and then return a pseudo reg containing the sum.
5429 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5430 it also marks a label as absolutely required (it can't be dead).
5431 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5432 This is used for outputting expressions used in initializers.
5434 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5435 with a constant address even if that address is not normally legitimate.
5436 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5439 expand_expr (exp, target, tmode, modifier)
5442 enum machine_mode tmode;
5443 enum expand_modifier modifier;
5445 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5446 This is static so it will be accessible to our recursive callees. */
5447 static tree placeholder_list = 0;
5448 register rtx op0, op1, temp;
5449 tree type = TREE_TYPE (exp);
5450 int unsignedp = TREE_UNSIGNED (type);
5451 register enum machine_mode mode = TYPE_MODE (type);
5452 register enum tree_code code = TREE_CODE (exp);
5454 /* Use subtarget as the target for operand 0 of a binary operation. */
5455 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5456 rtx original_target = target;
5457 int ignore = (target == const0_rtx
5458 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5459 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5460 || code == COND_EXPR)
5461 && TREE_CODE (type) == VOID_TYPE));
5463 /* Used by check-memory-usage to make modifier read only. */
5464 enum expand_modifier ro_modifier;
5466 /* Make a read-only version of the modifier. */
5467 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5468 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5469 ro_modifier = modifier;
5471 ro_modifier = EXPAND_NORMAL;
5473 /* Don't use hard regs as subtargets, because the combiner
5474 can only handle pseudo regs. */
5475 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5477 /* Avoid subtargets inside loops,
5478 since they hide some invariant expressions. */
5479 if (preserve_subexpressions_p ())
5482 /* If we are going to ignore this result, we need only do something
5483 if there is a side-effect somewhere in the expression. If there
5484 is, short-circuit the most common cases here. Note that we must
5485 not call expand_expr with anything but const0_rtx in case this
5486 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5490 if (! TREE_SIDE_EFFECTS (exp))
5493 /* Ensure we reference a volatile object even if value is ignored. */
5494 if (TREE_THIS_VOLATILE (exp)
5495 && TREE_CODE (exp) != FUNCTION_DECL
5496 && mode != VOIDmode && mode != BLKmode)
5498 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5499 if (GET_CODE (temp) == MEM)
5500 temp = copy_to_reg (temp);
5504 if (TREE_CODE_CLASS (code) == '1')
5505 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5506 VOIDmode, ro_modifier);
5507 else if (TREE_CODE_CLASS (code) == '2'
5508 || TREE_CODE_CLASS (code) == '<')
5510 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5511 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5514 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5515 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5516 /* If the second operand has no side effects, just evaluate
5518 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5519 VOIDmode, ro_modifier);
5524 #ifdef MAX_INTEGER_COMPUTATION_MODE
5526 && TREE_CODE (exp) != INTEGER_CST
5527 && TREE_CODE (exp) != PARM_DECL
5528 && TREE_CODE (exp) != ARRAY_REF
5529 && TREE_CODE (exp) != COMPONENT_REF
5530 && TREE_CODE (exp) != BIT_FIELD_REF
5531 && TREE_CODE (exp) != INDIRECT_REF
5532 && TREE_CODE (exp) != VAR_DECL)
5534 enum machine_mode mode = GET_MODE (target);
5536 if (GET_MODE_CLASS (mode) == MODE_INT
5537 && mode > MAX_INTEGER_COMPUTATION_MODE)
5538 fatal ("unsupported wide integer operation");
5541 if (TREE_CODE (exp) != INTEGER_CST
5542 && TREE_CODE (exp) != PARM_DECL
5543 && TREE_CODE (exp) != ARRAY_REF
5544 && TREE_CODE (exp) != COMPONENT_REF
5545 && TREE_CODE (exp) != BIT_FIELD_REF
5546 && TREE_CODE (exp) != INDIRECT_REF
5547 && TREE_CODE (exp) != VAR_DECL
5548 && GET_MODE_CLASS (tmode) == MODE_INT
5549 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5550 fatal ("unsupported wide integer operation");
5552 check_max_integer_computation_mode (exp);
5555 /* If will do cse, generate all results into pseudo registers
5556 since 1) that allows cse to find more things
5557 and 2) otherwise cse could produce an insn the machine
5560 if (! cse_not_expected && mode != BLKmode && target
5561 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5568 tree function = decl_function_context (exp);
5569 /* Handle using a label in a containing function. */
5570 if (function != current_function_decl
5571 && function != inline_function_decl && function != 0)
5573 struct function *p = find_function_data (function);
5574 /* Allocate in the memory associated with the function
5575 that the label is in. */
5576 push_obstacks (p->function_obstack,
5577 p->function_maybepermanent_obstack);
5579 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5584 else if (modifier == EXPAND_INITIALIZER)
5585 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5586 label_rtx (exp), forced_labels);
5587 temp = gen_rtx_MEM (FUNCTION_MODE,
5588 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5589 if (function != current_function_decl
5590 && function != inline_function_decl && function != 0)
5591 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5596 if (DECL_RTL (exp) == 0)
5598 error_with_decl (exp, "prior parameter's size depends on `%s'");
5599 return CONST0_RTX (mode);
5602 /* ... fall through ... */
5605 /* If a static var's type was incomplete when the decl was written,
5606 but the type is complete now, lay out the decl now. */
5607 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5608 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5610 push_obstacks_nochange ();
5611 end_temporary_allocation ();
5612 layout_decl (exp, 0);
5613 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5617 /* Although static-storage variables start off initialized, according to
5618 ANSI C, a memcpy could overwrite them with uninitialized values. So
5619 we check them too. This also lets us check for read-only variables
5620 accessed via a non-const declaration, in case it won't be detected
5621 any other way (e.g., in an embedded system or OS kernel without
5624 Aggregates are not checked here; they're handled elsewhere. */
5625 if (current_function_check_memory_usage && code == VAR_DECL
5626 && GET_CODE (DECL_RTL (exp)) == MEM
5627 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5629 enum memory_use_mode memory_usage;
5630 memory_usage = get_memory_usage_from_modifier (modifier);
5632 if (memory_usage != MEMORY_USE_DONT)
5633 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5634 XEXP (DECL_RTL (exp), 0), ptr_mode,
5635 GEN_INT (int_size_in_bytes (type)),
5636 TYPE_MODE (sizetype),
5637 GEN_INT (memory_usage),
5638 TYPE_MODE (integer_type_node));
5641 /* ... fall through ... */
5645 if (DECL_RTL (exp) == 0)
5648 /* Ensure variable marked as used even if it doesn't go through
5649 a parser. If it hasn't been used yet, write out an external
5651 if (! TREE_USED (exp))
5653 assemble_external (exp);
5654 TREE_USED (exp) = 1;
5657 /* Show we haven't gotten RTL for this yet. */
5660 /* Handle variables inherited from containing functions. */
5661 context = decl_function_context (exp);
5663 /* We treat inline_function_decl as an alias for the current function
5664 because that is the inline function whose vars, types, etc.
5665 are being merged into the current function.
5666 See expand_inline_function. */
5668 if (context != 0 && context != current_function_decl
5669 && context != inline_function_decl
5670 /* If var is static, we don't need a static chain to access it. */
5671 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5672 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5676 /* Mark as non-local and addressable. */
5677 DECL_NONLOCAL (exp) = 1;
5678 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5680 mark_addressable (exp);
5681 if (GET_CODE (DECL_RTL (exp)) != MEM)
5683 addr = XEXP (DECL_RTL (exp), 0);
5684 if (GET_CODE (addr) == MEM)
5685 addr = gen_rtx_MEM (Pmode,
5686 fix_lexical_addr (XEXP (addr, 0), exp));
5688 addr = fix_lexical_addr (addr, exp);
5689 temp = change_address (DECL_RTL (exp), mode, addr);
5692 /* This is the case of an array whose size is to be determined
5693 from its initializer, while the initializer is still being parsed.
5696 else if (GET_CODE (DECL_RTL (exp)) == MEM
5697 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5698 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5699 XEXP (DECL_RTL (exp), 0));
5701 /* If DECL_RTL is memory, we are in the normal case and either
5702 the address is not valid or it is not a register and -fforce-addr
5703 is specified, get the address into a register. */
5705 else if (GET_CODE (DECL_RTL (exp)) == MEM
5706 && modifier != EXPAND_CONST_ADDRESS
5707 && modifier != EXPAND_SUM
5708 && modifier != EXPAND_INITIALIZER
5709 && (! memory_address_p (DECL_MODE (exp),
5710 XEXP (DECL_RTL (exp), 0))
5712 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5713 temp = change_address (DECL_RTL (exp), VOIDmode,
5714 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5716 /* If we got something, return it. But first, set the alignment
5717 in case the address is a register. */
5720 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5721 mark_reg_pointer (XEXP (temp, 0),
5722 DECL_ALIGN (exp) / BITS_PER_UNIT);
5727 /* If the mode of DECL_RTL does not match that of the decl, it
5728 must be a promoted value. We return a SUBREG of the wanted mode,
5729 but mark it so that we know that it was already extended. */
5731 if (GET_CODE (DECL_RTL (exp)) == REG
5732 && GET_MODE (DECL_RTL (exp)) != mode)
5734 /* Get the signedness used for this variable. Ensure we get the
5735 same mode we got when the variable was declared. */
5736 if (GET_MODE (DECL_RTL (exp))
5737 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5740 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5741 SUBREG_PROMOTED_VAR_P (temp) = 1;
5742 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5746 return DECL_RTL (exp);
5749 return immed_double_const (TREE_INT_CST_LOW (exp),
5750 TREE_INT_CST_HIGH (exp),
5754 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5755 EXPAND_MEMORY_USE_BAD);
5758 /* If optimized, generate immediate CONST_DOUBLE
5759 which will be turned into memory by reload if necessary.
5761 We used to force a register so that loop.c could see it. But
5762 this does not allow gen_* patterns to perform optimizations with
5763 the constants. It also produces two insns in cases like "x = 1.0;".
5764 On most machines, floating-point constants are not permitted in
5765 many insns, so we'd end up copying it to a register in any case.
5767 Now, we do the copying in expand_binop, if appropriate. */
5768 return immed_real_const (exp);
5772 if (! TREE_CST_RTL (exp))
5773 output_constant_def (exp);
5775 /* TREE_CST_RTL probably contains a constant address.
5776 On RISC machines where a constant address isn't valid,
5777 make some insns to get that address into a register. */
5778 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5779 && modifier != EXPAND_CONST_ADDRESS
5780 && modifier != EXPAND_INITIALIZER
5781 && modifier != EXPAND_SUM
5782 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5784 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5785 return change_address (TREE_CST_RTL (exp), VOIDmode,
5786 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5787 return TREE_CST_RTL (exp);
5789 case EXPR_WITH_FILE_LOCATION:
5792 char *saved_input_filename = input_filename;
5793 int saved_lineno = lineno;
5794 input_filename = EXPR_WFL_FILENAME (exp);
5795 lineno = EXPR_WFL_LINENO (exp);
5796 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5797 emit_line_note (input_filename, lineno);
5798 /* Possibly avoid switching back and forth here */
5799 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5800 input_filename = saved_input_filename;
5801 lineno = saved_lineno;
5806 context = decl_function_context (exp);
5808 /* If this SAVE_EXPR was at global context, assume we are an
5809 initialization function and move it into our context. */
5811 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5813 /* We treat inline_function_decl as an alias for the current function
5814 because that is the inline function whose vars, types, etc.
5815 are being merged into the current function.
5816 See expand_inline_function. */
5817 if (context == current_function_decl || context == inline_function_decl)
5820 /* If this is non-local, handle it. */
5823 /* The following call just exists to abort if the context is
5824 not of a containing function. */
5825 find_function_data (context);
5827 temp = SAVE_EXPR_RTL (exp);
5828 if (temp && GET_CODE (temp) == REG)
5830 put_var_into_stack (exp);
5831 temp = SAVE_EXPR_RTL (exp);
5833 if (temp == 0 || GET_CODE (temp) != MEM)
5835 return change_address (temp, mode,
5836 fix_lexical_addr (XEXP (temp, 0), exp));
5838 if (SAVE_EXPR_RTL (exp) == 0)
5840 if (mode == VOIDmode)
5843 temp = assign_temp (type, 3, 0, 0);
5845 SAVE_EXPR_RTL (exp) = temp;
5846 if (!optimize && GET_CODE (temp) == REG)
5847 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5850 /* If the mode of TEMP does not match that of the expression, it
5851 must be a promoted value. We pass store_expr a SUBREG of the
5852 wanted mode but mark it so that we know that it was already
5853 extended. Note that `unsignedp' was modified above in
5856 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5858 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5859 SUBREG_PROMOTED_VAR_P (temp) = 1;
5860 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5863 if (temp == const0_rtx)
5864 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5865 EXPAND_MEMORY_USE_BAD);
5867 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5869 TREE_USED (exp) = 1;
5872 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5873 must be a promoted value. We return a SUBREG of the wanted mode,
5874 but mark it so that we know that it was already extended. */
5876 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5877 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5879 /* Compute the signedness and make the proper SUBREG. */
5880 promote_mode (type, mode, &unsignedp, 0);
5881 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5882 SUBREG_PROMOTED_VAR_P (temp) = 1;
5883 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5887 return SAVE_EXPR_RTL (exp);
5892 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5893 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5897 case PLACEHOLDER_EXPR:
5899 tree placeholder_expr;
5901 /* If there is an object on the head of the placeholder list,
5902 see if some object in it of type TYPE or a pointer to it. For
5903 further information, see tree.def. */
5904 for (placeholder_expr = placeholder_list;
5905 placeholder_expr != 0;
5906 placeholder_expr = TREE_CHAIN (placeholder_expr))
5908 tree need_type = TYPE_MAIN_VARIANT (type);
5910 tree old_list = placeholder_list;
5913 /* Find the outermost reference that is of the type we want.
5914 If none, see if any object has a type that is a pointer to
5915 the type we want. */
5916 for (elt = TREE_PURPOSE (placeholder_expr);
5917 elt != 0 && object == 0;
5919 = ((TREE_CODE (elt) == COMPOUND_EXPR
5920 || TREE_CODE (elt) == COND_EXPR)
5921 ? TREE_OPERAND (elt, 1)
5922 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5923 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5924 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5925 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5926 ? TREE_OPERAND (elt, 0) : 0))
5927 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5930 for (elt = TREE_PURPOSE (placeholder_expr);
5931 elt != 0 && object == 0;
5933 = ((TREE_CODE (elt) == COMPOUND_EXPR
5934 || TREE_CODE (elt) == COND_EXPR)
5935 ? TREE_OPERAND (elt, 1)
5936 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5937 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5938 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5939 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5940 ? TREE_OPERAND (elt, 0) : 0))
5941 if (POINTER_TYPE_P (TREE_TYPE (elt))
5942 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5944 object = build1 (INDIRECT_REF, need_type, elt);
5948 /* Expand this object skipping the list entries before
5949 it was found in case it is also a PLACEHOLDER_EXPR.
5950 In that case, we want to translate it using subsequent
5952 placeholder_list = TREE_CHAIN (placeholder_expr);
5953 temp = expand_expr (object, original_target, tmode,
5955 placeholder_list = old_list;
5961 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5964 case WITH_RECORD_EXPR:
5965 /* Put the object on the placeholder list, expand our first operand,
5966 and pop the list. */
5967 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5969 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5970 tmode, ro_modifier);
5971 placeholder_list = TREE_CHAIN (placeholder_list);
5975 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
5976 expand_goto (TREE_OPERAND (exp, 0));
5978 expand_computed_goto (TREE_OPERAND (exp, 0));
5982 expand_exit_loop_if_false (NULL_PTR,
5983 invert_truthvalue (TREE_OPERAND (exp, 0)));
5986 case LABELED_BLOCK_EXPR:
5987 if (LABELED_BLOCK_BODY (exp))
5988 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
5989 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
5992 case EXIT_BLOCK_EXPR:
5993 if (EXIT_BLOCK_RETURN (exp))
5994 really_sorry ("returned value in block_exit_expr");
5995 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6000 expand_start_loop (1);
6001 expand_expr_stmt (TREE_OPERAND (exp, 0));
6009 tree vars = TREE_OPERAND (exp, 0);
6010 int vars_need_expansion = 0;
6012 /* Need to open a binding contour here because
6013 if there are any cleanups they must be contained here. */
6014 expand_start_bindings (0);
6016 /* Mark the corresponding BLOCK for output in its proper place. */
6017 if (TREE_OPERAND (exp, 2) != 0
6018 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6019 insert_block (TREE_OPERAND (exp, 2));
6021 /* If VARS have not yet been expanded, expand them now. */
6024 if (DECL_RTL (vars) == 0)
6026 vars_need_expansion = 1;
6029 expand_decl_init (vars);
6030 vars = TREE_CHAIN (vars);
6033 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6035 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6041 if (RTL_EXPR_SEQUENCE (exp))
6043 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6045 emit_insns (RTL_EXPR_SEQUENCE (exp));
6046 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6048 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6049 free_temps_for_rtl_expr (exp);
6050 return RTL_EXPR_RTL (exp);
6053 /* If we don't need the result, just ensure we evaluate any
6058 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6059 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6060 EXPAND_MEMORY_USE_BAD);
6064 /* All elts simple constants => refer to a constant in memory. But
6065 if this is a non-BLKmode mode, let it store a field at a time
6066 since that should make a CONST_INT or CONST_DOUBLE when we
6067 fold. Likewise, if we have a target we can use, it is best to
6068 store directly into the target unless the type is large enough
6069 that memcpy will be used. If we are making an initializer and
6070 all operands are constant, put it in memory as well. */
6071 else if ((TREE_STATIC (exp)
6072 && ((mode == BLKmode
6073 && ! (target != 0 && safe_from_p (target, exp, 1)))
6074 || TREE_ADDRESSABLE (exp)
6075 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6076 && (move_by_pieces_ninsns
6077 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6078 TYPE_ALIGN (type) / BITS_PER_UNIT)
6080 && ! mostly_zeros_p (exp))))
6081 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6083 rtx constructor = output_constant_def (exp);
6084 if (modifier != EXPAND_CONST_ADDRESS
6085 && modifier != EXPAND_INITIALIZER
6086 && modifier != EXPAND_SUM
6087 && (! memory_address_p (GET_MODE (constructor),
6088 XEXP (constructor, 0))
6090 && GET_CODE (XEXP (constructor, 0)) != REG)))
6091 constructor = change_address (constructor, VOIDmode,
6092 XEXP (constructor, 0));
6098 /* Handle calls that pass values in multiple non-contiguous
6099 locations. The Irix 6 ABI has examples of this. */
6100 if (target == 0 || ! safe_from_p (target, exp, 1)
6101 || GET_CODE (target) == PARALLEL)
6103 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6104 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6106 target = assign_temp (type, 0, 1, 1);
6109 if (TREE_READONLY (exp))
6111 if (GET_CODE (target) == MEM)
6112 target = copy_rtx (target);
6114 RTX_UNCHANGING_P (target) = 1;
6117 store_constructor (exp, target, 0);
6123 tree exp1 = TREE_OPERAND (exp, 0);
6126 tree string = string_constant (exp1, &index);
6129 /* Try to optimize reads from const strings. */
6131 && TREE_CODE (string) == STRING_CST
6132 && TREE_CODE (index) == INTEGER_CST
6133 && !TREE_INT_CST_HIGH (index)
6134 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6135 && GET_MODE_CLASS (mode) == MODE_INT
6136 && GET_MODE_SIZE (mode) == 1
6137 && modifier != EXPAND_MEMORY_USE_WO)
6138 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6140 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6141 op0 = memory_address (mode, op0);
6143 if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6145 enum memory_use_mode memory_usage;
6146 memory_usage = get_memory_usage_from_modifier (modifier);
6148 if (memory_usage != MEMORY_USE_DONT)
6150 in_check_memory_usage = 1;
6151 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6153 GEN_INT (int_size_in_bytes (type)),
6154 TYPE_MODE (sizetype),
6155 GEN_INT (memory_usage),
6156 TYPE_MODE (integer_type_node));
6157 in_check_memory_usage = 0;
6161 temp = gen_rtx_MEM (mode, op0);
6162 /* If address was computed by addition,
6163 mark this as an element of an aggregate. */
6164 if (TREE_CODE (exp1) == PLUS_EXPR
6165 || (TREE_CODE (exp1) == SAVE_EXPR
6166 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6167 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6168 || (TREE_CODE (exp1) == ADDR_EXPR
6169 && (exp2 = TREE_OPERAND (exp1, 0))
6170 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6171 MEM_IN_STRUCT_P (temp) = 1;
6173 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
6174 into some aggregate too. In theory we could fold this into the
6175 previous check and use rtx_addr_varies_p there too.
6177 However, this seems safer. */
6178 if (!MEM_IN_STRUCT_P (temp)
6179 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
6180 /* This may have been an array reference to the first element
6181 that was optimized away from being an addition. */
6182 || (TREE_CODE (exp1) == NOP_EXPR
6183 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6185 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6187 && (AGGREGATE_TYPE_P
6188 (TREE_TYPE (TREE_TYPE
6189 (TREE_OPERAND (exp1, 0))))))))))
6190 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
6192 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6193 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6195 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6196 here, because, in C and C++, the fact that a location is accessed
6197 through a pointer to const does not mean that the value there can
6198 never change. Languages where it can never change should
6199 also set TREE_STATIC. */
6200 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6205 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6209 tree array = TREE_OPERAND (exp, 0);
6210 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6211 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6212 tree index = TREE_OPERAND (exp, 1);
6213 tree index_type = TREE_TYPE (index);
6216 /* Optimize the special-case of a zero lower bound.
6218 We convert the low_bound to sizetype to avoid some problems
6219 with constant folding. (E.g. suppose the lower bound is 1,
6220 and its mode is QI. Without the conversion, (ARRAY
6221 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6222 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6224 But sizetype isn't quite right either (especially if
6225 the lowbound is negative). FIXME */
6227 if (! integer_zerop (low_bound))
6228 index = fold (build (MINUS_EXPR, index_type, index,
6229 convert (sizetype, low_bound)));
6231 /* Fold an expression like: "foo"[2].
6232 This is not done in fold so it won't happen inside &.
6233 Don't fold if this is for wide characters since it's too
6234 difficult to do correctly and this is a very rare case. */
6236 if (TREE_CODE (array) == STRING_CST
6237 && TREE_CODE (index) == INTEGER_CST
6238 && !TREE_INT_CST_HIGH (index)
6239 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6240 && GET_MODE_CLASS (mode) == MODE_INT
6241 && GET_MODE_SIZE (mode) == 1)
6242 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6244 /* If this is a constant index into a constant array,
6245 just get the value from the array. Handle both the cases when
6246 we have an explicit constructor and when our operand is a variable
6247 that was declared const. */
6249 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6251 if (TREE_CODE (index) == INTEGER_CST
6252 && TREE_INT_CST_HIGH (index) == 0)
6254 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6256 i = TREE_INT_CST_LOW (index);
6258 elem = TREE_CHAIN (elem);
6260 return expand_expr (fold (TREE_VALUE (elem)), target,
6261 tmode, ro_modifier);
6265 else if (optimize >= 1
6266 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6267 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6268 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6270 if (TREE_CODE (index) == INTEGER_CST)
6272 tree init = DECL_INITIAL (array);
6274 i = TREE_INT_CST_LOW (index);
6275 if (TREE_CODE (init) == CONSTRUCTOR)
6277 tree elem = CONSTRUCTOR_ELTS (init);
6280 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6281 elem = TREE_CHAIN (elem);
6283 return expand_expr (fold (TREE_VALUE (elem)), target,
6284 tmode, ro_modifier);
6286 else if (TREE_CODE (init) == STRING_CST
6287 && TREE_INT_CST_HIGH (index) == 0
6288 && (TREE_INT_CST_LOW (index)
6289 < TREE_STRING_LENGTH (init)))
6291 (TREE_STRING_POINTER
6292 (init)[TREE_INT_CST_LOW (index)]));
6297 /* ... fall through ... */
6301 /* If the operand is a CONSTRUCTOR, we can just extract the
6302 appropriate field if it is present. Don't do this if we have
6303 already written the data since we want to refer to that copy
6304 and varasm.c assumes that's what we'll do. */
6305 if (code != ARRAY_REF
6306 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6307 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6311 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6312 elt = TREE_CHAIN (elt))
6313 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6314 /* We can normally use the value of the field in the
6315 CONSTRUCTOR. However, if this is a bitfield in
6316 an integral mode that we can fit in a HOST_WIDE_INT,
6317 we must mask only the number of bits in the bitfield,
6318 since this is done implicitly by the constructor. If
6319 the bitfield does not meet either of those conditions,
6320 we can't do this optimization. */
6321 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6322 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6324 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6325 <= HOST_BITS_PER_WIDE_INT))))
6327 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6328 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6330 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6332 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6334 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6335 op0 = expand_and (op0, op1, target);
6339 enum machine_mode imode
6340 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6342 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6345 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6347 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6357 enum machine_mode mode1;
6363 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6364 &mode1, &unsignedp, &volatilep,
6367 /* If we got back the original object, something is wrong. Perhaps
6368 we are evaluating an expression too early. In any event, don't
6369 infinitely recurse. */
6373 /* If TEM's type is a union of variable size, pass TARGET to the inner
6374 computation, since it will need a temporary and TARGET is known
6375 to have to do. This occurs in unchecked conversion in Ada. */
6377 op0 = expand_expr (tem,
6378 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6379 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6381 ? target : NULL_RTX),
6383 modifier == EXPAND_INITIALIZER
6384 ? modifier : EXPAND_NORMAL);
6386 /* If this is a constant, put it into a register if it is a
6387 legitimate constant and memory if it isn't. */
6388 if (CONSTANT_P (op0))
6390 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6391 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6392 op0 = force_reg (mode, op0);
6394 op0 = validize_mem (force_const_mem (mode, op0));
6399 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6401 if (GET_CODE (op0) != MEM)
6404 if (GET_MODE (offset_rtx) != ptr_mode)
6406 #ifdef POINTERS_EXTEND_UNSIGNED
6407 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6409 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6413 if (GET_CODE (op0) == MEM
6414 && GET_MODE (op0) == BLKmode
6416 && (bitpos % bitsize) == 0
6417 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6418 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6420 rtx temp = change_address (op0, mode1,
6421 plus_constant (XEXP (op0, 0),
6424 if (GET_CODE (XEXP (temp, 0)) == REG)
6427 op0 = change_address (op0, mode1,
6428 force_reg (GET_MODE (XEXP (temp, 0)),
6434 op0 = change_address (op0, VOIDmode,
6435 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6436 force_reg (ptr_mode, offset_rtx)));
6439 /* Don't forget about volatility even if this is a bitfield. */
6440 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6442 op0 = copy_rtx (op0);
6443 MEM_VOLATILE_P (op0) = 1;
6446 /* Check the access. */
6447 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6449 enum memory_use_mode memory_usage;
6450 memory_usage = get_memory_usage_from_modifier (modifier);
6452 if (memory_usage != MEMORY_USE_DONT)
6457 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6458 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6460 /* Check the access right of the pointer. */
6461 if (size > BITS_PER_UNIT)
6462 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6464 GEN_INT (size / BITS_PER_UNIT),
6465 TYPE_MODE (sizetype),
6466 GEN_INT (memory_usage),
6467 TYPE_MODE (integer_type_node));
6471 /* In cases where an aligned union has an unaligned object
6472 as a field, we might be extracting a BLKmode value from
6473 an integer-mode (e.g., SImode) object. Handle this case
6474 by doing the extract into an object as wide as the field
6475 (which we know to be the width of a basic mode), then
6476 storing into memory, and changing the mode to BLKmode.
6477 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6478 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6479 if (mode1 == VOIDmode
6480 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6481 || (modifier != EXPAND_CONST_ADDRESS
6482 && modifier != EXPAND_INITIALIZER
6483 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6484 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6485 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6486 /* If the field isn't aligned enough to fetch as a memref,
6487 fetch it as a bit field. */
6488 || (SLOW_UNALIGNED_ACCESS
6489 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6490 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6492 enum machine_mode ext_mode = mode;
6494 if (ext_mode == BLKmode)
6495 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6497 if (ext_mode == BLKmode)
6499 /* In this case, BITPOS must start at a byte boundary and
6500 TARGET, if specified, must be a MEM. */
6501 if (GET_CODE (op0) != MEM
6502 || (target != 0 && GET_CODE (target) != MEM)
6503 || bitpos % BITS_PER_UNIT != 0)
6506 op0 = change_address (op0, VOIDmode,
6507 plus_constant (XEXP (op0, 0),
6508 bitpos / BITS_PER_UNIT));
6510 target = assign_temp (type, 0, 1, 1);
6512 emit_block_move (target, op0,
6513 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6520 op0 = validize_mem (op0);
6522 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6523 mark_reg_pointer (XEXP (op0, 0), alignment);
6525 op0 = extract_bit_field (op0, bitsize, bitpos,
6526 unsignedp, target, ext_mode, ext_mode,
6528 int_size_in_bytes (TREE_TYPE (tem)));
6530 /* If the result is a record type and BITSIZE is narrower than
6531 the mode of OP0, an integral mode, and this is a big endian
6532 machine, we must put the field into the high-order bits. */
6533 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6534 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6535 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6536 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6537 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6541 if (mode == BLKmode)
6543 rtx new = assign_stack_temp (ext_mode,
6544 bitsize / BITS_PER_UNIT, 0);
6546 emit_move_insn (new, op0);
6547 op0 = copy_rtx (new);
6548 PUT_MODE (op0, BLKmode);
6549 MEM_IN_STRUCT_P (op0) = 1;
6555 /* If the result is BLKmode, use that to access the object
6557 if (mode == BLKmode)
6560 /* Get a reference to just this component. */
6561 if (modifier == EXPAND_CONST_ADDRESS
6562 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6563 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6564 (bitpos / BITS_PER_UNIT)));
6566 op0 = change_address (op0, mode1,
6567 plus_constant (XEXP (op0, 0),
6568 (bitpos / BITS_PER_UNIT)));
6570 if (GET_CODE (op0) == MEM)
6571 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6573 if (GET_CODE (XEXP (op0, 0)) == REG)
6574 mark_reg_pointer (XEXP (op0, 0), alignment);
6576 MEM_IN_STRUCT_P (op0) = 1;
6577 MEM_VOLATILE_P (op0) |= volatilep;
6578 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6579 || modifier == EXPAND_CONST_ADDRESS
6580 || modifier == EXPAND_INITIALIZER)
6582 else if (target == 0)
6583 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6585 convert_move (target, op0, unsignedp);
6589 /* Intended for a reference to a buffer of a file-object in Pascal.
6590 But it's not certain that a special tree code will really be
6591 necessary for these. INDIRECT_REF might work for them. */
6597 /* Pascal set IN expression.
6600 rlo = set_low - (set_low%bits_per_word);
6601 the_word = set [ (index - rlo)/bits_per_word ];
6602 bit_index = index % bits_per_word;
6603 bitmask = 1 << bit_index;
6604 return !!(the_word & bitmask); */
6606 tree set = TREE_OPERAND (exp, 0);
6607 tree index = TREE_OPERAND (exp, 1);
6608 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6609 tree set_type = TREE_TYPE (set);
6610 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6611 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6612 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6613 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6614 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6615 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6616 rtx setaddr = XEXP (setval, 0);
6617 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6619 rtx diff, quo, rem, addr, bit, result;
6621 preexpand_calls (exp);
6623 /* If domain is empty, answer is no. Likewise if index is constant
6624 and out of bounds. */
6625 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6626 && TREE_CODE (set_low_bound) == INTEGER_CST
6627 && tree_int_cst_lt (set_high_bound, set_low_bound))
6628 || (TREE_CODE (index) == INTEGER_CST
6629 && TREE_CODE (set_low_bound) == INTEGER_CST
6630 && tree_int_cst_lt (index, set_low_bound))
6631 || (TREE_CODE (set_high_bound) == INTEGER_CST
6632 && TREE_CODE (index) == INTEGER_CST
6633 && tree_int_cst_lt (set_high_bound, index))))
6637 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6639 /* If we get here, we have to generate the code for both cases
6640 (in range and out of range). */
6642 op0 = gen_label_rtx ();
6643 op1 = gen_label_rtx ();
6645 if (! (GET_CODE (index_val) == CONST_INT
6646 && GET_CODE (lo_r) == CONST_INT))
6648 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
6649 GET_MODE (index_val), iunsignedp, 0);
6650 emit_jump_insn (gen_blt (op1));
6653 if (! (GET_CODE (index_val) == CONST_INT
6654 && GET_CODE (hi_r) == CONST_INT))
6656 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
6657 GET_MODE (index_val), iunsignedp, 0);
6658 emit_jump_insn (gen_bgt (op1));
6661 /* Calculate the element number of bit zero in the first word
6663 if (GET_CODE (lo_r) == CONST_INT)
6664 rlow = GEN_INT (INTVAL (lo_r)
6665 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6667 rlow = expand_binop (index_mode, and_optab, lo_r,
6668 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6669 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6671 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6672 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6674 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6675 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6676 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6677 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6679 addr = memory_address (byte_mode,
6680 expand_binop (index_mode, add_optab, diff,
6681 setaddr, NULL_RTX, iunsignedp,
6684 /* Extract the bit we want to examine */
6685 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6686 gen_rtx_MEM (byte_mode, addr),
6687 make_tree (TREE_TYPE (index), rem),
6689 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6690 GET_MODE (target) == byte_mode ? target : 0,
6691 1, OPTAB_LIB_WIDEN);
6693 if (result != target)
6694 convert_move (target, result, 1);
6696 /* Output the code to handle the out-of-range case. */
6699 emit_move_insn (target, const0_rtx);
6704 case WITH_CLEANUP_EXPR:
6705 if (RTL_EXPR_RTL (exp) == 0)
6708 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6709 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6711 /* That's it for this cleanup. */
6712 TREE_OPERAND (exp, 2) = 0;
6714 return RTL_EXPR_RTL (exp);
6716 case CLEANUP_POINT_EXPR:
6718 extern int temp_slot_level;
6719 /* Start a new binding layer that will keep track of all cleanup
6720 actions to be performed. */
6721 expand_start_bindings (0);
6723 target_temp_slot_level = temp_slot_level;
6725 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6726 /* If we're going to use this value, load it up now. */
6728 op0 = force_not_mem (op0);
6729 preserve_temp_slots (op0);
6730 expand_end_bindings (NULL_TREE, 0, 0);
6735 /* Check for a built-in function. */
6736 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6737 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6739 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6740 return expand_builtin (exp, target, subtarget, tmode, ignore);
6742 /* If this call was expanded already by preexpand_calls,
6743 just return the result we got. */
6744 if (CALL_EXPR_RTL (exp) != 0)
6745 return CALL_EXPR_RTL (exp);
6747 return expand_call (exp, target, ignore);
6749 case NON_LVALUE_EXPR:
6752 case REFERENCE_EXPR:
6753 if (TREE_CODE (type) == UNION_TYPE)
6755 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6758 if (mode != BLKmode)
6759 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6761 target = assign_temp (type, 0, 1, 1);
6764 if (GET_CODE (target) == MEM)
6765 /* Store data into beginning of memory target. */
6766 store_expr (TREE_OPERAND (exp, 0),
6767 change_address (target, TYPE_MODE (valtype), 0), 0);
6769 else if (GET_CODE (target) == REG)
6770 /* Store this field into a union of the proper type. */
6771 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6772 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6774 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6779 /* Return the entire union. */
6783 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6785 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6788 /* If the signedness of the conversion differs and OP0 is
6789 a promoted SUBREG, clear that indication since we now
6790 have to do the proper extension. */
6791 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6792 && GET_CODE (op0) == SUBREG)
6793 SUBREG_PROMOTED_VAR_P (op0) = 0;
6798 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6799 if (GET_MODE (op0) == mode)
6802 /* If OP0 is a constant, just convert it into the proper mode. */
6803 if (CONSTANT_P (op0))
6805 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6806 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6808 if (modifier == EXPAND_INITIALIZER)
6809 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6813 convert_to_mode (mode, op0,
6814 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6816 convert_move (target, op0,
6817 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6821 /* We come here from MINUS_EXPR when the second operand is a
6824 this_optab = add_optab;
6826 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6827 something else, make sure we add the register to the constant and
6828 then to the other thing. This case can occur during strength
6829 reduction and doing it this way will produce better code if the
6830 frame pointer or argument pointer is eliminated.
6832 fold-const.c will ensure that the constant is always in the inner
6833 PLUS_EXPR, so the only case we need to do anything about is if
6834 sp, ap, or fp is our second argument, in which case we must swap
6835 the innermost first argument and our second argument. */
6837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6838 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6839 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6840 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6841 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6842 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6844 tree t = TREE_OPERAND (exp, 1);
6846 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6847 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6850 /* If the result is to be ptr_mode and we are adding an integer to
6851 something, we might be forming a constant. So try to use
6852 plus_constant. If it produces a sum and we can't accept it,
6853 use force_operand. This allows P = &ARR[const] to generate
6854 efficient code on machines where a SYMBOL_REF is not a valid
6857 If this is an EXPAND_SUM call, always return the sum. */
6858 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6859 || mode == ptr_mode)
6861 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6862 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6863 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6865 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6867 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6868 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6869 op1 = force_operand (op1, target);
6873 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6874 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6875 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6879 if (! CONSTANT_P (op0))
6881 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6882 VOIDmode, modifier);
6883 /* Don't go to both_summands if modifier
6884 says it's not right to return a PLUS. */
6885 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6889 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6890 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6891 op0 = force_operand (op0, target);
6896 /* No sense saving up arithmetic to be done
6897 if it's all in the wrong mode to form part of an address.
6898 And force_operand won't know whether to sign-extend or
6900 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6901 || mode != ptr_mode)
6904 preexpand_calls (exp);
6905 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6909 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6912 /* Make sure any term that's a sum with a constant comes last. */
6913 if (GET_CODE (op0) == PLUS
6914 && CONSTANT_P (XEXP (op0, 1)))
6920 /* If adding to a sum including a constant,
6921 associate it to put the constant outside. */
6922 if (GET_CODE (op1) == PLUS
6923 && CONSTANT_P (XEXP (op1, 1)))
6925 rtx constant_term = const0_rtx;
6927 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6930 /* Ensure that MULT comes first if there is one. */
6931 else if (GET_CODE (op0) == MULT)
6932 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
6934 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6936 /* Let's also eliminate constants from op0 if possible. */
6937 op0 = eliminate_constant_term (op0, &constant_term);
6939 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6940 their sum should be a constant. Form it into OP1, since the
6941 result we want will then be OP0 + OP1. */
6943 temp = simplify_binary_operation (PLUS, mode, constant_term,
6948 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6951 /* Put a constant term last and put a multiplication first. */
6952 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6953 temp = op1, op1 = op0, op0 = temp;
6955 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6956 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
6959 /* For initializers, we are allowed to return a MINUS of two
6960 symbolic constants. Here we handle all cases when both operands
6962 /* Handle difference of two symbolic constants,
6963 for the sake of an initializer. */
6964 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6965 && really_constant_p (TREE_OPERAND (exp, 0))
6966 && really_constant_p (TREE_OPERAND (exp, 1)))
6968 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6969 VOIDmode, ro_modifier);
6970 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6971 VOIDmode, ro_modifier);
6973 /* If the last operand is a CONST_INT, use plus_constant of
6974 the negated constant. Else make the MINUS. */
6975 if (GET_CODE (op1) == CONST_INT)
6976 return plus_constant (op0, - INTVAL (op1));
6978 return gen_rtx_MINUS (mode, op0, op1);
6980 /* Convert A - const to A + (-const). */
6981 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6983 tree negated = fold (build1 (NEGATE_EXPR, type,
6984 TREE_OPERAND (exp, 1)));
6986 /* Deal with the case where we can't negate the constant
6988 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6990 tree newtype = signed_type (type);
6991 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6992 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6993 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6995 if (! TREE_OVERFLOW (newneg))
6996 return expand_expr (convert (type,
6997 build (PLUS_EXPR, newtype,
6999 target, tmode, ro_modifier);
7003 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7007 this_optab = sub_optab;
7011 preexpand_calls (exp);
7012 /* If first operand is constant, swap them.
7013 Thus the following special case checks need only
7014 check the second operand. */
7015 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7017 register tree t1 = TREE_OPERAND (exp, 0);
7018 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7019 TREE_OPERAND (exp, 1) = t1;
7022 /* Attempt to return something suitable for generating an
7023 indexed address, for machines that support that. */
7025 if (modifier == EXPAND_SUM && mode == ptr_mode
7026 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7027 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7029 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7032 /* Apply distributive law if OP0 is x+c. */
7033 if (GET_CODE (op0) == PLUS
7034 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7035 return gen_rtx_PLUS (mode,
7036 gen_rtx_MULT (mode, XEXP (op0, 0),
7037 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7038 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7039 * INTVAL (XEXP (op0, 1))));
7041 if (GET_CODE (op0) != REG)
7042 op0 = force_operand (op0, NULL_RTX);
7043 if (GET_CODE (op0) != REG)
7044 op0 = copy_to_mode_reg (mode, op0);
7046 return gen_rtx_MULT (mode, op0,
7047 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7050 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7053 /* Check for multiplying things that have been extended
7054 from a narrower type. If this machine supports multiplying
7055 in that narrower type with a result in the desired type,
7056 do it that way, and avoid the explicit type-conversion. */
7057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7058 && TREE_CODE (type) == INTEGER_TYPE
7059 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7060 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7061 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7062 && int_fits_type_p (TREE_OPERAND (exp, 1),
7063 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7064 /* Don't use a widening multiply if a shift will do. */
7065 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7066 > HOST_BITS_PER_WIDE_INT)
7067 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7069 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7070 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7072 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7073 /* If both operands are extended, they must either both
7074 be zero-extended or both be sign-extended. */
7075 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7077 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7079 enum machine_mode innermode
7080 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7081 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7082 ? smul_widen_optab : umul_widen_optab);
7083 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7084 ? umul_widen_optab : smul_widen_optab);
7085 if (mode == GET_MODE_WIDER_MODE (innermode))
7087 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7089 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7090 NULL_RTX, VOIDmode, 0);
7091 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7092 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7095 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7096 NULL_RTX, VOIDmode, 0);
7099 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7100 && innermode == word_mode)
7103 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7104 NULL_RTX, VOIDmode, 0);
7105 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7106 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7109 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7110 NULL_RTX, VOIDmode, 0);
7111 temp = expand_binop (mode, other_optab, op0, op1, target,
7112 unsignedp, OPTAB_LIB_WIDEN);
7113 htem = expand_mult_highpart_adjust (innermode,
7114 gen_highpart (innermode, temp),
7116 gen_highpart (innermode, temp),
7118 emit_move_insn (gen_highpart (innermode, temp), htem);
7123 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7124 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7125 return expand_mult (mode, op0, op1, target, unsignedp);
7127 case TRUNC_DIV_EXPR:
7128 case FLOOR_DIV_EXPR:
7130 case ROUND_DIV_EXPR:
7131 case EXACT_DIV_EXPR:
7132 preexpand_calls (exp);
7133 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7135 /* Possible optimization: compute the dividend with EXPAND_SUM
7136 then if the divisor is constant can optimize the case
7137 where some terms of the dividend have coeffs divisible by it. */
7138 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7139 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7140 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7143 this_optab = flodiv_optab;
7146 case TRUNC_MOD_EXPR:
7147 case FLOOR_MOD_EXPR:
7149 case ROUND_MOD_EXPR:
7150 preexpand_calls (exp);
7151 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7153 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7154 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7155 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7157 case FIX_ROUND_EXPR:
7158 case FIX_FLOOR_EXPR:
7160 abort (); /* Not used for C. */
7162 case FIX_TRUNC_EXPR:
7163 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7165 target = gen_reg_rtx (mode);
7166 expand_fix (target, op0, unsignedp);
7170 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7172 target = gen_reg_rtx (mode);
7173 /* expand_float can't figure out what to do if FROM has VOIDmode.
7174 So give it the correct mode. With -O, cse will optimize this. */
7175 if (GET_MODE (op0) == VOIDmode)
7176 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7178 expand_float (target, op0,
7179 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7183 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7184 temp = expand_unop (mode, neg_optab, op0, target, 0);
7190 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7192 /* Handle complex values specially. */
7193 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7194 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7195 return expand_complex_abs (mode, op0, target, unsignedp);
7197 /* Unsigned abs is simply the operand. Testing here means we don't
7198 risk generating incorrect code below. */
7199 if (TREE_UNSIGNED (type))
7202 return expand_abs (mode, op0, target, unsignedp,
7203 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7207 target = original_target;
7208 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7209 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7210 || GET_MODE (target) != mode
7211 || (GET_CODE (target) == REG
7212 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7213 target = gen_reg_rtx (mode);
7214 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7217 /* First try to do it with a special MIN or MAX instruction.
7218 If that does not win, use a conditional jump to select the proper
7220 this_optab = (TREE_UNSIGNED (type)
7221 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7222 : (code == MIN_EXPR ? smin_optab : smax_optab));
7224 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7229 /* At this point, a MEM target is no longer useful; we will get better
7232 if (GET_CODE (target) == MEM)
7233 target = gen_reg_rtx (mode);
7236 emit_move_insn (target, op0);
7238 op0 = gen_label_rtx ();
7240 /* If this mode is an integer too wide to compare properly,
7241 compare word by word. Rely on cse to optimize constant cases. */
7242 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
7244 if (code == MAX_EXPR)
7245 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7246 target, op1, NULL_RTX, op0);
7248 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7249 op1, target, NULL_RTX, op0);
7250 emit_move_insn (target, op1);
7254 if (code == MAX_EXPR)
7255 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7256 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7257 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7259 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7260 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7261 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
7262 if (temp == const0_rtx)
7263 emit_move_insn (target, op1);
7264 else if (temp != const_true_rtx)
7266 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7267 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7270 emit_move_insn (target, op1);
7277 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7278 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7285 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7290 /* ??? Can optimize bitwise operations with one arg constant.
7291 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7292 and (a bitwise1 b) bitwise2 b (etc)
7293 but that is probably not worth while. */
7295 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7296 boolean values when we want in all cases to compute both of them. In
7297 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7298 as actual zero-or-1 values and then bitwise anding. In cases where
7299 there cannot be any side effects, better code would be made by
7300 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7301 how to recognize those cases. */
7303 case TRUTH_AND_EXPR:
7305 this_optab = and_optab;
7310 this_optab = ior_optab;
7313 case TRUTH_XOR_EXPR:
7315 this_optab = xor_optab;
7322 preexpand_calls (exp);
7323 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7325 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7326 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7329 /* Could determine the answer when only additive constants differ. Also,
7330 the addition of one can be handled by changing the condition. */
7337 preexpand_calls (exp);
7338 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7342 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7343 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7345 && GET_CODE (original_target) == REG
7346 && (GET_MODE (original_target)
7347 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7349 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7352 if (temp != original_target)
7353 temp = copy_to_reg (temp);
7355 op1 = gen_label_rtx ();
7356 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7357 GET_MODE (temp), unsignedp, 0);
7358 emit_jump_insn (gen_beq (op1));
7359 emit_move_insn (temp, const1_rtx);
7364 /* If no set-flag instruction, must generate a conditional
7365 store into a temporary variable. Drop through
7366 and handle this like && and ||. */
7368 case TRUTH_ANDIF_EXPR:
7369 case TRUTH_ORIF_EXPR:
7371 && (target == 0 || ! safe_from_p (target, exp, 1)
7372 /* Make sure we don't have a hard reg (such as function's return
7373 value) live across basic blocks, if not optimizing. */
7374 || (!optimize && GET_CODE (target) == REG
7375 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7376 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7379 emit_clr_insn (target);
7381 op1 = gen_label_rtx ();
7382 jumpifnot (exp, op1);
7385 emit_0_to_1_insn (target);
7388 return ignore ? const0_rtx : target;
7390 case TRUTH_NOT_EXPR:
7391 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7392 /* The parser is careful to generate TRUTH_NOT_EXPR
7393 only with operands that are always zero or one. */
7394 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7395 target, 1, OPTAB_LIB_WIDEN);
7401 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7403 return expand_expr (TREE_OPERAND (exp, 1),
7404 (ignore ? const0_rtx : target),
7408 /* If we would have a "singleton" (see below) were it not for a
7409 conversion in each arm, bring that conversion back out. */
7410 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7411 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7412 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7413 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7415 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7416 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7418 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7419 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7420 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7421 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7422 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7423 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7424 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7425 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7426 return expand_expr (build1 (NOP_EXPR, type,
7427 build (COND_EXPR, TREE_TYPE (true),
7428 TREE_OPERAND (exp, 0),
7430 target, tmode, modifier);
7434 /* Note that COND_EXPRs whose type is a structure or union
7435 are required to be constructed to contain assignments of
7436 a temporary variable, so that we can evaluate them here
7437 for side effect only. If type is void, we must do likewise. */
7439 /* If an arm of the branch requires a cleanup,
7440 only that cleanup is performed. */
7443 tree binary_op = 0, unary_op = 0;
7445 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7446 convert it to our mode, if necessary. */
7447 if (integer_onep (TREE_OPERAND (exp, 1))
7448 && integer_zerop (TREE_OPERAND (exp, 2))
7449 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7453 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7458 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7459 if (GET_MODE (op0) == mode)
7463 target = gen_reg_rtx (mode);
7464 convert_move (target, op0, unsignedp);
7468 /* Check for X ? A + B : A. If we have this, we can copy A to the
7469 output and conditionally add B. Similarly for unary operations.
7470 Don't do this if X has side-effects because those side effects
7471 might affect A or B and the "?" operation is a sequence point in
7472 ANSI. (operand_equal_p tests for side effects.) */
7474 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7475 && operand_equal_p (TREE_OPERAND (exp, 2),
7476 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7477 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7478 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7479 && operand_equal_p (TREE_OPERAND (exp, 1),
7480 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7481 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7482 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7483 && operand_equal_p (TREE_OPERAND (exp, 2),
7484 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7485 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7486 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7487 && operand_equal_p (TREE_OPERAND (exp, 1),
7488 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7489 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7491 /* If we are not to produce a result, we have no target. Otherwise,
7492 if a target was specified use it; it will not be used as an
7493 intermediate target unless it is safe. If no target, use a
7498 else if (original_target
7499 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7500 || (singleton && GET_CODE (original_target) == REG
7501 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7502 && original_target == var_rtx (singleton)))
7503 && GET_MODE (original_target) == mode
7504 #ifdef HAVE_conditional_move
7505 && (! can_conditionally_move_p (mode)
7506 || GET_CODE (original_target) == REG
7507 || TREE_ADDRESSABLE (type))
7509 && ! (GET_CODE (original_target) == MEM
7510 && MEM_VOLATILE_P (original_target)))
7511 temp = original_target;
7512 else if (TREE_ADDRESSABLE (type))
7515 temp = assign_temp (type, 0, 0, 1);
7517 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7518 do the test of X as a store-flag operation, do this as
7519 A + ((X != 0) << log C). Similarly for other simple binary
7520 operators. Only do for C == 1 if BRANCH_COST is low. */
7521 if (temp && singleton && binary_op
7522 && (TREE_CODE (binary_op) == PLUS_EXPR
7523 || TREE_CODE (binary_op) == MINUS_EXPR
7524 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7525 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7526 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7527 : integer_onep (TREE_OPERAND (binary_op, 1)))
7528 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7531 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7532 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7533 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7536 /* If we had X ? A : A + 1, do this as A + (X == 0).
7538 We have to invert the truth value here and then put it
7539 back later if do_store_flag fails. We cannot simply copy
7540 TREE_OPERAND (exp, 0) to another variable and modify that
7541 because invert_truthvalue can modify the tree pointed to
7543 if (singleton == TREE_OPERAND (exp, 1))
7544 TREE_OPERAND (exp, 0)
7545 = invert_truthvalue (TREE_OPERAND (exp, 0));
7547 result = do_store_flag (TREE_OPERAND (exp, 0),
7548 (safe_from_p (temp, singleton, 1)
7550 mode, BRANCH_COST <= 1);
7552 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7553 result = expand_shift (LSHIFT_EXPR, mode, result,
7554 build_int_2 (tree_log2
7558 (safe_from_p (temp, singleton, 1)
7559 ? temp : NULL_RTX), 0);
7563 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7564 return expand_binop (mode, boptab, op1, result, temp,
7565 unsignedp, OPTAB_LIB_WIDEN);
7567 else if (singleton == TREE_OPERAND (exp, 1))
7568 TREE_OPERAND (exp, 0)
7569 = invert_truthvalue (TREE_OPERAND (exp, 0));
7572 do_pending_stack_adjust ();
7574 op0 = gen_label_rtx ();
7576 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7580 /* If the target conflicts with the other operand of the
7581 binary op, we can't use it. Also, we can't use the target
7582 if it is a hard register, because evaluating the condition
7583 might clobber it. */
7585 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7586 || (GET_CODE (temp) == REG
7587 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7588 temp = gen_reg_rtx (mode);
7589 store_expr (singleton, temp, 0);
7592 expand_expr (singleton,
7593 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7594 if (singleton == TREE_OPERAND (exp, 1))
7595 jumpif (TREE_OPERAND (exp, 0), op0);
7597 jumpifnot (TREE_OPERAND (exp, 0), op0);
7599 start_cleanup_deferral ();
7600 if (binary_op && temp == 0)
7601 /* Just touch the other operand. */
7602 expand_expr (TREE_OPERAND (binary_op, 1),
7603 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7605 store_expr (build (TREE_CODE (binary_op), type,
7606 make_tree (type, temp),
7607 TREE_OPERAND (binary_op, 1)),
7610 store_expr (build1 (TREE_CODE (unary_op), type,
7611 make_tree (type, temp)),
7615 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7616 comparison operator. If we have one of these cases, set the
7617 output to A, branch on A (cse will merge these two references),
7618 then set the output to FOO. */
7620 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7621 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7622 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7623 TREE_OPERAND (exp, 1), 0)
7624 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7625 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7626 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7628 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7629 temp = gen_reg_rtx (mode);
7630 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7631 jumpif (TREE_OPERAND (exp, 0), op0);
7633 start_cleanup_deferral ();
7634 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7638 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7639 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7640 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7641 TREE_OPERAND (exp, 2), 0)
7642 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7643 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7644 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7646 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7647 temp = gen_reg_rtx (mode);
7648 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7649 jumpifnot (TREE_OPERAND (exp, 0), op0);
7651 start_cleanup_deferral ();
7652 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7657 op1 = gen_label_rtx ();
7658 jumpifnot (TREE_OPERAND (exp, 0), op0);
7660 start_cleanup_deferral ();
7662 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7664 expand_expr (TREE_OPERAND (exp, 1),
7665 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7666 end_cleanup_deferral ();
7668 emit_jump_insn (gen_jump (op1));
7671 start_cleanup_deferral ();
7673 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7675 expand_expr (TREE_OPERAND (exp, 2),
7676 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7679 end_cleanup_deferral ();
7690 /* Something needs to be initialized, but we didn't know
7691 where that thing was when building the tree. For example,
7692 it could be the return value of a function, or a parameter
7693 to a function which lays down in the stack, or a temporary
7694 variable which must be passed by reference.
7696 We guarantee that the expression will either be constructed
7697 or copied into our original target. */
7699 tree slot = TREE_OPERAND (exp, 0);
7700 tree cleanups = NULL_TREE;
7703 if (TREE_CODE (slot) != VAR_DECL)
7707 target = original_target;
7711 if (DECL_RTL (slot) != 0)
7713 target = DECL_RTL (slot);
7714 /* If we have already expanded the slot, so don't do
7716 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7721 target = assign_temp (type, 2, 0, 1);
7722 /* All temp slots at this level must not conflict. */
7723 preserve_temp_slots (target);
7724 DECL_RTL (slot) = target;
7725 if (TREE_ADDRESSABLE (slot))
7727 TREE_ADDRESSABLE (slot) = 0;
7728 mark_addressable (slot);
7731 /* Since SLOT is not known to the called function
7732 to belong to its stack frame, we must build an explicit
7733 cleanup. This case occurs when we must build up a reference
7734 to pass the reference as an argument. In this case,
7735 it is very likely that such a reference need not be
7738 if (TREE_OPERAND (exp, 2) == 0)
7739 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7740 cleanups = TREE_OPERAND (exp, 2);
7745 /* This case does occur, when expanding a parameter which
7746 needs to be constructed on the stack. The target
7747 is the actual stack address that we want to initialize.
7748 The function we call will perform the cleanup in this case. */
7750 /* If we have already assigned it space, use that space,
7751 not target that we were passed in, as our target
7752 parameter is only a hint. */
7753 if (DECL_RTL (slot) != 0)
7755 target = DECL_RTL (slot);
7756 /* If we have already expanded the slot, so don't do
7758 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7763 DECL_RTL (slot) = target;
7764 /* If we must have an addressable slot, then make sure that
7765 the RTL that we just stored in slot is OK. */
7766 if (TREE_ADDRESSABLE (slot))
7768 TREE_ADDRESSABLE (slot) = 0;
7769 mark_addressable (slot);
7774 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7775 /* Mark it as expanded. */
7776 TREE_OPERAND (exp, 1) = NULL_TREE;
7778 TREE_USED (slot) = 1;
7779 store_expr (exp1, target, 0);
7781 expand_decl_cleanup (NULL_TREE, cleanups);
7788 tree lhs = TREE_OPERAND (exp, 0);
7789 tree rhs = TREE_OPERAND (exp, 1);
7790 tree noncopied_parts = 0;
7791 tree lhs_type = TREE_TYPE (lhs);
7793 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7794 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7795 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7796 TYPE_NONCOPIED_PARTS (lhs_type));
7797 while (noncopied_parts != 0)
7799 expand_assignment (TREE_VALUE (noncopied_parts),
7800 TREE_PURPOSE (noncopied_parts), 0, 0);
7801 noncopied_parts = TREE_CHAIN (noncopied_parts);
7808 /* If lhs is complex, expand calls in rhs before computing it.
7809 That's so we don't compute a pointer and save it over a call.
7810 If lhs is simple, compute it first so we can give it as a
7811 target if the rhs is just a call. This avoids an extra temp and copy
7812 and that prevents a partial-subsumption which makes bad code.
7813 Actually we could treat component_ref's of vars like vars. */
7815 tree lhs = TREE_OPERAND (exp, 0);
7816 tree rhs = TREE_OPERAND (exp, 1);
7817 tree noncopied_parts = 0;
7818 tree lhs_type = TREE_TYPE (lhs);
7822 if (TREE_CODE (lhs) != VAR_DECL
7823 && TREE_CODE (lhs) != RESULT_DECL
7824 && TREE_CODE (lhs) != PARM_DECL
7825 && ! (TREE_CODE (lhs) == INDIRECT_REF
7826 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7827 preexpand_calls (exp);
7829 /* Check for |= or &= of a bitfield of size one into another bitfield
7830 of size 1. In this case, (unless we need the result of the
7831 assignment) we can do this more efficiently with a
7832 test followed by an assignment, if necessary.
7834 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7835 things change so we do, this code should be enhanced to
7838 && TREE_CODE (lhs) == COMPONENT_REF
7839 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7840 || TREE_CODE (rhs) == BIT_AND_EXPR)
7841 && TREE_OPERAND (rhs, 0) == lhs
7842 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7843 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7844 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7846 rtx label = gen_label_rtx ();
7848 do_jump (TREE_OPERAND (rhs, 1),
7849 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7850 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7851 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7852 (TREE_CODE (rhs) == BIT_IOR_EXPR
7854 : integer_zero_node)),
7856 do_pending_stack_adjust ();
7861 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7862 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7863 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7864 TYPE_NONCOPIED_PARTS (lhs_type));
7866 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7867 while (noncopied_parts != 0)
7869 expand_assignment (TREE_PURPOSE (noncopied_parts),
7870 TREE_VALUE (noncopied_parts), 0, 0);
7871 noncopied_parts = TREE_CHAIN (noncopied_parts);
7877 if (!TREE_OPERAND (exp, 0))
7878 expand_null_return ();
7880 expand_return (TREE_OPERAND (exp, 0));
7883 case PREINCREMENT_EXPR:
7884 case PREDECREMENT_EXPR:
7885 return expand_increment (exp, 0, ignore);
7887 case POSTINCREMENT_EXPR:
7888 case POSTDECREMENT_EXPR:
7889 /* Faster to treat as pre-increment if result is not used. */
7890 return expand_increment (exp, ! ignore, ignore);
7893 /* If nonzero, TEMP will be set to the address of something that might
7894 be a MEM corresponding to a stack slot. */
7897 /* Are we taking the address of a nested function? */
7898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7899 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7900 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7901 && ! TREE_STATIC (exp))
7903 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7904 op0 = force_operand (op0, target);
7906 /* If we are taking the address of something erroneous, just
7908 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7912 /* We make sure to pass const0_rtx down if we came in with
7913 ignore set, to avoid doing the cleanups twice for something. */
7914 op0 = expand_expr (TREE_OPERAND (exp, 0),
7915 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7916 (modifier == EXPAND_INITIALIZER
7917 ? modifier : EXPAND_CONST_ADDRESS));
7919 /* If we are going to ignore the result, OP0 will have been set
7920 to const0_rtx, so just return it. Don't get confused and
7921 think we are taking the address of the constant. */
7925 op0 = protect_from_queue (op0, 0);
7927 /* We would like the object in memory. If it is a constant,
7928 we can have it be statically allocated into memory. For
7929 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7930 memory and store the value into it. */
7932 if (CONSTANT_P (op0))
7933 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7935 else if (GET_CODE (op0) == MEM)
7937 mark_temp_addr_taken (op0);
7938 temp = XEXP (op0, 0);
7941 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7942 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7944 /* If this object is in a register, it must be not
7946 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7947 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7949 mark_temp_addr_taken (memloc);
7950 emit_move_insn (memloc, op0);
7954 if (GET_CODE (op0) != MEM)
7957 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7959 temp = XEXP (op0, 0);
7960 #ifdef POINTERS_EXTEND_UNSIGNED
7961 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7962 && mode == ptr_mode)
7963 temp = convert_memory_address (ptr_mode, temp);
7968 op0 = force_operand (XEXP (op0, 0), target);
7971 if (flag_force_addr && GET_CODE (op0) != REG)
7972 op0 = force_reg (Pmode, op0);
7974 if (GET_CODE (op0) == REG
7975 && ! REG_USERVAR_P (op0))
7976 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7978 /* If we might have had a temp slot, add an equivalent address
7981 update_temp_slot_address (temp, op0);
7983 #ifdef POINTERS_EXTEND_UNSIGNED
7984 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7985 && mode == ptr_mode)
7986 op0 = convert_memory_address (ptr_mode, op0);
7991 case ENTRY_VALUE_EXPR:
7994 /* COMPLEX type for Extended Pascal & Fortran */
7997 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8000 /* Get the rtx code of the operands. */
8001 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8002 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8005 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8009 /* Move the real (op0) and imaginary (op1) parts to their location. */
8010 emit_move_insn (gen_realpart (mode, target), op0);
8011 emit_move_insn (gen_imagpart (mode, target), op1);
8013 insns = get_insns ();
8016 /* Complex construction should appear as a single unit. */
8017 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8018 each with a separate pseudo as destination.
8019 It's not correct for flow to treat them as a unit. */
8020 if (GET_CODE (target) != CONCAT)
8021 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8029 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8030 return gen_realpart (mode, op0);
8033 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8034 return gen_imagpart (mode, op0);
8038 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8042 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8045 target = gen_reg_rtx (mode);
8049 /* Store the realpart and the negated imagpart to target. */
8050 emit_move_insn (gen_realpart (partmode, target),
8051 gen_realpart (partmode, op0));
8053 imag_t = gen_imagpart (partmode, target);
8054 temp = expand_unop (partmode, neg_optab,
8055 gen_imagpart (partmode, op0), imag_t, 0);
8057 emit_move_insn (imag_t, temp);
8059 insns = get_insns ();
8062 /* Conjugate should appear as a single unit
8063 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8064 each with a separate pseudo as destination.
8065 It's not correct for flow to treat them as a unit. */
8066 if (GET_CODE (target) != CONCAT)
8067 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8074 case TRY_CATCH_EXPR:
8076 tree handler = TREE_OPERAND (exp, 1);
8078 expand_eh_region_start ();
8080 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8082 expand_eh_region_end (handler);
8089 rtx dcc = get_dynamic_cleanup_chain ();
8090 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8096 rtx dhc = get_dynamic_handler_chain ();
8097 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8102 op0 = CONST0_RTX (tmode);
8108 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8111 /* Here to do an ordinary binary operator, generating an instruction
8112 from the optab already placed in `this_optab'. */
8114 preexpand_calls (exp);
8115 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8118 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8120 temp = expand_binop (mode, this_optab, op0, op1, target,
8121 unsignedp, OPTAB_LIB_WIDEN);
8129 /* Return the alignment in bits of EXP, a pointer valued expression.
8130 But don't return more than MAX_ALIGN no matter what.
8131 The alignment returned is, by default, the alignment of the thing that
8132 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8134 Otherwise, look at the expression to see if we can do better, i.e., if the
8135 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this listing is a lossy extract -- the return type, the
   parameter declarations, braces, some case labels and several return
   statements fall on original source lines that are missing here.  The
   comments below annotate only the lines that are visible.  */
8138 get_pointer_alignment (exp, max_align)
/* ALIGN is the best alignment proven so far; INNER is the alignment
   implied by an inner pointer type encountered while walking EXP.  */
8142 unsigned align, inner;
/* A non-pointer expression carries no alignment information (per the
   header comment, 0 is returned in this case).  */
8144 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the alignment of the pointed-to type, capped at MAX_ALIGN.  */
8147 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8148 align = MIN (align, max_align);
/* Walk down the expression, refining ALIGN where possible.  */
8152 switch (TREE_CODE (exp))
/* Conversion-like nodes: look through them, but only while the operand
   is still a pointer.  */
8156 case NON_LVALUE_EXPR:
8157 exp = TREE_OPERAND (exp, 0);
8158 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8160 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8161 align = MIN (inner, max_align);
8165 /* If sum of pointer + int, restrict our maximum alignment to that
8166 imposed by the integer. If not, we can't do any better than
8168 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Reduce MAX_ALIGN until it is compatible with the constant byte
   offset being added (converted to bits first).  The loop's closing
   condition and body are on missing lines -- TODO confirm.  */
8171 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8176 exp = TREE_OPERAND (exp, 0);
8180 /* See what we are pointing at and look at its alignment. */
8181 exp = TREE_OPERAND (exp, 0);
8182 if (TREE_CODE (exp) == FUNCTION_DECL)
8183 align = FUNCTION_BOUNDARY;
8184 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8185 align = DECL_ALIGN (exp);
8186 #ifdef CONSTANT_ALIGNMENT
8187 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8188 align = CONSTANT_ALIGNMENT (exp, align);
8190 return MIN (align, max_align);
8198 /* Return the tree node and offset if a given argument corresponds to
8199 a string constant. */
/* NOTE(review): lossy extract -- the function's return type, parameter
   declarations, braces, the offset-storing statements for the PLUS_EXPR
   arms, and the failure return are on missing original lines.  */
8202 string_constant (arg, ptr_offset)
/* Case 1: ARG is directly the address of a STRING_CST; the offset into
   the string is zero.  */
8208 if (TREE_CODE (arg) == ADDR_EXPR
8209 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8211 *ptr_offset = integer_zero_node;
8212 return TREE_OPERAND (arg, 0);
/* Case 2: ARG is a sum.  One operand may be the address of a STRING_CST
   and the other the offset into it (presumably stored through PTR_OFFSET
   on the missing lines -- TODO confirm).  */
8214 else if (TREE_CODE (arg) == PLUS_EXPR)
8216 tree arg0 = TREE_OPERAND (arg, 0);
8217 tree arg1 = TREE_OPERAND (arg, 1);
/* Handle either operand order: &string + offset ...  */
8222 if (TREE_CODE (arg0) == ADDR_EXPR
8223 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8226 return TREE_OPERAND (arg0, 0);
/* ... or offset + &string.  */
8228 else if (TREE_CODE (arg1) == ADDR_EXPR
8229 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8232 return TREE_OPERAND (arg1, 0);
8239 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8240 way, because it could contain a zero byte in the middle.
8241 TREE_STRING_LENGTH is the size of the character array, not the string.
8243 Unfortunately, string_constant can't access the values of const char
8244 arrays with initializers, so neither can we do so here. */
/* NOTE(review): lossy extract -- the function header (name and local
   declarations; presumably this is c_strlen -- TODO confirm), several
   statements, braces and early-return lines are missing.  SRC is resolved
   via string_constant above; OFFSET_NODE receives the offset tree.  */
8254 src = string_constant (src, &offset_node);
/* MAX is the size of the character array; PTR points at its constant
   contents.  */
8257 max = TREE_STRING_LENGTH (src);
8258 ptr = TREE_STRING_POINTER (src);
/* Non-constant offset: we can still compute a length expression, but
   only if the array holds no embedded NUL (checked by the loop below,
   whose body is on missing lines).  */
8259 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8261 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8262 compute the offset to the following null if we don't know where to
8263 start searching for it. */
8265 for (i = 0; i < max; i++)
8268 /* We don't know the starting offset, but we do know that the string
8269 has no internal zero bytes. We can assume that the offset falls
8270 within the bounds of the string; otherwise, the programmer deserves
8271 what he gets. Subtract the offset from the length of the string,
8273 /* This would perhaps not be valid if we were dealing with named
8274 arrays in addition to literal string constants. */
8275 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8278 /* We have a known offset into the string. Start searching there for
8279 a null character. */
8280 if (offset_node == 0)
8284 /* Did we get a long long offset? If so, punt. */
8285 if (TREE_INT_CST_HIGH (offset_node) != 0)
8287 offset = TREE_INT_CST_LOW (offset_node);
8289 /* If the offset is known to be out of bounds, warn, and call strlen at
8291 if (offset < 0 || offset > max)
8293 warning ("offset outside bounds of constant string");
8296 /* Use strlen to search for the first zero byte. Since any strings
8297 constructed with build_string will have nulls appended, we win even
8298 if we get handed something like (char[4])"abcd".
8300 Since OFFSET is our starting index into the string, no further
8301 calculation is needed. */
8302 return size_int (strlen (ptr + offset));
/* Expand __builtin_return_address / __builtin_frame_address.
   FNDECL_CODE selects which of the two builtins is being expanded;
   the visible loop walks COUNT frames starting from TEM.
   NOTE(review): lossy extract -- the COUNT/TEM parameter declarations,
   the opening brace, #else/#endif lines and some statement bodies are
   on missing original lines.  */
8306 expand_builtin_return_addr (fndecl_code, count, tem)
8307 enum built_in_function fndecl_code;
8313 /* Some machines need special handling before we can access
8314 arbitrary frames. For example, on the sparc, we must first flush
8315 all register windows to the stack. */
8316 #ifdef SETUP_FRAME_ADDRESSES
8318 SETUP_FRAME_ADDRESSES ();
8321 /* On the sparc, the return address is not in the frame, it is in a
8322 register. There is no way to access it off of the current frame
8323 pointer, but it can be accessed off the previous frame pointer by
8324 reading the value from the register window save area. */
8325 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8326 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8330 /* Scan back COUNT frames to the specified frame. */
8331 for (i = 0; i < count; i++)
8333 /* Assume the dynamic chain pointer is in the word that the
8334 frame address points to, unless otherwise specified. */
8335 #ifdef DYNAMIC_CHAIN_ADDRESS
8336 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8338 tem = memory_address (Pmode, tem);
8339 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8342 /* For __builtin_frame_address, return what we've got. */
8343 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8346 /* For __builtin_return_address, Get the return address from that
8348 #ifdef RETURN_ADDR_RTX
8349 tem = RETURN_ADDR_RTX (count, tem);
/* Default when the target defines no RETURN_ADDR_RTX: assume the return
   address is saved one Pmode word past the frame address.  */
8351 tem = memory_address (Pmode,
8352 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8353 tem = gen_rtx_MEM (Pmode, tem);
8358 /* __builtin_setjmp is passed a pointer to an array of five words (not
8359 all will be used on all machines). It operates similarly to the C
8360 library function of the same name, but is more efficient. Much of
8361 the code below (and for longjmp) is copied from the handling of
8364 NOTE: This is intended for use by GNAT and the exception handling
8365 scheme in the compiler and will only work in the method used by
/* NOTE(review): lossy extract -- the BUF_ADDR/TARGET parameter
   declarations, braces, #else/#endif lines and some statements are on
   missing original lines.  BUF_ADDR is the jmp_buf address; TARGET
   receives the 0/1 setjmp result; FIRST_LABEL is the direct
   (first-time-through) path, NEXT_LABEL the longjmp-return path.  */
8369 expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8372 rtx first_label, next_label;
8374 rtx lab1 = gen_label_rtx ();
8375 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8376 enum machine_mode value_mode;
/* The setjmp result has the mode of int.  */
8379 value_mode = TYPE_MODE (integer_type_node);
8381 #ifdef POINTERS_EXTEND_UNSIGNED
8382 buf_addr = convert_memory_address (Pmode, buf_addr);
8385 buf_addr = force_reg (Pmode, buf_addr);
/* Make sure TARGET is a pseudo register we are free to assign to.  */
8387 if (target == 0 || GET_CODE (target) != REG
8388 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8389 target = gen_reg_rtx (value_mode);
8393 /* We store the frame pointer and the address of lab1 in the buffer
8394 and use the rest of it for the stack save area, which is
8395 machine-dependent. */
8397 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8398 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
/* Buffer word 0: the frame value.  */
8401 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
8402 BUILTIN_SETJMP_FRAME_VALUE)
8403 emit_move_insn (validize_mem
8404 (gen_rtx_MEM (Pmode,
8405 plus_constant (buf_addr,
8406 GET_MODE_SIZE (Pmode)))),
8407 gen_rtx_LABEL_REF (Pmode, lab1));
/* Buffer word 2 onward: the machine-dependent stack save area.  */
8409 stack_save = gen_rtx_MEM (sa_mode,
8410 plus_constant (buf_addr,
8411 2 * GET_MODE_SIZE (Pmode)));
8412 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8414 /* If there is further processing to do, do it. */
8415 #ifdef HAVE_builtin_setjmp_setup
8416 if (HAVE_builtin_setjmp_setup)
8417 emit_insn (gen_builtin_setjmp_setup (buf_addr));
8420 /* Set TARGET to zero and branch to the first-time-through label. */
8421 emit_move_insn (target, const0_rtx);
8422 emit_jump_insn (gen_jump (first_label));
/* Everything from here on is emitted at LAB1, the point a longjmp
   returns to (presumably emit_label (lab1) is on a missing line --
   TODO confirm).  */
8426 /* Tell flow about the strange goings on. */
8427 current_function_has_nonlocal_label = 1;
8429 /* Clobber the FP when we get here, so we have to make sure it's
8430 marked as used by this function. */
8431 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8433 /* Mark the static chain as clobbered here so life information
8434 doesn't get messed up for it. */
8435 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
8437 /* Now put in the code to restore the frame pointer, and argument
8438 pointer, if needed. The code below is from expand_end_bindings
8439 in stmt.c; see detailed documentation there. */
8440 #ifdef HAVE_nonlocal_goto
8441 if (! HAVE_nonlocal_goto)
8443 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8445 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8446 if (fixed_regs[ARG_POINTER_REGNUM])
8448 #ifdef ELIMINABLE_REGS
/* If the arg pointer is eliminated to the hard frame pointer on this
   target, no explicit restore is needed.  */
8450 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8452 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8453 if (elim_regs[i].from == ARG_POINTER_REGNUM
8454 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8457 if (i == sizeof elim_regs / sizeof elim_regs [0])
8460 /* Now restore our arg pointer from the address at which it
8461 was saved in our stack frame.
8462 If there hasn't be space allocated for it yet, make
8464 if (arg_pointer_save_area == 0)
8465 arg_pointer_save_area
8466 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8467 emit_move_insn (virtual_incoming_args_rtx,
8468 copy_to_reg (arg_pointer_save_area));
8473 #ifdef HAVE_builtin_setjmp_receiver
8474 if (HAVE_builtin_setjmp_receiver)
8475 emit_insn (gen_builtin_setjmp_receiver (lab1));
8478 #ifdef HAVE_nonlocal_goto_receiver
8479 if (HAVE_nonlocal_goto_receiver)
8480 emit_insn (gen_nonlocal_goto_receiver ());
8487 /* Set TARGET, and branch to the next-time-through label. */
8488 emit_move_insn (target, const1_rtx);
8489 emit_jump_insn (gen_jump (next_label));
/* Expand __builtin_longjmp.  BUF_ADDR is the jmp_buf filled in by
   expand_builtin_setjmp above; VALUE is the value to "return" and is
   required to be const1_rtx (see the comment at line 8507 below).
   NOTE(review): lossy extract -- the local declarations (fp, lab,
   stack), braces, #else/#endif lines and an abort/error path for the
   VALUE check are on missing original lines.  */
8496 expand_builtin_longjmp (buf_addr, value)
8497 rtx buf_addr, value;
8500 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8502 #ifdef POINTERS_EXTEND_UNSIGNED
8503 buf_addr = convert_memory_address (Pmode, buf_addr);
8505 buf_addr = force_reg (Pmode, buf_addr);
8507 /* We used to store value in static_chain_rtx, but that fails if pointers
8508 are smaller than integers. We instead require that the user must pass
8509 a second argument of 1, because that is what builtin_setjmp will
8510 return. This also makes EH slightly more efficient, since we are no
8511 longer copying around a value that we don't care about. */
8512 if (value != const1_rtx)
8515 #ifdef HAVE_builtin_longjmp
8516 if (HAVE_builtin_longjmp)
8517 emit_insn (gen_builtin_longjmp (buf_addr));
/* No builtin_longjmp pattern: pull the three saved words back out of
   the buffer in the same layout expand_builtin_setjmp stored them.  */
8521 fp = gen_rtx_MEM (Pmode, buf_addr);
8522 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8523 GET_MODE_SIZE (Pmode)));
8525 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8526 2 * GET_MODE_SIZE (Pmode)));
8528 /* Pick up FP, label, and SP from the block and jump. This code is
8529 from expand_goto in stmt.c; see there for detailed comments. */
8530 #if HAVE_nonlocal_goto
8531 if (HAVE_nonlocal_goto)
8532 /* We have to pass a value to the nonlocal_goto pattern that will
8533 get copied into the static_chain pointer, but it does not matter
8534 what that value is, because builtin_setjmp does not use it. */
8535 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
/* Fallback path: restore FP and SP by hand, then jump indirectly.
   LAB is copied to a register first so the restores can't clobber it.  */
8539 lab = copy_to_reg (lab);
8541 emit_move_insn (hard_frame_pointer_rtx, fp);
8542 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
/* Keep FP and SP live across the jump for flow analysis.  */
8544 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8545 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8546 emit_indirect_jump (lab);
/* Build a BLKmode MEM rtx for the object pointed to by the pointer
   expression EXP, propagating readonly/aggregate information from the
   tree onto the rtx.
   NOTE(review): lossy extract -- the return type, declarations of MEM /
   TYPE / IS_AGGREGATE, braces, the else arms, and the final return of
   MEM are on missing original lines.  */
8552 get_memory_rtx (exp)
/* Expand EXP as an address and wrap it in a BLKmode MEM.  */
8558 mem = gen_rtx_MEM (BLKmode,
8559 memory_address (BLKmode,
8560 expand_expr (exp, NULL_RTX,
8561 ptr_mode, EXPAND_SUM)));
/* A readonly tree gives an unchanging MEM.  */
8563 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8565 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8566 if the value is the address of a structure or if the expression is
8567 cast to a pointer to structure type. */
/* Strip NOP_EXPR casts, noting whether any intermediate cast is to
   pointer-to-aggregate (presumably IS_AGGREGATE is set on the missing
   lines inside this loop -- TODO confirm).  */
8570 while (TREE_CODE (exp) == NOP_EXPR)
8572 tree cast_type = TREE_TYPE (exp);
8573 if (TREE_CODE (cast_type) == POINTER_TYPE
8574 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8579 exp = TREE_OPERAND (exp, 0);
/* No cast told us; inspect the stripped expression itself.  */
8582 if (is_aggregate == 0)
8586 if (TREE_CODE (exp) == ADDR_EXPR)
8587 /* If this is the address of an object, check whether the
8588 object is an array. */
8589 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8591 type = TREE_TYPE (TREE_TYPE (exp));
8592 is_aggregate = AGGREGATE_TYPE_P (type);
8595 MEM_IN_STRUCT_P (mem) = is_aggregate;
8600 /* Expand an expression EXP that calls a built-in function,
8601 with result going to TARGET if that's convenient
8602 (and in mode MODE if that's convenient).
8603 SUBTARGET may be used as the target for computing one of EXP's operands.
8604 IGNORE is nonzero if the value is to be ignored. */
/* Nonzero iff the name of function decl NODE begins with the ten
   characters "__builtin_", i.e. the user called the builtin by its
   reserved name rather than via the plain library name.  */
8606 #define CALLED_AS_BUILT_IN(NODE) \
8607 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
8610 expand_builtin (exp, target, subtarget, mode, ignore)
8614 enum machine_mode mode;
8617 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8618 tree arglist = TREE_OPERAND (exp, 1);
8621 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8622 optab builtin_optab;
8624 switch (DECL_FUNCTION_CODE (fndecl))
8629 /* build_function_call changes these into ABS_EXPR. */
8634 /* Treat these like sqrt, but only if the user asks for them. */
8635 if (! flag_fast_math)
8637 case BUILT_IN_FSQRT:
8638 /* If not optimizing, call the library function. */
8643 /* Arg could be wrong type if user redeclared this fcn wrong. */
8644 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8647 /* Stabilize and compute the argument. */
8648 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8649 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8651 exp = copy_node (exp);
8652 arglist = copy_node (arglist);
8653 TREE_OPERAND (exp, 1) = arglist;
8654 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8656 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8658 /* Make a suitable register to place result in. */
8659 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8664 switch (DECL_FUNCTION_CODE (fndecl))
8667 builtin_optab = sin_optab; break;
8669 builtin_optab = cos_optab; break;
8670 case BUILT_IN_FSQRT:
8671 builtin_optab = sqrt_optab; break;
8676 /* Compute into TARGET.
8677 Set TARGET to wherever the result comes back. */
8678 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8679 builtin_optab, op0, target, 0);
8681 /* If we were unable to expand via the builtin, stop the
8682 sequence (without outputting the insns) and break, causing
8683 a call to the library function. */
8690 /* Check the results by default. But if flag_fast_math is turned on,
8691 then assume sqrt will always be called with valid arguments. */
8693 if (! flag_fast_math)
8695 /* Don't define the builtin FP instructions
8696 if your machine is not IEEE. */
8697 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8700 lab1 = gen_label_rtx ();
8702 /* Test the result; if it is NaN, set errno=EDOM because
8703 the argument was not in the domain. */
8704 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8705 emit_jump_insn (gen_beq (lab1));
8709 #ifdef GEN_ERRNO_RTX
8710 rtx errno_rtx = GEN_ERRNO_RTX;
8713 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8716 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8719 /* We can't set errno=EDOM directly; let the library call do it.
8720 Pop the arguments right away in case the call gets deleted. */
8722 expand_call (exp, target, 0);
8729 /* Output the entire sequence. */
8730 insns = get_insns ();
8739 /* __builtin_apply_args returns block of memory allocated on
8740 the stack into which is stored the arg pointer, structure
8741 value address, static chain, and all the registers that might
8742 possibly be used in performing a function call. The code is
8743 moved to the start of the function so the incoming values are
8745 case BUILT_IN_APPLY_ARGS:
8746 /* Don't do __builtin_apply_args more than once in a function.
8747 Save the result of the first call and reuse it. */
8748 if (apply_args_value != 0)
8749 return apply_args_value;
8751 /* When this function is called, it means that registers must be
8752 saved on entry to this function. So we migrate the
8753 call to the first insn of this function. */
8758 temp = expand_builtin_apply_args ();
8762 apply_args_value = temp;
8764 /* Put the sequence after the NOTE that starts the function.
8765 If this is inside a SEQUENCE, make the outer-level insn
8766 chain current, so the code is placed at the start of the
8768 push_topmost_sequence ();
8769 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8770 pop_topmost_sequence ();
8774 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8775 FUNCTION with a copy of the parameters described by
8776 ARGUMENTS, and ARGSIZE. It returns a block of memory
8777 allocated on the stack into which is stored all the registers
8778 that might possibly be used for returning the result of a
8779 function. ARGUMENTS is the value returned by
8780 __builtin_apply_args. ARGSIZE is the number of bytes of
8781 arguments that must be copied. ??? How should this value be
8782 computed? We'll also need a safe worst case value for varargs
8784 case BUILT_IN_APPLY:
8786 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8787 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8788 || TREE_CHAIN (arglist) == 0
8789 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8790 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8791 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8799 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8800 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8802 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8805 /* __builtin_return (RESULT) causes the function to return the
8806 value described by RESULT. RESULT is address of the block of
8807 memory returned by __builtin_apply. */
8808 case BUILT_IN_RETURN:
8810 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8811 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8812 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8813 NULL_RTX, VOIDmode, 0));
8816 case BUILT_IN_SAVEREGS:
8817 /* Don't do __builtin_saveregs more than once in a function.
8818 Save the result of the first call and reuse it. */
8819 if (saveregs_value != 0)
8820 return saveregs_value;
8822 /* When this function is called, it means that registers must be
8823 saved on entry to this function. So we migrate the
8824 call to the first insn of this function. */
8828 /* Now really call the function. `expand_call' does not call
8829 expand_builtin, so there is no danger of infinite recursion here. */
8832 #ifdef EXPAND_BUILTIN_SAVEREGS
8833 /* Do whatever the machine needs done in this case. */
8834 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8836 /* The register where the function returns its value
8837 is likely to have something else in it, such as an argument.
8838 So preserve that register around the call. */
8840 if (value_mode != VOIDmode)
8842 rtx valreg = hard_libcall_value (value_mode);
8843 rtx saved_valreg = gen_reg_rtx (value_mode);
8845 emit_move_insn (saved_valreg, valreg);
8846 temp = expand_call (exp, target, ignore);
8847 emit_move_insn (valreg, saved_valreg);
8850 /* Generate the call, putting the value in a pseudo. */
8851 temp = expand_call (exp, target, ignore);
8857 saveregs_value = temp;
8859 /* Put the sequence after the NOTE that starts the function.
8860 If this is inside a SEQUENCE, make the outer-level insn
8861 chain current, so the code is placed at the start of the
8863 push_topmost_sequence ();
8864 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8865 pop_topmost_sequence ();
8869 /* __builtin_args_info (N) returns word N of the arg space info
8870 for the current function. The number and meanings of words
8871 is controlled by the definition of CUMULATIVE_ARGS. */
8872 case BUILT_IN_ARGS_INFO:
8874 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8875 int *word_ptr = (int *) ¤t_function_args_info;
8877 /* These are used by the code below that is if 0'ed away */
8879 tree type, elts, result;
8882 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8883 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8884 __FILE__, __LINE__);
8888 tree arg = TREE_VALUE (arglist);
8889 if (TREE_CODE (arg) != INTEGER_CST)
8890 error ("argument of `__builtin_args_info' must be constant");
8893 int wordnum = TREE_INT_CST_LOW (arg);
8895 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8896 error ("argument of `__builtin_args_info' out of range");
8898 return GEN_INT (word_ptr[wordnum]);
8902 error ("missing argument in `__builtin_args_info'");
8907 for (i = 0; i < nwords; i++)
8908 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8910 type = build_array_type (integer_type_node,
8911 build_index_type (build_int_2 (nwords, 0)));
8912 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8913 TREE_CONSTANT (result) = 1;
8914 TREE_STATIC (result) = 1;
8915 result = build (INDIRECT_REF, build_pointer_type (type), result);
8916 TREE_CONSTANT (result) = 1;
8917 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8921 /* Return the address of the first anonymous stack arg. */
8922 case BUILT_IN_NEXT_ARG:
8924 tree fntype = TREE_TYPE (current_function_decl);
8926 if ((TYPE_ARG_TYPES (fntype) == 0
8927 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8929 && ! current_function_varargs)
8931 error ("`va_start' used in function with fixed args");
8937 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8938 tree arg = TREE_VALUE (arglist);
8940 /* Strip off all nops for the sake of the comparison. This
8941 is not quite the same as STRIP_NOPS. It does more.
8942 We must also strip off INDIRECT_EXPR for C++ reference
8944 while (TREE_CODE (arg) == NOP_EXPR
8945 || TREE_CODE (arg) == CONVERT_EXPR
8946 || TREE_CODE (arg) == NON_LVALUE_EXPR
8947 || TREE_CODE (arg) == INDIRECT_REF)
8948 arg = TREE_OPERAND (arg, 0);
8949 if (arg != last_parm)
8950 warning ("second parameter of `va_start' not last named argument");
8952 else if (! current_function_varargs)
8953 /* Evidently an out of date version of <stdarg.h>; can't validate
8954 va_start's second argument, but can still work as intended. */
8955 warning ("`__builtin_next_arg' called without an argument");
8958 return expand_binop (Pmode, add_optab,
8959 current_function_internal_arg_pointer,
8960 current_function_arg_offset_rtx,
8961 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8963 case BUILT_IN_CLASSIFY_TYPE:
8966 tree type = TREE_TYPE (TREE_VALUE (arglist));
8967 enum tree_code code = TREE_CODE (type);
8968 if (code == VOID_TYPE)
8969 return GEN_INT (void_type_class);
8970 if (code == INTEGER_TYPE)
8971 return GEN_INT (integer_type_class);
8972 if (code == CHAR_TYPE)
8973 return GEN_INT (char_type_class);
8974 if (code == ENUMERAL_TYPE)
8975 return GEN_INT (enumeral_type_class);
8976 if (code == BOOLEAN_TYPE)
8977 return GEN_INT (boolean_type_class);
8978 if (code == POINTER_TYPE)
8979 return GEN_INT (pointer_type_class);
8980 if (code == REFERENCE_TYPE)
8981 return GEN_INT (reference_type_class);
8982 if (code == OFFSET_TYPE)
8983 return GEN_INT (offset_type_class);
8984 if (code == REAL_TYPE)
8985 return GEN_INT (real_type_class);
8986 if (code == COMPLEX_TYPE)
8987 return GEN_INT (complex_type_class);
8988 if (code == FUNCTION_TYPE)
8989 return GEN_INT (function_type_class);
8990 if (code == METHOD_TYPE)
8991 return GEN_INT (method_type_class);
8992 if (code == RECORD_TYPE)
8993 return GEN_INT (record_type_class);
8994 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8995 return GEN_INT (union_type_class);
8996 if (code == ARRAY_TYPE)
8998 if (TYPE_STRING_FLAG (type))
8999 return GEN_INT (string_type_class);
9001 return GEN_INT (array_type_class);
9003 if (code == SET_TYPE)
9004 return GEN_INT (set_type_class);
9005 if (code == FILE_TYPE)
9006 return GEN_INT (file_type_class);
9007 if (code == LANG_TYPE)
9008 return GEN_INT (lang_type_class);
9010 return GEN_INT (no_type_class);
9012 case BUILT_IN_CONSTANT_P:
9017 tree arg = TREE_VALUE (arglist);
9020 if (really_constant_p (arg)
9021 || (TREE_CODE (arg) == ADDR_EXPR
9022 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9025 /* Only emit CONSTANT_P_RTX if CSE will be run.
9026 Moreover, we don't want to expand trees that have side effects,
9027 as the original __builtin_constant_p did not evaluate its
9028 argument at all, and we would break existing usage by changing
9029 this. This quirk was generally useful, eliminating a bit of hair
9030 in the writing of the macros that use this function. Now the
9031 same thing can be better accomplished in an inline function. */
9033 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
9035 /* Lazy fixup of old code: issue a warning and fail the test. */
9036 if (! can_handle_constant_p)
9038 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
9039 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
9042 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
9043 expand_expr (arg, NULL_RTX,
9050 case BUILT_IN_FRAME_ADDRESS:
9051 /* The argument must be a nonnegative integer constant.
9052 It counts the number of frames to scan up the stack.
9053 The value is the address of that frame. */
9054 case BUILT_IN_RETURN_ADDRESS:
9055 /* The argument must be a nonnegative integer constant.
9056 It counts the number of frames to scan up the stack.
9057 The value is the return address saved in that frame. */
9059 /* Warning about missing arg was already issued. */
9061 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9062 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9064 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9065 error ("invalid arg to `__builtin_frame_address'");
9067 error ("invalid arg to `__builtin_return_address'");
9072 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9073 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9074 hard_frame_pointer_rtx);
9076 /* Some ports cannot access arbitrary stack frames. */
9079 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9080 warning ("unsupported arg to `__builtin_frame_address'");
9082 warning ("unsupported arg to `__builtin_return_address'");
9086 /* For __builtin_frame_address, return what we've got. */
9087 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9090 if (GET_CODE (tem) != REG)
9091 tem = copy_to_reg (tem);
9095 /* Returns the address of the area where the structure is returned.
9097 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9099 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9100 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9103 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9105 case BUILT_IN_ALLOCA:
9107 /* Arg could be non-integer if user redeclared this fcn wrong. */
9108 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9111 /* Compute the argument. */
9112 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
9114 /* Allocate the desired space. */
9115 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
9118 /* If not optimizing, call the library function. */
9119 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9123 /* Arg could be non-integer if user redeclared this fcn wrong. */
9124 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9127 /* Compute the argument. */
9128 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9129 /* Compute ffs, into TARGET if possible.
9130 Set TARGET to wherever the result comes back. */
9131 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9132 ffs_optab, op0, target, 1);
9137 case BUILT_IN_STRLEN:
9138 /* If not optimizing, call the library function. */
9139 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9143 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9144 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9148 tree src = TREE_VALUE (arglist);
9149 tree len = c_strlen (src);
9152 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9154 rtx result, src_rtx, char_rtx;
9155 enum machine_mode insn_mode = value_mode, char_mode;
9156 enum insn_code icode;
9158 /* If the length is known, just return it. */
9160 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
9162 /* If SRC is not a pointer type, don't do this operation inline. */
9166 /* Call a function if we can't compute strlen in the right mode. */
9168 while (insn_mode != VOIDmode)
9170 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9171 if (icode != CODE_FOR_nothing)
9174 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9176 if (insn_mode == VOIDmode)
9179 /* Make a place to write the result of the instruction. */
9182 && GET_CODE (result) == REG
9183 && GET_MODE (result) == insn_mode
9184 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9185 result = gen_reg_rtx (insn_mode);
9187 /* Make sure the operands are acceptable to the predicates. */
9189 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9190 result = gen_reg_rtx (insn_mode);
9191 src_rtx = memory_address (BLKmode,
9192 expand_expr (src, NULL_RTX, ptr_mode,
9195 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9196 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
9198 /* Check the string is readable and has an end. */
9199 if (current_function_check_memory_usage)
9200 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9202 GEN_INT (MEMORY_USE_RO),
9203 TYPE_MODE (integer_type_node));
9205 char_rtx = const0_rtx;
9206 char_mode = insn_operand_mode[(int)icode][2];
9207 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9208 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
9210 emit_insn (GEN_FCN (icode) (result,
9211 gen_rtx_MEM (BLKmode, src_rtx),
9212 char_rtx, GEN_INT (align)));
9214 /* Return the value in the proper mode for this function. */
9215 if (GET_MODE (result) == value_mode)
9217 else if (target != 0)
9219 convert_move (target, result, 0);
9223 return convert_to_mode (value_mode, result, 0);
9226 case BUILT_IN_STRCPY:
9227 /* If not optimizing, call the library function. */
9228 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9232 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9233 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9234 || TREE_CHAIN (arglist) == 0
9235 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9239 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
9244 len = size_binop (PLUS_EXPR, len, integer_one_node);
9246 chainon (arglist, build_tree_list (NULL_TREE, len));
9250 case BUILT_IN_MEMCPY:
9251 /* If not optimizing, call the library function. */
9252 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9256 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9257 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9258 || TREE_CHAIN (arglist) == 0
9259 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9261 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9262 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9263 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9268 tree dest = TREE_VALUE (arglist);
9269 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9270 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9273 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9275 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9276 rtx dest_mem, src_mem, dest_addr, len_rtx;
9278 /* If either SRC or DEST is not a pointer type, don't do
9279 this operation in-line. */
9280 if (src_align == 0 || dest_align == 0)
9282 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9283 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9287 dest_mem = get_memory_rtx (dest);
9288 src_mem = get_memory_rtx (src);
9289 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9291 /* Just copy the rights of SRC to the rights of DEST. */
9292 if (current_function_check_memory_usage)
9293 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9294 XEXP (dest_mem, 0), ptr_mode,
9295 XEXP (src_mem, 0), ptr_mode,
9296 len_rtx, TYPE_MODE (sizetype));
9298 /* Copy word part most expediently. */
9300 = emit_block_move (dest_mem, src_mem, len_rtx,
9301 MIN (src_align, dest_align));
9304 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9309 case BUILT_IN_MEMSET:
9310 /* If not optimizing, call the library function. */
9311 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9315 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9316 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9317 || TREE_CHAIN (arglist) == 0
9318 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9320 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9322 != (TREE_CODE (TREE_TYPE
9324 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9328 tree dest = TREE_VALUE (arglist);
9329 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9330 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9333 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9334 rtx dest_mem, dest_addr, len_rtx;
9336 /* If DEST is not a pointer type, don't do this
9337 operation in-line. */
9338 if (dest_align == 0)
9341 /* If the arguments have side-effects, then we can only evaluate
9342 them at most once. The following code evaluates them twice if
9343 they are not constants because we break out to expand_call
9344 in that case. They can't be constants if they have side-effects
9345 so we can check for that first. Alternatively, we could call
9346 save_expr to make multiple evaluation safe. */
9347 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9350 /* If VAL is not 0, don't do this operation in-line. */
9351 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9354 /* If LEN does not expand to a constant, don't do this
9355 operation in-line. */
9356 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9357 if (GET_CODE (len_rtx) != CONST_INT)
9360 dest_mem = get_memory_rtx (dest);
9362 /* Just check DST is writable and mark it as readable. */
9363 if (current_function_check_memory_usage)
9364 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
9365 XEXP (dest_mem, 0), ptr_mode,
9366 len_rtx, TYPE_MODE (sizetype),
9367 GEN_INT (MEMORY_USE_WO),
9368 TYPE_MODE (integer_type_node));
9371 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
9374 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
9379 /* These comparison functions need an instruction that returns an actual
9380 index. An ordinary compare that just sets the condition codes
9382 #ifdef HAVE_cmpstrsi
9383 case BUILT_IN_STRCMP:
9384 /* If not optimizing, call the library function. */
9385 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9388 /* If we need to check memory accesses, call the library function. */
9389 if (current_function_check_memory_usage)
9393 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9394 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9395 || TREE_CHAIN (arglist) == 0
9396 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9398 else if (!HAVE_cmpstrsi)
9401 tree arg1 = TREE_VALUE (arglist);
9402 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9405 len = c_strlen (arg1);
9407 len = size_binop (PLUS_EXPR, integer_one_node, len);
9408 len2 = c_strlen (arg2);
9410 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
9412 /* If we don't have a constant length for the first, use the length
9413 of the second, if we know it. We don't require a constant for
9414 this case; some cost analysis could be done if both are available
9415 but neither is constant. For now, assume they're equally cheap.
9417 If both strings have constant lengths, use the smaller. This
9418 could arise if optimization results in strcpy being called with
9419 two fixed strings, or if the code was machine-generated. We should
9420 add some code to the `memcmp' handler below to deal with such
9421 situations, someday. */
9422 if (!len || TREE_CODE (len) != INTEGER_CST)
9429 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9431 if (tree_int_cst_lt (len2, len))
9435 chainon (arglist, build_tree_list (NULL_TREE, len));
9439 case BUILT_IN_MEMCMP:
9440 /* If not optimizing, call the library function. */
9441 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9444 /* If we need to check memory accesses, call the library function. */
9445 if (current_function_check_memory_usage)
9449 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9450 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9451 || TREE_CHAIN (arglist) == 0
9452 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9453 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9454 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9456 else if (!HAVE_cmpstrsi)
9459 tree arg1 = TREE_VALUE (arglist);
9460 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9461 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9465 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9467 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9468 enum machine_mode insn_mode
9469 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
9471 /* If we don't have POINTER_TYPE, call the function. */
9472 if (arg1_align == 0 || arg2_align == 0)
9474 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9475 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9479 /* Make a place to write the result of the instruction. */
9482 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9483 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9484 result = gen_reg_rtx (insn_mode);
9486 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9487 get_memory_rtx (arg2),
9488 expand_expr (len, NULL_RTX, VOIDmode, 0),
9489 GEN_INT (MIN (arg1_align, arg2_align))));
9491 /* Return the value in the proper mode for this function. */
9492 mode = TYPE_MODE (TREE_TYPE (exp));
9493 if (GET_MODE (result) == mode)
9495 else if (target != 0)
9497 convert_move (target, result, 0);
9501 return convert_to_mode (mode, result, 0);
9504 case BUILT_IN_STRCMP:
9505 case BUILT_IN_MEMCMP:
9509 case BUILT_IN_SETJMP:
9511 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9515 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9517 rtx lab = gen_label_rtx ();
9518 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9523 /* __builtin_longjmp is passed a pointer to an array of five words.
9524 It's similar to the C library longjmp function but works with
9525 __builtin_setjmp above. */
9526 case BUILT_IN_LONGJMP:
9527 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9528 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9532 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9534 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9535 NULL_RTX, VOIDmode, 0);
9537 if (value != const1_rtx)
9539 error ("__builtin_longjmp second argument must be 1");
9543 expand_builtin_longjmp (buf_addr, value);
9550 emit_insn (gen_trap ());
9553 error ("__builtin_trap not supported by this target");
9557 /* Various hooks for the DWARF 2 __throw routine. */
9558 case BUILT_IN_UNWIND_INIT:
9559 expand_builtin_unwind_init ();
9561 case BUILT_IN_DWARF_CFA:
9562 return virtual_cfa_rtx;
9563 #ifdef DWARF2_UNWIND_INFO
9564 case BUILT_IN_DWARF_FP_REGNUM:
9565 return expand_builtin_dwarf_fp_regnum ();
9566 case BUILT_IN_DWARF_REG_SIZE:
9567 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
9569 case BUILT_IN_FROB_RETURN_ADDR:
9570 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9571 case BUILT_IN_EXTRACT_RETURN_ADDR:
9572 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9573 case BUILT_IN_EH_RETURN:
9574 expand_builtin_eh_return (TREE_VALUE (arglist),
9575 TREE_VALUE (TREE_CHAIN (arglist)),
9576 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
9579 default: /* just do library call, if unknown builtin */
9580 error ("built-in function `%s' not currently supported",
9581 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9584 /* The switch statement above can drop through to cause the function
9585 to be called normally. */
9587 return expand_call (exp, target, ignore);
9590 /* Built-in functions to perform an untyped call and return. */
9592 /* For each register that may be used for calling a function, this
9593 gives a mode used to copy the register's value. VOIDmode indicates
9594 the register is not used for calling a function. If the machine
9595 has register windows, this gives only the outbound registers.
9596 INCOMING_REGNO gives the corresponding inbound register. */
9597 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9599 /* For each register that may be used for returning values, this gives
9600 a mode used to copy the register's value. VOIDmode indicates the
9601 register is not used for returning values. If the machine has
9602 register windows, this gives only the outbound registers.
9603 INCOMING_REGNO gives the corresponding inbound register. */
9604 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9606 /* For each register that may be used for calling a function, this
9607 gives the offset of that register into the block returned by
9608 __builtin_apply_args. 0 indicates that the register is not
9609 used for calling a function. */
9610 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9612 /* Return the offset of register REGNO into the block returned by
9613 __builtin_apply_args. This is not declared static, since it is
9614 needed in objc-act.c. */
9617 apply_args_register_offset (regno)
/* Return the byte offset of hard register REGNO inside the block built by
   __builtin_apply_args.  Deliberately not static: the comment above notes it
   is needed by objc-act.c.  NOTE(review): parameter declaration and braces
   fall outside this extract.  */
9622   /* Arguments are always put in outgoing registers (in the argument
9623      block) if such make sense. */
9624 #ifdef OUTGOING_REGNO
/* On register-window targets, map the inbound register number to the
   outbound one that apply_args_size used when laying out the block.  */
9625   regno = OUTGOING_REGNO(regno);
/* Offsets are recorded by apply_args_size; 0 means REGNO is not used for
   argument passing.  */
9627   return apply_args_reg_offset[regno];
9630 /* Return the size required for the block returned by __builtin_apply_args,
9631 and initialize apply_args_mode. */
/* Body of apply_args_size: compute (once) the size in bytes of the block
   returned by __builtin_apply_args, and fill in apply_args_mode[] and
   apply_args_reg_offset[] as a side effect.  The result is cached in the
   function-local static SIZE.  */
9636   static int size = -1;
9638   enum machine_mode mode;
9640   /* The values computed by this function never change. */
9643       /* The first value is the incoming arg-pointer. */
9644       size = GET_MODE_SIZE (Pmode);
9646       /* The second value is the structure value address unless this is
9647 	 passed as an "invisible" first argument. */
9648       if (struct_value_rtx)
9649 	size += GET_MODE_SIZE (Pmode);
/* Lay out a save slot for every hard register that can carry an argument.  */
9651       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9652 	if (FUNCTION_ARG_REGNO_P (regno))
9654 	    /* Search for the proper mode for copying this register's
9655 	       value.  I'm not sure this is right, but it works so far. */
9656 	    enum machine_mode best_mode = VOIDmode;
/* First try the integer modes, requiring a single-register mode.  */
9658 	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9660 		 mode = GET_MODE_WIDER_MODE (mode))
9661 	      if (HARD_REGNO_MODE_OK (regno, mode)
9662 		  && HARD_REGNO_NREGS (regno, mode) == 1)
/* Fall back to float modes if no integer mode fit; also require a
   working move pattern for the mode.  */
9665 	    if (best_mode == VOIDmode)
9666 	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9668 		   mode = GET_MODE_WIDER_MODE (mode))
9669 		if (HARD_REGNO_MODE_OK (regno, mode)
9670 		    && (mov_optab->handlers[(int) mode].insn_code
9671 			!= CODE_FOR_nothing))
/* No usable mode at all is a compiler bug on this target.  */
9675 	    if (mode == VOIDmode)
/* Round the running size up to the mode's alignment, record the offset
   for apply_args_register_offset, and advance past the slot.  */
9678 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9679 	    if (size % align != 0)
9680 	      size = CEIL (size, align) * align;
9681 	    apply_args_reg_offset[regno] = size;
9682 	    size += GET_MODE_SIZE (mode);
9683 	    apply_args_mode[regno] = mode;
/* Non-argument registers get VOIDmode / offset 0 so later walks skip them.  */
9687 	    apply_args_mode[regno] = VOIDmode;
9688 	    apply_args_reg_offset[regno] = 0;
9694 /* Return the size required for the block returned by __builtin_apply,
9695 and initialize apply_result_mode. */
9698 apply_result_size ()
/* Compute (once) the size in bytes of the block used by __builtin_apply to
   save function return registers, filling in apply_result_mode[] as a side
   effect.  Cached in the function-local static SIZE; parallels
   apply_args_size above but keyed on FUNCTION_VALUE_REGNO_P.  */
9700   static int size = -1;
9702   enum machine_mode mode;
9704   /* The values computed by this function never change. */
/* Lay out a save slot for every hard register that can return a value.  */
9709       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9710 	if (FUNCTION_VALUE_REGNO_P (regno))
9712 	    /* Search for the proper mode for copying this register's
9713 	       value.  I'm not sure this is right, but it works so far. */
9714 	    enum machine_mode best_mode = VOIDmode;
/* Prefer the widest integer mode the register can hold...  */
9716 	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9718 		 mode = GET_MODE_WIDER_MODE (mode))
9719 	      if (HARD_REGNO_MODE_OK (regno, mode))
/* ...otherwise fall back to a float mode that also has a move pattern.  */
9722 	    if (best_mode == VOIDmode)
9723 	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9725 		   mode = GET_MODE_WIDER_MODE (mode))
9726 		if (HARD_REGNO_MODE_OK (regno, mode)
9727 		    && (mov_optab->handlers[(int) mode].insn_code
9728 			!= CODE_FOR_nothing))
/* No usable mode for a value register is a target definition bug.  */
9732 	    if (mode == VOIDmode)
/* Align the running size to the mode and advance past the slot.  Unlike
   apply_args_size, no per-register offset table is kept here.  */
9735 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9736 	    if (size % align != 0)
9737 	      size = CEIL (size, align) * align;
9738 	    size += GET_MODE_SIZE (mode);
9739 	    apply_result_mode[regno] = mode;
9742 	    apply_result_mode[regno] = VOIDmode;
9744       /* Allow targets that use untyped_call and untyped_return to override
9745 	 the size so that machine-specific information can be stored here. */
9746 #ifdef APPLY_RESULT_SIZE
9747       size = APPLY_RESULT_SIZE;
9753 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9754 /* Create a vector describing the result block RESULT. If SAVEP is true,
9755 the result block is used to save the values; otherwise it is used to
9756 restore the values. */
9759 result_vector (savep, result)
/* Build a PARALLEL of SETs describing the result block RESULT, one SET per
   return register recorded in apply_result_mode[].  If SAVEP, the SETs copy
   register -> memory (saving values after a call); otherwise memory ->
   register (restoring values for an untyped return).  */
9763   int regno, size, align, nelts;
9764   enum machine_mode mode;
/* Worst case: one SET per hard register; alloca keeps this off the heap.  */
9766   rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9769   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9770     if ((mode = apply_result_mode[regno]) != VOIDmode)
/* SIZE tracks the running offset into RESULT; it must be rounded to each
   mode's alignment exactly as apply_result_size laid the block out.  */
9772 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9773 	if (size % align != 0)
9774 	  size = CEIL (size, align) * align;
/* When restoring, use the inbound (INCOMING_REGNO) register on
   register-window targets.  */
9775 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9776 	mem = change_address (result, mode,
9777 			      plus_constant (XEXP (result, 0), size));
9778 	savevec[nelts++] = (savep
9779 			    ? gen_rtx_SET (VOIDmode, mem, reg)
9780 			    : gen_rtx_SET (VOIDmode, reg, mem));
9781 	size += GET_MODE_SIZE (mode);
9783   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9785 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9787 /* Save the state required to perform an untyped call with the same
9788 arguments as were passed to the current function. */
9791 expand_builtin_apply_args ()
/* Expand __builtin_apply_args: save the current function's incoming
   argument registers, arg pointer, and (if any) structure-value address
   into a stack block, and return the address of that block as an rtx.
   Layout must match apply_args_size / apply_args_register_offset.  */
9794   int size, align, regno;
9795   enum machine_mode mode;
9797   /* Create a block where the arg-pointer, structure value address,
9798      and argument registers can be saved. */
9799   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9801   /* Walk past the arg-pointer and structure value address. */
9802   size = GET_MODE_SIZE (Pmode);
9803   if (struct_value_rtx)
9804     size += GET_MODE_SIZE (Pmode);
9806   /* Save each register used in calling a function to the block. */
9807   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9808     if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Round to the mode's alignment, mirroring apply_args_size's layout.  */
9812 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9813 	if (size % align != 0)
9814 	  size = CEIL (size, align) * align;
/* Read the *incoming* register (register-window targets).  */
9816 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9819 	/* For reg-stack.c's stack register household.
9820 	   Compare with a similar piece of code in function.c. */
9822 	emit_insn (gen_rtx_USE (mode, tem));
/* Store the register's value at its assigned offset in the block.  */
9825 	emit_move_insn (change_address (registers, mode,
9826 					plus_constant (XEXP (registers, 0),
9829 	size += GET_MODE_SIZE (mode);
9832   /* Save the arg pointer to the block. */
9833   emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9834 		  copy_to_reg (virtual_incoming_args_rtx));
9835   size = GET_MODE_SIZE (Pmode);
9837   /* Save the structure value address unless this is passed as an
9838      "invisible" first argument. */
9839   if (struct_value_incoming_rtx)
9841       emit_move_insn (change_address (registers, Pmode,
9842 				      plus_constant (XEXP (registers, 0),
9844 		      copy_to_reg (struct_value_incoming_rtx));
9845       size += GET_MODE_SIZE (Pmode);
9848   /* Return the address of the block. */
9849   return copy_addr_to_reg (XEXP (registers, 0));
9852 /* Perform an untyped call and save the state required to perform an
9853 untyped return of whatever value was returned by the given function. */
9856 expand_builtin_apply (function, arguments, argsize)
9857      rtx function, arguments, argsize;
/* Expand __builtin_apply: call FUNCTION, passing it a copy of the argument
   block whose address is ARGUMENTS (as produced by __builtin_apply_args)
   and whose size in bytes is ARGSIZE, then save every possible return
   register into a result block and return that block's address.  */
9859   int size, align, regno;
9860   enum machine_mode mode;
9861   rtx incoming_args, result, reg, dest, call_insn;
9862   rtx old_stack_level = 0;
9863   rtx call_fusage = 0;
9865   /* Create a block where the return registers can be saved. */
9866   result = assign_stack_local (BLKmode, apply_result_size (), -1);
9868   /* ??? The argsize value should be adjusted here. */
9870   /* Fetch the arg pointer from the ARGUMENTS block. */
9871   incoming_args = gen_reg_rtx (Pmode);
9872   emit_move_insn (incoming_args,
9873 		  gen_rtx_MEM (Pmode, arguments));
9874 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer is past the arguments;
   step back by ARGSIZE to reach their start.  */
9875   incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9876 				incoming_args, 0, OPTAB_LIB_WIDEN);
9879   /* Perform postincrements before actually calling the function. */
9882   /* Push a new argument block and copy the arguments. */
9883   do_pending_stack_adjust ();
9885   /* Save the stack with nonlocal if available */
9886 #ifdef HAVE_save_stack_nonlocal
9887   if (HAVE_save_stack_nonlocal)
9888     emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9891     emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9893   /* Push a block of memory onto the stack to store the memory arguments.
9894      Save the address in a register, and copy the memory arguments.  ??? I
9895      haven't figured out how the calling convention macros effect this,
9896      but it's likely that the source and/or destination addresses in
9897      the block copy will need updating in machine specific ways. */
9898   dest = allocate_dynamic_stack_space (argsize, 0, 0);
9899   emit_block_move (gen_rtx_MEM (BLKmode, dest),
9900 		   gen_rtx_MEM (BLKmode, incoming_args),
9902 		   PARM_BOUNDARY / BITS_PER_UNIT);
9904   /* Refer to the argument block. */
9906   arguments = gen_rtx_MEM (BLKmode, arguments);
9908   /* Walk past the arg-pointer and structure value address. */
9909   size = GET_MODE_SIZE (Pmode);
9910   if (struct_value_rtx)
9911     size += GET_MODE_SIZE (Pmode);
9913   /* Restore each of the registers previously saved.  Make USE insns
9914      for each of these registers for use in making the call. */
9915   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9916     if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Offsets must match the layout written by expand_builtin_apply_args.  */
9918 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9919 	if (size % align != 0)
9920 	  size = CEIL (size, align) * align;
9921 	reg = gen_rtx_REG (mode, regno);
9922 	emit_move_insn (reg,
9923 			change_address (arguments, mode,
9924 					plus_constant (XEXP (arguments, 0),
/* Record the register in CALL_FUSAGE so the call is known to read it.  */
9927 	use_reg (&call_fusage, reg);
9928 	size += GET_MODE_SIZE (mode);
9931   /* Restore the structure value address unless this is passed as an
9932      "invisible" first argument. */
9933   size = GET_MODE_SIZE (Pmode);
9934   if (struct_value_rtx)
9936       rtx value = gen_reg_rtx (Pmode);
9937       emit_move_insn (value,
9938 		      change_address (arguments, Pmode,
9939 				      plus_constant (XEXP (arguments, 0),
9941       emit_move_insn (struct_value_rtx, value);
9942       if (GET_CODE (struct_value_rtx) == REG)
9943 	use_reg (&call_fusage, struct_value_rtx);
9944       size += GET_MODE_SIZE (Pmode);
9947   /* All arguments and registers used for the call are set up by now! */
9948   function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9950   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
9951      and we don't want to load it into a register as an optimization,
9952      because prepare_call_address already did it if it should be done. */
9953   if (GET_CODE (function) != SYMBOL_REF)
9954     function = memory_address (FUNCTION_MODE, function);
9956   /* Generate the actual call instruction and save the return value. */
9957 #ifdef HAVE_untyped_call
/* Preferred path: untyped_call saves all return registers itself via the
   PARALLEL built by result_vector.  */
9958   if (HAVE_untyped_call)
9959     emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9960 				      result, result_vector (1, result)));
9963 #ifdef HAVE_call_value
9964   if (HAVE_call_value)
9968       /* Locate the unique return register.  It is not possible to
9969 	 express a call that sets more than one return register using
9970 	 call_value; use untyped_call for that.  In fact, untyped_call
9971 	 only needs to save the return registers in the given block. */
9972       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9973 	if ((mode = apply_result_mode[regno]) != VOIDmode)
9976 	      abort (); /* HAVE_untyped_call required.  */
9977 	    valreg = gen_rtx_REG (mode, regno);
9980       emit_call_insn (gen_call_value (valreg,
9981 				      gen_rtx_MEM (FUNCTION_MODE, function),
9982 				      const0_rtx, NULL_RTX, const0_rtx));
/* Save the single return register into the result block.  */
9984       emit_move_insn (change_address (result, GET_MODE (valreg),
9992   /* Find the CALL insn we just emitted. */
9993   for (call_insn = get_last_insn ();
9994        call_insn && GET_CODE (call_insn) != CALL_INSN;
9995        call_insn = PREV_INSN (call_insn))
10001   /* Put the register usage information on the CALL.  If there is already
10002      some usage information, put ours at the end. */
10003   if (CALL_INSN_FUNCTION_USAGE (call_insn))
/* Walk to the end of the existing usage list and append ours.  */
10007       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10008 	   link = XEXP (link, 1))
10011       XEXP (link, 1) = call_fusage;
10014     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
10016   /* Restore the stack. */
10017 #ifdef HAVE_save_stack_nonlocal
10018   if (HAVE_save_stack_nonlocal)
10019     emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10022     emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10024   /* Return the address of the result block. */
10025   return copy_addr_to_reg (XEXP (result, 0));
10028 /* Perform an untyped return. */
10031 expand_builtin_return (result)
/* Expand __builtin_return: RESULT is the address of a result block as
   produced by __builtin_apply.  Reload every saved return register from it
   and return from the current function with those values.  */
10034   int size, align, regno;
10035   enum machine_mode mode;
10037   rtx call_fusage = 0;
/* Ensure apply_result_mode[] is initialized (called for its side effect;
   the size itself is not needed here).  */
10039   apply_result_size ();
10040   result = gen_rtx_MEM (BLKmode, result);
10042 #ifdef HAVE_untyped_return
/* Preferred path: the target's untyped_return pattern restores all the
   registers itself via the PARALLEL from result_vector.  */
10043   if (HAVE_untyped_return)
10045       emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10051   /* Restore the return value and note that each value is used. */
10053   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10054     if ((mode = apply_result_mode[regno]) != VOIDmode)
/* Offsets must match the layout used when the block was written.  */
10056 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10057 	if (size % align != 0)
10058 	  size = CEIL (size, align) * align;
10059 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10060 	emit_move_insn (reg,
10061 			change_address (result, mode,
10062 					plus_constant (XEXP (result, 0),
/* Accumulate USE insns in a sequence so they can all be emitted just
   before the return, keeping the registers live until then.  */
10065 	push_to_sequence (call_fusage);
10066 	emit_insn (gen_rtx_USE (VOIDmode, reg));
10067 	call_fusage = get_insns ();
10069 	size += GET_MODE_SIZE (mode);
10072   /* Put the USE insns before the return. */
10073   emit_insns (call_fusage);
10075   /* Return whatever values was restored by jumping directly to the end
10076      of the function. */
10077   expand_null_return ();
10080 /* Expand code for a post- or pre- increment or decrement
10081 and return the RTX for the result.
10082 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
10085 expand_increment (exp, post, ignore)
10089 register rtx op0, op1;
10090 register rtx temp, value;
10091 register tree incremented = TREE_OPERAND (exp, 0);
10092 optab this_optab = add_optab;
10094 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10095 int op0_is_copy = 0;
10096 int single_insn = 0;
10097 /* 1 means we can't store into OP0 directly,
10098 because it is a subreg narrower than a word,
10099 and we don't dare clobber the rest of the word. */
10100 int bad_subreg = 0;
10102 /* Stabilize any component ref that might need to be
10103 evaluated more than once below. */
10105 || TREE_CODE (incremented) == BIT_FIELD_REF
10106 || (TREE_CODE (incremented) == COMPONENT_REF
10107 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10108 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10109 incremented = stabilize_reference (incremented);
10110 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10111 ones into save exprs so that they don't accidentally get evaluated
10112 more than once by the code below. */
10113 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10114 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10115 incremented = save_expr (incremented);
10117 /* Compute the operands as RTX.
10118 Note whether OP0 is the actual lvalue or a copy of it:
10119 I believe it is a copy iff it is a register or subreg
10120 and insns were generated in computing it. */
10122 temp = get_last_insn ();
10123 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
10125 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10126 in place but instead must do sign- or zero-extension during assignment,
10127 so we copy it into a new register and let the code below use it as
10130 Note that we can safely modify this SUBREG since it is know not to be
10131 shared (it was made by the expand_expr call above). */
10133 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10136 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10140 else if (GET_CODE (op0) == SUBREG
10141 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10143 /* We cannot increment this SUBREG in place. If we are
10144 post-incrementing, get a copy of the old value. Otherwise,
10145 just mark that we cannot increment in place. */
10147 op0 = copy_to_reg (op0);
10152 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10153 && temp != get_last_insn ());
10154 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10155 EXPAND_MEMORY_USE_BAD);
10157 /* Decide whether incrementing or decrementing. */
10158 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10159 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10160 this_optab = sub_optab;
10162 /* Convert decrement by a constant into a negative increment. */
10163 if (this_optab == sub_optab
10164 && GET_CODE (op1) == CONST_INT)
10166 op1 = GEN_INT (- INTVAL (op1));
10167 this_optab = add_optab;
10170 /* For a preincrement, see if we can do this with a single instruction. */
10173 icode = (int) this_optab->handlers[(int) mode].insn_code;
10174 if (icode != (int) CODE_FOR_nothing
10175 /* Make sure that OP0 is valid for operands 0 and 1
10176 of the insn we want to queue. */
10177 && (*insn_operand_predicate[icode][0]) (op0, mode)
10178 && (*insn_operand_predicate[icode][1]) (op0, mode)
10179 && (*insn_operand_predicate[icode][2]) (op1, mode))
10183 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10184 then we cannot just increment OP0. We must therefore contrive to
10185 increment the original value. Then, for postincrement, we can return
10186 OP0 since it is a copy of the old value. For preincrement, expand here
10187 unless we can do it with a single insn.
10189 Likewise if storing directly into OP0 would clobber high bits
10190 we need to preserve (bad_subreg). */
10191 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
10193 /* This is the easiest way to increment the value wherever it is.
10194 Problems with multiple evaluation of INCREMENTED are prevented
10195 because either (1) it is a component_ref or preincrement,
10196 in which case it was stabilized above, or (2) it is an array_ref
10197 with constant index in an array in a register, which is
10198 safe to reevaluate. */
10199 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10200 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10201 ? MINUS_EXPR : PLUS_EXPR),
10204 TREE_OPERAND (exp, 1));
10206 while (TREE_CODE (incremented) == NOP_EXPR
10207 || TREE_CODE (incremented) == CONVERT_EXPR)
10209 newexp = convert (TREE_TYPE (incremented), newexp);
10210 incremented = TREE_OPERAND (incremented, 0);
10213 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10214 return post ? op0 : temp;
10219 /* We have a true reference to the value in OP0.
10220 If there is an insn to add or subtract in this mode, queue it.
10221 Queueing the increment insn avoids the register shuffling
10222 that often results if we must increment now and first save
10223 the old value for subsequent use. */
10225 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
10226 op0 = stabilize (op0);
10229 icode = (int) this_optab->handlers[(int) mode].insn_code;
10230 if (icode != (int) CODE_FOR_nothing
10231 /* Make sure that OP0 is valid for operands 0 and 1
10232 of the insn we want to queue. */
10233 && (*insn_operand_predicate[icode][0]) (op0, mode)
10234 && (*insn_operand_predicate[icode][1]) (op0, mode))
10236 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10237 op1 = force_reg (mode, op1);
10239 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10241 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10243 rtx addr = (general_operand (XEXP (op0, 0), mode)
10244 ? force_reg (Pmode, XEXP (op0, 0))
10245 : copy_to_reg (XEXP (op0, 0)));
10248 op0 = change_address (op0, VOIDmode, addr);
10249 temp = force_reg (GET_MODE (op0), op0);
10250 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10251 op1 = force_reg (mode, op1);
10253 /* The increment queue is LIFO, thus we have to `queue'
10254 the instructions in reverse order. */
10255 enqueue_insn (op0, gen_move_insn (op0, temp));
10256 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10261 /* Preincrement, or we can't increment with one simple insn. */
10263 /* Save a copy of the value before inc or dec, to return it later. */
10264 temp = value = copy_to_reg (op0);
10266 /* Arrange to return the incremented value. */
10267 /* Copy the rtx because expand_binop will protect from the queue,
10268 and the results of that would be invalid for us to return
10269 if our caller does emit_queue before using our result. */
10270 temp = copy_rtx (value = op0);
10272 /* Increment however we can. */
10273 op1 = expand_binop (mode, this_optab, value, op1,
10274 current_function_check_memory_usage ? NULL_RTX : op0,
10275 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10276 /* Make sure the value is stored into OP0. */
10278 emit_move_insn (op0, op1);
10283 /* Expand all function calls contained within EXP, innermost ones first.
10284 But don't look within expressions that have sequence points.
10285 For each CALL_EXPR, record the rtx for its value
10286 in the CALL_EXPR_RTL field. */
10289 preexpand_calls (exp)
/* NOTE(review): this excerpt is elided -- some lines (braces, `tree exp'
   declaration, several case labels) from the original are missing.  */
10292 register int nops, i;
/* TYPE here is the tree-code *class* character, not a type node.  */
10293 int type = TREE_CODE_CLASS (TREE_CODE (exp));
/* Global switch: when clear, call pre-expansion is disabled entirely.  */
10295 if (! do_preexpand_calls)
10298 /* Only expressions and references can contain calls. */
10300 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10303 switch (TREE_CODE (exp))
/* Apparently the CALL_EXPR case (label elided): expand the call once and
   cache its result rtx in CALL_EXPR_RTL, unless one of the guards below
   says to leave it alone.  */
10306 /* Do nothing if already expanded. */
10307 if (CALL_EXPR_RTL (exp) != 0
10308 /* Do nothing if the call returns a variable-sized object. */
10309 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10310 /* Do nothing to built-in functions. */
10311 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10312 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10314 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10317 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10320 case COMPOUND_EXPR:
10322 case TRUTH_ANDIF_EXPR:
10323 case TRUTH_ORIF_EXPR:
10324 /* If we find one of these, then we can be sure
10325 the adjust will be done for it (since it makes jumps).
10326 Do it now, so that if this is inside an argument
10327 of a function, we don't get the stack adjustment
10328 after some other args have already been pushed. */
10329 do_pending_stack_adjust ();
/* Expressions with sequence points or their own RTL: do not descend.  */
10334 case WITH_CLEANUP_EXPR:
10335 case CLEANUP_POINT_EXPR:
10336 case TRY_CATCH_EXPR:
/* A SAVE_EXPR already expanded must not be re-expanded.  */
10340 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into every operand whose tree-code class can
   itself contain calls.  */
10347 nops = tree_code_length[(int) TREE_CODE (exp)];
10348 for (i = 0; i < nops; i++)
10349 if (TREE_OPERAND (exp, i) != 0)
10351 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10352 if (type == 'e' || type == '<' || type == '1' || type == '2'
10354 preexpand_calls (TREE_OPERAND (exp, i));
10358 /* At the start of a function, record that we have no previously-pushed
10359 arguments waiting to be popped. */
/* Reset the count of stack bytes occupied by unpopped arguments;
   called at the start of a function, when nothing is pending yet.  */
10362 init_pending_stack_adjust ()
10364 pending_stack_adjust = 0;
10367 /* When exiting from function, if safe, clear out any pending stack adjust
10368 so the adjustment won't get done.
10370 Note, if the current function calls alloca, then it must have a
10371 frame pointer regardless of the value of flag_omit_frame_pointer. */
10374 clear_pending_stack_adjust ()
10376 #ifdef EXIT_IGNORE_STACK
/* NOTE(review): the first line of this condition (presumably an
   `if (optimize > 0' or similar) is elided from this excerpt.
   The adjustment may only be discarded when the function exit code
   ignores the stack pointer and this function cannot be inlined
   (an inlined copy would still need the stack restored).  */
10378 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10379 && EXIT_IGNORE_STACK
10380 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10381 && ! flag_inline_functions)
10382 pending_stack_adjust = 0;
10386 /* Pop any previously-pushed arguments that have not been popped yet. */
10389 do_pending_stack_adjust ()
/* When defer-pop is inhibited the adjustment must stay pending;
   otherwise emit it (if any) and clear the counter.  */
10391 if (inhibit_defer_pop == 0)
10393 if (pending_stack_adjust != 0)
10394 adjust_stack (GEN_INT (pending_stack_adjust));
10395 pending_stack_adjust = 0;
10399 /* Expand conditional expressions. */
10401 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10402 LABEL is an rtx of code CODE_LABEL, in this function and all the
10406 jumpifnot (exp, label)
/* LABEL is passed as the false-label: do_jump branches there when EXP
   evaluates to zero, and falls through otherwise.  */
10410 do_jump (exp, label, NULL_RTX);
10413 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
10416 jumpif (exp, label)
/* LABEL is passed as the true-label: do_jump branches there when EXP
   evaluates to nonzero, and falls through otherwise.  */
10420 do_jump (exp, NULL_RTX, label);
10423 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10424 the result is zero, or IF_TRUE_LABEL if the result is one.
10425 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10426 meaning fall through in that case.
10428 do_jump always does any pending stack adjust except when it does not
10429 actually perform a jump. An example where there is no jump
10430 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10432 This function is responsible for optimizing cases such as
10433 &&, || and comparison operators in EXP. */
10436 do_jump (exp, if_false_label, if_true_label)
/* NOTE(review): heavily elided excerpt -- the `switch (code)' statement,
   most case labels, braces and some declarations are missing between the
   visible lines.  Comments below identify the visible case bodies only
   where the code itself makes the case unambiguous.  */
10438 rtx if_false_label, if_true_label;
10440 register enum tree_code code = TREE_CODE (exp);
10441 /* Some cases need to create a label to jump to
10442 in order to properly fall through.
10443 These cases set DROP_THROUGH_LABEL nonzero. */
10444 rtx drop_through_label = 0;
10446 rtx comparison = 0;
10449 enum machine_mode mode;
10451 #ifdef MAX_INTEGER_COMPUTATION_MODE
10452 check_max_integer_computation_mode (exp);
/* Constant operand: branch direction is known at compile time.  */
10463 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10469 /* This is not true with #pragma weak */
10471 /* The address of something can never be zero. */
10473 emit_jump (if_true_label);
/* NOTE(review): case label elided; this appears to be a conversion case
   checking whether the operand is a (possibly bit-field) reference.  */
10478 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10479 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10480 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10483 /* If we are narrowing the operand, we have to do the compare in the
10485 if ((TYPE_PRECISION (TREE_TYPE (exp))
10486 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10488 case NON_LVALUE_EXPR:
10489 case REFERENCE_EXPR:
10494 /* These cannot change zero->non-zero or vice versa. */
10495 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10499 /* This is never less insns than evaluating the PLUS_EXPR followed by
10500 a test and can be longer if the test is eliminated. */
10502 /* Reduce to minus. */
10503 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10504 TREE_OPERAND (exp, 0),
10505 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10506 TREE_OPERAND (exp, 1))));
10507 /* Process as MINUS. */
10511 /* Non-zero iff operands of minus differ. */
10512 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10513 TREE_OPERAND (exp, 0),
10514 TREE_OPERAND (exp, 1)),
/* BIT_AND_EXPR (label elided): shrink the comparison to the smallest
   integer mode that holds the constant mask, when such a compare exists.  */
10519 /* If we are AND'ing with a small constant, do this comparison in the
10520 smallest type that fits. If the machine doesn't have comparisons
10521 that small, it will be converted back to the wider comparison.
10522 This helps if we are testing the sign bit of a narrower object.
10523 combine can't do this for us because it can't know whether a
10524 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10526 if (! SLOW_BYTE_ACCESS
10527 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10528 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10529 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10530 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10531 && (type = type_for_mode (mode, 1)) != 0
10532 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10533 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10534 != CODE_FOR_nothing))
10536 do_jump (convert (type, exp), if_false_label, if_true_label);
/* Logical NOT: simply swap the two target labels.  */
10541 case TRUTH_NOT_EXPR:
10542 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* Short-circuit AND: a false first operand jumps straight to the false
   label; only then is the second operand evaluated.  */
10545 case TRUTH_ANDIF_EXPR:
10546 if (if_false_label == 0)
10547 if_false_label = drop_through_label = gen_label_rtx ();
10548 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10549 start_cleanup_deferral ();
10550 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10551 end_cleanup_deferral ();
/* Short-circuit OR: a true first operand jumps straight to the true
   label; only then is the second operand evaluated.  */
10554 case TRUTH_ORIF_EXPR:
10555 if (if_true_label == 0)
10556 if_true_label = drop_through_label = gen_label_rtx ();
10557 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10558 start_cleanup_deferral ();
10559 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10560 end_cleanup_deferral ();
/* Comma operator: evaluate the first operand for side effects only,
   then jump on the value of the second.  */
10563 case COMPOUND_EXPR:
10564 push_temp_slots ();
10565 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10566 preserve_temp_slots (NULL_RTX);
10567 free_temp_slots ();
10570 do_pending_stack_adjust ();
10571 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10574 case COMPONENT_REF:
10575 case BIT_FIELD_REF:
10578 int bitsize, bitpos, unsignedp;
10579 enum machine_mode mode;
10585 /* Get description of this reference. We don't actually care
10586 about the underlying object here. */
10587 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10588 &mode, &unsignedp, &volatilep,
/* Jump on a narrowed copy when the field is smaller than EXP's type and
   a compare insn exists in the narrow mode.  */
10591 type = type_for_size (bitsize, unsignedp);
10592 if (! SLOW_BYTE_ACCESS
10593 && type != 0 && bitsize >= 0
10594 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10595 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10596 != CODE_FOR_nothing))
10598 do_jump (convert (type, exp), if_false_label, if_true_label);
/* COND_EXPR (label elided).  */
10605 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10606 if (integer_onep (TREE_OPERAND (exp, 1))
10607 && integer_zerop (TREE_OPERAND (exp, 2)))
10608 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10610 else if (integer_zerop (TREE_OPERAND (exp, 1))
10611 && integer_onep (TREE_OPERAND (exp, 2)))
10612 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General conditional: branch around the THEN arm into the ELSE arm.  */
10616 register rtx label1 = gen_label_rtx ();
10617 drop_through_label = gen_label_rtx ();
10619 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10621 start_cleanup_deferral ();
10622 /* Now the THEN-expression. */
10623 do_jump (TREE_OPERAND (exp, 1),
10624 if_false_label ? if_false_label : drop_through_label,
10625 if_true_label ? if_true_label : drop_through_label);
10626 /* In case the do_jump just above never jumps. */
10627 do_pending_stack_adjust ();
10628 emit_label (label1);
10630 /* Now the ELSE-expression. */
10631 do_jump (TREE_OPERAND (exp, 2),
10632 if_false_label ? if_false_label : drop_through_label,
10633 if_true_label ? if_true_label : drop_through_label);
10634 end_cleanup_deferral ();
/* Apparently the EQ_EXPR case (label elided): complex values compare
   equal iff both real parts AND both imaginary parts are equal.  */
10640 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10642 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10643 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10645 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10646 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10649 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10650 fold (build (EQ_EXPR, TREE_TYPE (exp),
10651 fold (build1 (REALPART_EXPR,
10652 TREE_TYPE (inner_type),
10654 fold (build1 (REALPART_EXPR,
10655 TREE_TYPE (inner_type),
10657 fold (build (EQ_EXPR, TREE_TYPE (exp),
10658 fold (build1 (IMAGPART_EXPR,
10659 TREE_TYPE (inner_type),
10661 fold (build1 (IMAGPART_EXPR,
10662 TREE_TYPE (inner_type),
10664 if_false_label, if_true_label);
/* x == 0 is just logical NOT of x: swap the labels.  */
10667 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10668 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10670 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10671 && !can_compare_p (TYPE_MODE (inner_type)))
10672 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10674 comparison = compare (exp, EQ, EQ);
/* Apparently the NE_EXPR case (label elided): complex values differ iff
   either the real parts OR the imaginary parts differ.  */
10680 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10682 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10683 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10685 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10686 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10689 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10690 fold (build (NE_EXPR, TREE_TYPE (exp),
10691 fold (build1 (REALPART_EXPR,
10692 TREE_TYPE (inner_type),
10694 fold (build1 (REALPART_EXPR,
10695 TREE_TYPE (inner_type),
10697 fold (build (NE_EXPR, TREE_TYPE (exp),
10698 fold (build1 (IMAGPART_EXPR,
10699 TREE_TYPE (inner_type),
10701 fold (build1 (IMAGPART_EXPR,
10702 TREE_TYPE (inner_type),
10704 if_false_label, if_true_label);
/* x != 0 is just x itself as a truth value.  */
10707 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10708 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10710 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10711 && !can_compare_p (TYPE_MODE (inner_type)))
10712 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10714 comparison = compare (exp, NE, NE);
/* Relational cases (LT/LE/GT/GE, labels elided): when the mode has no
   direct compare insn, do a multiword compare; a<b is handled as b>a by
   passing SWAP=1, and the complementary relations swap the labels.  */
10719 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10721 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10722 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10724 comparison = compare (exp, LT, LTU);
10728 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10730 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10731 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10733 comparison = compare (exp, LE, LEU);
10737 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10739 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10740 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10742 comparison = compare (exp, GT, GTU);
10746 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10748 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10749 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10751 comparison = compare (exp, GE, GEU);
/* Default: evaluate EXP and branch on whether its value is nonzero.  */
10756 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10758 /* This is not needed any more and causes poor code since it causes
10759 comparisons and tests from non-SI objects to have different code
10761 /* Copy to register to avoid generating bad insns by cse
10762 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10763 if (!cse_not_expected && GET_CODE (temp) == MEM)
10764 temp = copy_to_reg (temp);
10766 do_pending_stack_adjust ();
10767 if (GET_CODE (temp) == CONST_INT)
10768 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10769 else if (GET_CODE (temp) == LABEL_REF)
10770 comparison = const_true_rtx;
10771 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10772 && !can_compare_p (GET_MODE (temp)))
10773 /* Note swapping the labels gives us not-equal. */
10774 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10775 else if (GET_MODE (temp) != VOIDmode)
10776 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10777 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10778 GET_MODE (temp), NULL_RTX, 0);
10783 /* Do any postincrements in the expression that was tested. */
10786 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10787 straight into a conditional jump instruction as the jump condition.
10788 Otherwise, all the work has been done already. */
10790 if (comparison == const_true_rtx)
10793 emit_jump (if_true_label);
10795 else if (comparison == const0_rtx)
10797 if (if_false_label)
10798 emit_jump (if_false_label);
10800 else if (comparison)
10801 do_jump_for_compare (comparison, if_false_label, if_true_label);
10803 if (drop_through_label)
10805 /* If do_jump produces code that might be jumped around,
10806 do any stack adjusts from that code, before the place
10807 where control merges in. */
10808 do_pending_stack_adjust ();
10809 emit_label (drop_through_label);
10813 /* Given a comparison expression EXP for values too wide to be compared
10814 with one insn, test the comparison and jump to the appropriate label.
10815 The code of EXP is ignored; we always test GT if SWAP is 0,
10816 and LT if SWAP is 1. */
10819 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
/* NOTE(review): elided excerpt -- parameter declarations for EXP/SWAP,
   the `int i;' declaration and loop braces are missing.  */
10822 rtx if_false_label, if_true_label;
/* SWAP exchanges the operands, turning the GT test into LT.  */
10824 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10825 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10826 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10827 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10828 rtx drop_through_label = 0;
10829 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* A missing label means "fall through"; materialize one for it.  */
10832 if (! if_true_label || ! if_false_label)
10833 drop_through_label = gen_label_rtx ();
10834 if (! if_true_label)
10835 if_true_label = drop_through_label;
10836 if (! if_false_label)
10837 if_false_label = drop_through_label;
10839 /* Compare a word at a time, high order first. */
10840 for (i = 0; i < nwords; i++)
10843 rtx op0_word, op1_word;
10845 if (WORDS_BIG_ENDIAN)
10847 op0_word = operand_subword_force (op0, i, mode);
10848 op1_word = operand_subword_force (op1, i, mode);
10852 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10853 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10856 /* All but high-order word must be compared as unsigned. */
10857 comp = compare_from_rtx (op0_word, op1_word,
10858 (unsignedp || i > 0) ? GTU : GT,
10859 unsignedp, word_mode, NULL_RTX, 0);
/* This word pair decides it: greater means the whole value is greater.  */
10860 if (comp == const_true_rtx)
10861 emit_jump (if_true_label);
10862 else if (comp != const0_rtx)
10863 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10865 /* Consider lower words only if these are equal. */
/* Not greater and not equal here means less: result is false.  */
10866 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10868 if (comp == const_true_rtx)
10869 emit_jump (if_false_label);
10870 else if (comp != const0_rtx)
10871 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: the strict "greater" test is false.  */
10874 if (if_false_label)
10875 emit_jump (if_false_label);
10876 if (drop_through_label)
10877 emit_label (drop_through_label);
10880 /* Compare OP0 with OP1, word at a time, in mode MODE.
10881 UNSIGNEDP says to do unsigned comparison.
10882 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10885 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
/* RTX-level twin of do_jump_by_parts_greater: operands are already rtx.
   NOTE(review): elided excerpt -- declarations for UNSIGNEDP/OP0/OP1,
   `int i;' and braces are missing between the visible lines.  */
10886 enum machine_mode mode;
10889 rtx if_false_label, if_true_label;
10891 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10892 rtx drop_through_label = 0;
/* A missing label means "fall through"; materialize one for it.  */
10895 if (! if_true_label || ! if_false_label)
10896 drop_through_label = gen_label_rtx ();
10897 if (! if_true_label)
10898 if_true_label = drop_through_label;
10899 if (! if_false_label)
10900 if_false_label = drop_through_label;
10902 /* Compare a word at a time, high order first. */
10903 for (i = 0; i < nwords; i++)
10906 rtx op0_word, op1_word;
10908 if (WORDS_BIG_ENDIAN)
10910 op0_word = operand_subword_force (op0, i, mode);
10911 op1_word = operand_subword_force (op1, i, mode);
10915 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10916 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10919 /* All but high-order word must be compared as unsigned. */
10920 comp = compare_from_rtx (op0_word, op1_word,
10921 (unsignedp || i > 0) ? GTU : GT,
10922 unsignedp, word_mode, NULL_RTX, 0);
/* This word pair decides it: greater means the whole value is greater.  */
10923 if (comp == const_true_rtx)
10924 emit_jump (if_true_label);
10925 else if (comp != const0_rtx)
10926 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10928 /* Consider lower words only if these are equal. */
/* Not greater and not equal here means less: result is false.  */
10929 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10931 if (comp == const_true_rtx)
10932 emit_jump (if_false_label);
10933 else if (comp != const0_rtx)
10934 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: the strict "greater" test is false.  */
10937 if (if_false_label)
10938 emit_jump (if_false_label);
10939 if (drop_through_label)
10940 emit_label (drop_through_label);
10943 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10944 with one insn, test the comparison and jump to the appropriate label. */
10947 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
/* NOTE(review): elided excerpt -- the `tree exp' declaration, `int i;'
   and braces are missing between the visible lines.  */
10949 rtx if_false_label, if_true_label;
10951 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10952 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10953 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10954 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10956 rtx drop_through_label = 0;
10958 if (! if_false_label)
10959 drop_through_label = if_false_label = gen_label_rtx ();
/* Any unequal word pair proves the values unequal; all pairs equal
   means the values are equal.  */
10961 for (i = 0; i < nwords; i++)
10963 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10964 operand_subword_force (op1, i, mode),
10965 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10966 word_mode, NULL_RTX, 0);
/* NOTE(review): the constant-folded branches below look inverted --
   COMP is an EQ test, so const_true_rtx means this word pair is
   statically EQUAL, yet we jump to the "unequal" label (and a
   statically-unequal const0_rtx emits nothing).  The general
   do_jump_for_compare path jumps to IF_FALSE_LABEL when EQ is false,
   which is the opposite convention.  Likely latent/dead (constant
   operands are normally folded before reaching here) -- verify.  */
10967 if (comp == const_true_rtx)
10968 emit_jump (if_false_label);
10969 else if (comp != const0_rtx)
10970 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Reached only when every word pair compared equal.  */
10974 emit_jump (if_true_label);
10975 if (drop_through_label)
10976 emit_label (drop_through_label);
10979 /* Jump according to whether OP0 is 0.
10980 We assume that OP0 has an integer mode that is too wide
10981 for the available compare insns. */
10984 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
/* NOTE(review): elided excerpt -- the `rtx op0' declaration, `int i;',
   the `rtx part' declaration, braces and some guards (e.g. the test
   around the OR result) are missing between the visible lines.  */
10986 rtx if_false_label, if_true_label;
10988 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10991 rtx drop_through_label = 0;
10993 /* The fastest way of doing this comparison on almost any machine is to
10994 "or" all the words and compare the result. If all have to be loaded
10995 from memory and this is a very wide item, it's possible this may
10996 be slower, but that's highly unlikely. */
10998 part = gen_reg_rtx (word_mode);
10999 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
/* expand_binop may fail and return 0, which aborts this strategy.  */
11000 for (i = 1; i < nwords && part != 0; i++)
11001 part = expand_binop (word_mode, ior_optab, part,
11002 operand_subword_force (op0, i, GET_MODE (op0)),
11003 part, 1, OPTAB_WIDEN);
/* Single compare of the OR'd words against zero.  */
11007 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
/* NOTE(review): PART is a pseudo register here, so compare_from_rtx
   cannot constant-fold and the two branches below appear unreachable;
   they also look inverted relative to the do_jump_for_compare call
   (EQ statically true means OP0 == 0, yet we jump to IF_FALSE_LABEL).
   Verify before relying on them.  */
11010 if (comp == const_true_rtx)
11011 emit_jump (if_false_label);
11012 else if (comp == const0_rtx)
11013 emit_jump (if_true_label);
11015 do_jump_for_compare (comp, if_false_label, if_true_label);
11020 /* If we couldn't do the "or" simply, do this with a series of compares. */
11021 if (! if_false_label)
11022 drop_through_label = if_false_label = gen_label_rtx ();
/* Any nonzero word proves OP0 nonzero.  */
11024 for (i = 0; i < nwords; i++)
11026 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
11028 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
11029 if (comp == const_true_rtx)
11030 emit_jump (if_false_label);
11031 else if (comp != const0_rtx)
11032 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Reached only when every word was zero.  */
11036 emit_jump (if_true_label);
11038 if (drop_through_label)
11039 emit_label (drop_through_label);
11042 /* Given a comparison expression in rtl form, output conditional branches to
11043 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
11046 do_jump_for_compare (comparison, if_false_label, if_true_label)
/* NOTE(review): elided excerpt -- the `if (if_true_label)' guard,
   `abort ()' calls, braces, and the JUMP_INSN bookkeeping around
   `branch' are missing between the visible lines.  */
11047 rtx comparison, if_false_label, if_true_label;
/* Normal case (inside an elided `if (if_true_label)'): emit a branch
   taken when COMPARISON is true, then an unconditional jump for the
   false path if a false label was supplied.  */
11051 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11052 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
11056 if (if_false_label)
11057 emit_jump (if_false_label);
/* Only a false label: we must branch when COMPARISON is false.  */
11059 else if (if_false_label)
11062 rtx prev = get_last_insn ();
11065 /* Output the branch with the opposite condition. Then try to invert
11066 what is generated. If more than one insn is a branch, or if the
11067 branch is not the last insn written, abort. If we can't invert
11068 the branch, make a true label, redirect this jump to that,
11069 emit a jump to the false label and define the true label. */
11071 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11072 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
11076 /* Here we get the first insn that was just emitted. It used to be the
11077 case that, on some machines, emitting the branch would discard
11078 the previous compare insn and emit a replacement. This isn't
11079 done anymore, but abort if we see that PREV is deleted. */
11082 insn = get_insns ();
11083 else if (INSN_DELETED_P (prev))
11086 insn = NEXT_INSN (prev);
/* Locate the (single) jump insn just emitted.  */
11088 for (; insn; insn = NEXT_INSN (insn))
11089 if (GET_CODE (insn) == JUMP_INSN)
11096 if (branch != get_last_insn ())
11099 JUMP_LABEL (branch) = if_false_label;
/* invert_jump failing forces the label-shuffling fallback.  */
11100 if (! invert_jump (branch, if_false_label))
11102 if_true_label = gen_label_rtx ();
11103 redirect_jump (branch, if_true_label);
11104 emit_jump (if_false_label);
11105 emit_label (if_true_label);
11110 /* Generate code for a comparison expression EXP
11111 (including code to compute the values to be compared)
11112 and set (CC0) according to the result.
11113 SIGNED_CODE should be the rtx operation for this comparison for
11114 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
11116 We force a stack adjustment unless there are currently
11117 things pushed on the stack that aren't yet used. */
11120 compare (exp, signed_code, unsigned_code)
/* NOTE(review): elided excerpt -- the `tree exp' declaration, the
   `register rtx op0/op1' declarators, braces, and the assignments of
   the canonicalized registers back to op0/op1 are missing.  */
11122 enum rtx_code signed_code, unsigned_code;
11125 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11127 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11128 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11129 register enum machine_mode mode = TYPE_MODE (type);
11130 int unsignedp = TREE_UNSIGNED (type);
/* Pick the signed or unsigned rtx code based on the operand type.  */
11131 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
11133 #ifdef HAVE_canonicalize_funcptr_for_compare
11134 /* If function pointers need to be "canonicalized" before they can
11135 be reliably compared, then canonicalize them. */
11136 if (HAVE_canonicalize_funcptr_for_compare
11137 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11138 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11141 rtx new_op0 = gen_reg_rtx (mode);
11143 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Same canonicalization for the second operand.  */
11147 if (HAVE_canonicalize_funcptr_for_compare
11148 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11149 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11152 rtx new_op1 = gen_reg_rtx (mode);
11154 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
/* Delegate to the rtx-level comparison; a size is passed only for
   BLKmode operands (the elided conditional at 11160).  */
11159 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11161 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11162 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
11165 /* Like compare but expects the values to compare as two rtx's.
11166 The decision as to signed or unsigned comparison must be made by the caller.
11168 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11171 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11172 size of MODE should be used. */
11175 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
/* NOTE(review): elided excerpt -- declarations for UNSIGNEDP/SIZE/ALIGN,
   `rtx tem;', braces, the operand-swap statements and the `return tem;'
   after the constant fold are missing between the visible lines.  */
11176 register rtx op0, op1;
11177 enum rtx_code code;
11179 enum machine_mode mode;
11185 /* If one operand is constant, make it the second one. Only do this
11186 if the other operand is not constant as well. */
11188 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11189 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping operands requires swapping the comparison direction too.  */
11194 code = swap_condition (code);
11197 if (flag_force_mem)
11199 op0 = force_not_mem (op0);
11200 op1 = force_not_mem (op1);
11203 do_pending_stack_adjust ();
/* Constant operands: fold the whole comparison to const0_rtx or
   const_true_rtx without emitting any insn.  */
11205 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11206 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
/* NOTE(review): the block below appears to sit inside an elided #if 0
   (its lead-in comment says it is "not needed any more") -- confirm
   before treating it as live code.  */
11210 /* There's no need to do this now that combine.c can eliminate lots of
11211 sign extensions. This can be less efficient in certain cases on other
11214 /* If this is a signed equality comparison, we can do it as an
11215 unsigned comparison since zero-extension is cheaper than sign
11216 extension and comparisons with zero are done as unsigned. This is
11217 the case even on machines that can do fast sign extension, since
11218 zero-extension is easier to combine with other operations than
11219 sign-extension is. If we are comparing against a constant, we must
11220 convert it to what it would look like unsigned. */
11221 if ((code == EQ || code == NE) && ! unsignedp
11222 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11224 if (GET_CODE (op1) == CONST_INT
11225 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11226 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* Emit the actual compare insn and hand back a CC0-based condition
   the caller can drop into a conditional branch.  */
11231 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
11233 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11236 /* Generate code to calculate EXP using a store-flag instruction
11237 and return an rtx for the result. EXP is either a comparison
11238 or a TRUTH_NOT_EXPR whose operand is a comparison.
11240 If TARGET is nonzero, store the result there if convenient.
11242 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11245 Return zero if there is no suitable set-flag instruction
11246 available on this machine.
11248 Once expand_expr has been called on the arguments of the comparison,
11249 we are committed to doing the store flag, since it is not safe to
11250 re-evaluate the expression. We emit the store-flag insn by calling
11251 emit_store_flag, but only expand the arguments if we have a reason
11252 to believe that emit_store_flag will be successful. If we think that
11253 it will, but it isn't, we have to simulate the store-flag with a
11254 set/jump/set sequence. */
/* do_store_flag -- expand comparison expression EXP as a "store flag"
   operation, producing the 0/1 result in TARGET (machine mode MODE).
   If ONLY_CHEAP is nonzero, give up rather than fall back to the
   expensive set/compare/jump/set sequence.  K&R-style definition.

   NOTE(review): this excerpt has lines elided (the embedded original
   line numbers jump): the return type, some parameter declarations,
   several early-`return 0` bodies, the switch case labels, and closing
   braces are missing.  Compare against the complete expr.c before
   relying on control flow shown here.  */
11257 do_store_flag (exp, target, mode, only_cheap)
11260 enum machine_mode mode;
11263 enum rtx_code code;
11264 tree arg0, arg1, type;
11266 enum machine_mode operand_mode;
11270 enum insn_code icode;
11271 rtx subtarget = target;
11274 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11275 result at the end. We can't simply invert the test since it would
11276 have already been inverted if it were valid. This case occurs for
11277 some floating-point comparisons. */
11279 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11280 invert = 1, exp = TREE_OPERAND (exp, 0);
11282 arg0 = TREE_OPERAND (exp, 0);
11283 arg1 = TREE_OPERAND (exp, 1);
11284 type = TREE_TYPE (arg0);
11285 operand_mode = TYPE_MODE (type);
11286 unsignedp = TREE_UNSIGNED (type);
11288 /* We won't bother with BLKmode store-flag operations because it would mean
11289 passing a lot of information to emit_store_flag. */
11290 if (operand_mode == BLKmode)
/* NOTE(review): the early-return body for the BLKmode bail-out is
   elided in this excerpt.  */
11293 /* We won't bother with store-flag operations involving function pointers
11294 when function pointers must be canonicalized before comparisons. */
11295 #ifdef HAVE_canonicalize_funcptr_for_compare
11296 if (HAVE_canonicalize_funcptr_for_compare
11297 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11298 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11300 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11301 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11302 == FUNCTION_TYPE))))
/* NOTE(review): the bail-out body for the function-pointer case (and
   the matching #endif) are elided in this excerpt.  */
11309 /* Get the rtx comparison code to use. We know that EXP is a comparison
11310 operation of some type. Some comparisons against 1 and -1 can be
11311 converted to comparisons with zero. Do so here so that the tests
11312 below will be aware that we have a comparison with zero. These
11313 tests will not catch constants in the first operand, but constants
11314 are rarely passed as the first operand. */
/* NOTE(review): the case labels of this switch (EQ_EXPR, NE_EXPR,
   LT_EXPR, LE_EXPR, GE_EXPR, GT_EXPR, ...) and the break statements
   are elided below; only the per-case bodies that strength-reduce
   "< 1" / "<= -1" etc. into comparisons with zero remain visible.  */
11316 switch (TREE_CODE (exp))
11325 if (integer_onep (arg1))
11326 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11328 code = unsignedp ? LTU : LT;
11331 if (! unsignedp && integer_all_onesp (arg1))
11332 arg1 = integer_zero_node, code = LT;
11334 code = unsignedp ? LEU : LE;
11337 if (! unsignedp && integer_all_onesp (arg1))
11338 arg1 = integer_zero_node, code = GE;
11340 code = unsignedp ? GTU : GT;
11343 if (integer_onep (arg1))
11344 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11346 code = unsignedp ? GEU : GE;
11352 /* Put a constant second. */
11353 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11355 tem = arg0; arg0 = arg1; arg1 = tem;
11356 code = swap_condition (code);
11359 /* If this is an equality or inequality test of a single bit, we can
11360 do this by shifting the bit being tested to the low-order bit and
11361 masking the result with the constant 1. If the condition was EQ,
11362 we xor it with 1. This does not require an scc insn and is faster
11363 than an scc insn even if we have it. */
11365 if ((code == NE || code == EQ)
11366 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11367 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11369 tree inner = TREE_OPERAND (arg0, 0);
11370 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
11373 /* If INNER is a right shift of a constant and it plus BITNUM does
11374 not overflow, adjust BITNUM and INNER. */
11376 if (TREE_CODE (inner) == RSHIFT_EXPR
11377 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11378 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11379 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11380 < TYPE_PRECISION (type)))
11382 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11383 inner = TREE_OPERAND (inner, 0);
11386 /* If we are going to be able to omit the AND below, we must do our
11387 operations as unsigned. If we must use the AND, we have a choice.
11388 Normally unsigned is faster, but for some machines signed is. */
11389 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11390 #ifdef LOAD_EXTEND_OP
11391 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* NOTE(review): the #else/#endif arm of the LOAD_EXTEND_OP
   conditional is elided in this excerpt.  */
11397 if (subtarget == 0 || GET_CODE (subtarget) != REG
11398 || GET_MODE (subtarget) != operand_mode
11399 || ! safe_from_p (subtarget, inner, 1))
/* NOTE(review): "subtarget = 0;" (discarding an unusable scratch
   target) is presumably the elided body here -- confirm upstream.  */
11402 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0, then (if needed) widen/narrow
   to MODE, xor with 1 for the EQ sense, and mask with 1 last so the
   AND can combine with surrounding insns.  */
11405 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11406 size_int (bitnum), subtarget, ops_unsignedp);
11408 if (GET_MODE (op0) != mode)
11409 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11411 if ((code == EQ && ! invert) || (code == NE && invert))
11412 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11413 ops_unsignedp, OPTAB_LIB_WIDEN);
11415 /* Put the AND last so it can combine with more things. */
11416 if (bitnum != TYPE_PRECISION (type) - 1)
11417 op0 = expand_and (op0, const1_rtx, subtarget);
/* NOTE(review): the "return op0;" ending the single-bit fast path is
   elided in this excerpt.  */
11422 /* Now see if we are likely to be able to do this. Return if not. */
11423 if (! can_compare_p (operand_mode))
11425 icode = setcc_gen_code[(int) code];
11426 if (icode == CODE_FOR_nothing
11427 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11429 /* We can only do this if it is one of the special cases that
11430 can be handled without an scc insn. */
11431 if ((code == LT && integer_zerop (arg1))
11432 || (! only_cheap && code == GE && integer_zerop (arg1)))
11434 else if (BRANCH_COST >= 0
11435 && ! only_cheap && (code == NE || code == EQ)
11436 && TREE_CODE (type) != REAL_TYPE
11437 && ((abs_optab->handlers[(int) operand_mode].insn_code
11438 != CODE_FOR_nothing)
11439 || (ffs_optab->handlers[(int) operand_mode].insn_code
11440 != CODE_FOR_nothing)))
/* Expand both operands, pick a register target, and attempt the
   store-flag insn proper.  */
11446 preexpand_calls (exp);
11447 if (subtarget == 0 || GET_CODE (subtarget) != REG
11448 || GET_MODE (subtarget) != operand_mode
11449 || ! safe_from_p (subtarget, arg1, 1))
11452 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11453 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11456 target = gen_reg_rtx (mode);
11458 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11459 because, if the emit_store_flag does anything it will succeed and
11460 OP0 and OP1 will not be used subsequently. */
11462 result = emit_store_flag (target, code,
11463 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11464 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11465 operand_mode, unsignedp, 1);
/* On success, apply the deferred TRUTH_NOT inversion (xor with 1)
   before returning.  */
11470 result = expand_binop (mode, xor_optab, result, const1_rtx,
11471 result, 0, OPTAB_LIB_WIDEN);
11475 /* If this failed, we have to do this with set/compare/jump/set code. */
11476 if (GET_CODE (target) != REG
11477 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11478 target = gen_reg_rtx (GET_MODE (target));
11480 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11481 result = compare_from_rtx (op0, op1, code, unsignedp,
11482 operand_mode, NULL_RTX, 0)
11483 if (GET_CODE (result) == CONST_INT)
11484 return (((result == const0_rtx && ! invert)
11485 || (result != const0_rtx && invert))
11486 ? const0_rtx : const1_rtx);
11488 label = gen_label_rtx ();
11489 if (bcc_gen_fctn[(int) code] == 0)
/* NOTE(review): the body here (presumably "abort ();" for a missing
   branch generator) is elided in this excerpt.  */
11492 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11493 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11494 emit_label (label);
11499 /* Generate a tablejump instruction (used for switch statements). */
/* NOTE(review): lines are elided in this excerpt (the embedded
   original line numbers jump): the function's return type, some
   #else/#endif arms, and trailing statements are missing.  Compare
   against the complete expr.c before relying on control flow here.  */
11501 #ifdef HAVE_tablejump
11503 /* INDEX is the value being switched on, with the lowest value
11504 in the table already subtracted.
11505 MODE is its expected mode (needed if INDEX is constant).
11506 RANGE is the length of the jump table.
11507 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11509 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11510 index value is out of range. */
11513 do_tablejump (index, mode, range, table_label, default_label)
11514 rtx index, range, table_label, default_label;
11515 enum machine_mode mode;
11517 register rtx temp, vector;
11519 /* Do an unsigned comparison (in the proper mode) between the index
11520 expression and the value which represents the length of the range.
11521 Since we just finished subtracting the lower bound of the range
11522 from the index expression, this comparison allows us to simultaneously
11523 check that the original index expression value is both greater than
11524 or equal to the minimum value of the range and less than or equal to
11525 the maximum value of the range. */
11527 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11528 emit_jump_insn (gen_bgtu (default_label));
11530 /* If index is in range, it must fit in Pmode.
11531 Convert to Pmode so we can index with it. */
11533 index = convert_to_mode (Pmode, index, 1);
11535 /* Don't let a MEM slip thru, because then INDEX that comes
11536 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11537 and break_out_memory_refs will go to work on it and mess it up. */
11538 #ifdef PIC_CASE_VECTOR_ADDRESS
11539 if (flag_pic && GET_CODE (index) != REG)
11540 index = copy_to_mode_reg (Pmode, index);
/* NOTE(review): the #endif matching the PIC_CASE_VECTOR_ADDRESS
   conditional above is elided in this excerpt.  */
11543 /* If flag_force_addr were to affect this address
11544 it could interfere with the tricky assumptions made
11545 about addresses that contain label-refs,
11546 which may be valid only very near the tablejump itself. */
11547 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11548 GET_MODE_SIZE, because this indicates how large insns are. The other
11549 uses should all be Pmode, because they are addresses. This code
11550 could fail if addresses and insns are not the same size. */
/* Form the table-entry address: TABLE_LABEL + INDEX * entry_size.  */
11551 index = gen_rtx_PLUS (Pmode,
11552 gen_rtx_MULT (Pmode, index,
11553 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11554 gen_rtx_LABEL_REF (Pmode, table_label));
11555 #ifdef PIC_CASE_VECTOR_ADDRESS
/* NOTE(review): the "if (flag_pic)" guard and the #else/#endif arm
   of this conditional appear elided in this excerpt.  */
11557 index = PIC_CASE_VECTOR_ADDRESS (index);
11560 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11561 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11562 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
/* The jump table is read-only; mark the load as unchanging memory.  */
11563 RTX_UNCHANGING_P (vector) = 1;
11564 convert_move (temp, vector, 0);
11566 emit_jump_insn (gen_tablejump (temp, table_label));
11568 /* If we are generating PIC code or if the table is PC-relative, the
11569 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11570 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
/* NOTE(review): the body here (presumably "emit_barrier ();") is
   elided in this excerpt.  */
11574 #endif /* HAVE_tablejump */