1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91, 92, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This file handles the generation of rtl code from tree structure
22 at the level of the function as a whole.
23 It creates the rtl expressions for parameters and auto variables
24 and has full responsibility for allocating stack slots.
26 `expand_function_start' is called at the beginning of a function,
27 before the function body is parsed, and `expand_function_end' is
28 called after parsing the body.
30 Call `assign_stack_local' to allocate a stack slot for a local variable.
31 This is usually done during the RTL generation for the function body,
32 but it can also be done in the reload pass when a pseudo-register does
33 not get a hard register.
35 Call `put_var_into_stack' when you learn, belatedly, that a variable
36 previously given a pseudo-register must in fact go in the stack.
37 This function changes the DECL_RTL to be a stack slot instead of a reg
38 then scans all the RTL instructions so far generated to correct them. */
48 #include "insn-flags.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
56 #include "basic-block.h"
60 /* Some systems use __main in a way incompatible with its use in gcc, in these
61 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
62 give the same symbol without quotes for an alternative entry point. You
63 must define both, or neither. */
65 #define NAME__MAIN "__main"
66 #define SYMBOL__MAIN __main
69 /* Round a value to the lowest integer less than it that is a multiple of
70 the required alignment. Avoid using division in case the value is
71 negative. Assume the alignment is a power of two. */
72 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
74 /* Similar, but round to the next highest integer that meets the
required alignment.  */
76 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
78 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
79 during rtl generation. If they are different register numbers, this is
80 always true. It may also be true if
81 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
82 generation. See fix_lexical_addr for details. */
84 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
85 #define NEED_SEPARATE_AP
88 /* Number of bytes of args popped by function being compiled on its return.
89 Zero if no bytes are to be popped.
90 May affect compilation of return insn or of function epilogue. */
92 int current_function_pops_args;
94 /* Nonzero if function being compiled needs to be given an address
95 where the value should be stored. */
97 int current_function_returns_struct;
99 /* Nonzero if function being compiled needs to
100 return the address of where it has put a structure value. */
102 int current_function_returns_pcc_struct;
104 /* Nonzero if function being compiled needs to be passed a static chain. */
106 int current_function_needs_context;
108 /* Nonzero if function being compiled can call setjmp. */
110 int current_function_calls_setjmp;
112 /* Nonzero if function being compiled can call longjmp. */
114 int current_function_calls_longjmp;
116 /* Nonzero if function being compiled receives nonlocal gotos
117 from nested functions. */
119 int current_function_has_nonlocal_label;
121 /* Nonzero if function being compiled has nonlocal gotos to parent
function.  */
124 int current_function_has_nonlocal_goto;
126 /* Nonzero if function being compiled contains nested functions. */
128 int current_function_contains_functions;
130 /* Nonzero if function being compiled can call alloca,
131 either as a subroutine or builtin. */
133 int current_function_calls_alloca;
135 /* Nonzero if the current function returns a pointer type */
137 int current_function_returns_pointer;
139 /* If some insns can be deferred to the delay slots of the epilogue, the
140 delay list for them is recorded here. */
142 rtx current_function_epilogue_delay_list;
144 /* If function's args have a fixed size, this is that size, in bytes.
146 May affect compilation of return insn or of function epilogue. */
148 int current_function_args_size;
150 /* # bytes the prologue should push and pretend that the caller pushed them.
151 The prologue must do this, but only if parms can be passed in registers. */
153 int current_function_pretend_args_size;
155 /* # of bytes of outgoing arguments required to be pushed by the prologue.
156 If this is non-zero, it means that ACCUMULATE_OUTGOING_ARGS was defined
157 and no stack adjusts will be done on function calls. */
159 int current_function_outgoing_args_size;
161 /* This is the offset from the arg pointer to the place where the first
162 anonymous arg can be found, if there is one. */
164 rtx current_function_arg_offset_rtx;
166 /* Nonzero if current function uses varargs.h or equivalent.
167 Zero for functions that use stdarg.h. */
169 int current_function_varargs;
171 /* Quantities of various kinds of registers
172 used for the current function's args. */
174 CUMULATIVE_ARGS current_function_args_info;
176 /* Name of function now being compiled. */
178 char *current_function_name;
180 /* If non-zero, an RTL expression for that location at which the current
181 function returns its result. Always equal to
182 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
183 independently of the tree structures. */
185 rtx current_function_return_rtx;
187 /* Nonzero if the current function uses the constant pool. */
189 int current_function_uses_const_pool;
191 /* Nonzero if the current function uses pic_offset_table_rtx. */
192 int current_function_uses_pic_offset_table;
194 /* The arg pointer hard register, or the pseudo into which it was copied. */
195 rtx current_function_internal_arg_pointer;
197 /* The FUNCTION_DECL for an inline function currently being expanded. */
198 tree inline_function_decl;
200 /* Number of function calls seen so far in current function. */
202 int function_call_count;
204 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
205 (labels to which there can be nonlocal gotos from nested functions)
208 tree nonlocal_labels;
210 /* RTX for stack slot that holds the current handler for nonlocal gotos.
211 Zero when function does not have nonlocal labels. */
213 rtx nonlocal_goto_handler_slot;
215 /* RTX for stack slot that holds the stack pointer value to restore
for a nonlocal goto.
217 Zero when function does not have nonlocal labels. */
219 rtx nonlocal_goto_stack_level;
221 /* Label that will go on parm cleanup code, if any.
222 Jumping to this label runs cleanup code for parameters, if
223 such code must be run. Following this code is the logical return label. */
227 /* Label that will go on function epilogue.
228 Jumping to this label serves as a "return" instruction
229 on machines which require execution of the epilogue on all returns. */
233 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
234 So we can mark them all live at the end of the function, if nonopt. */
237 /* List (chain of EXPR_LISTs) of all stack slots in this function.
238 Made for the sake of unshare_all_rtl. */
241 /* Chain of all RTL_EXPRs that have insns in them. */
244 /* Label to jump back to for tail recursion, or 0 if we have
245 not yet needed one for this function. */
246 rtx tail_recursion_label;
248 /* Place after which to insert the tail_recursion_label if we need one. */
249 rtx tail_recursion_reentry;
251 /* Location at which to save the argument pointer if it will need to be
252 referenced. There are two cases where this is done: if nonlocal gotos
253 exist, or if vars stored at an offset from the argument pointer will be
254 needed by inner routines. */
256 rtx arg_pointer_save_area;
258 /* Offset to end of allocated area of stack frame.
259 If stack grows down, this is the address of the last stack slot allocated.
260 If stack grows up, this is the address for the next slot. */
263 /* List (chain of TREE_LISTs) of static chains for containing functions.
264 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
265 in an RTL_EXPR in the TREE_VALUE. */
266 static tree context_display;
268 /* List (chain of TREE_LISTs) of trampolines for nested functions.
269 The trampoline sets up the static chain and jumps to the function.
270 We supply the trampoline's address when the function's address is requested.
272 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
273 in an RTL_EXPR in the TREE_VALUE. */
274 static tree trampoline_list;
276 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
277 static rtx parm_birth_insn;
280 /* Nonzero if a stack slot has been generated whose address is not
281 actually valid. It means that the generated rtl must all be scanned
282 to detect and correct the invalid addresses where they occur. */
283 static int invalid_stack_slot;
286 /* Last insn of those whose job was to put parms into their nominal homes. */
287 static rtx last_parm_insn;
289 /* 1 + last pseudo register number used for loading a copy
290 of a parameter of this function. */
291 static int max_parm_reg;
293 /* Vector indexed by REGNO, containing location on stack in which
294 to put the parm which is nominally in pseudo register REGNO,
295 if we discover that that parm must go in the stack. */
296 static rtx *parm_reg_stack_loc;
298 #if 0 /* Turned off because 0 seems to work just as well. */
299 /* Cleanup lists are required for binding levels regardless of whether
300 that binding level has cleanups or not. This node serves as the
301 cleanup list whenever an empty list is required. */
302 static tree empty_cleanup_list;
305 /* Nonzero once virtual register instantiation has been done.
306 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
307 static int virtuals_instantiated;
309 /* These variables hold pointers to functions to
310 save and restore machine-specific data,
311 in push_function_context and pop_function_context. */
312 void (*save_machine_status) ();
313 void (*restore_machine_status) ();
315 /* Nonzero if we need to distinguish between the return value of this function
316 and the return value of a function called by this function.  */
319 extern int rtx_equal_function_value_matters;
320 extern tree bc_runtime_type_code ();
321 extern rtx bc_build_calldesc ();
322 extern char *bc_emit_trampoline ();
323 extern char *bc_end_function ();
327 static tree round_down ();
328 static rtx round_trampoline_addr ();
329 static rtx fixup_stack_1 ();
330 static void put_reg_into_stack ();
331 static void fixup_var_refs ();
332 static void fixup_var_refs_insns ();
333 static void fixup_var_refs_1 ();
334 static void optimize_bit_field ();
335 static void instantiate_decls ();
336 static void instantiate_decls_1 ();
337 static void instantiate_decl ();
338 static int instantiate_virtual_regs_1 ();
339 static rtx fixup_memory_subreg ();
340 static rtx walk_fixup_memory_subreg ();
342 /* In order to evaluate some expressions, such as function calls returning
343 structures in memory, we need to temporarily allocate stack locations.
344 We record each allocated temporary in the following structure.
346 Associated with each temporary slot is a nesting level. When we pop up
347 one level, all temporaries associated with the previous level are freed.
348 Normally, all temporaries are freed after the execution of the statement
349 in which they were created. However, if we are inside a ({...}) grouping,
350 the result may be in a temporary and hence must be preserved. If the
351 result could be in a temporary, we preserve it if we can determine which
352 one it is in. If we cannot determine which temporary may contain the
353 result, all temporaries are preserved. A temporary is preserved by
354 pretending it was allocated at the previous nesting level.
356 Automatic variables are also assigned temporary slots, at the nesting
357 level where they are defined. They are marked a "kept" so that
358 free_temp_slots will not free them. */
362 /* Points to next temporary slot. */
363 struct temp_slot *next;
364 /* The rtx used to reference the slot.  */
366 /* The size, in units, of the slot. */
368 /* Non-zero if this temporary is currently in use. */
370 /* Nesting level at which this slot is being used. */
372 /* Non-zero if this should survive a call to free_temp_slots. */
376 /* List of all temporaries allocated, both available and in use. */
378 struct temp_slot *temp_slots;
380 /* Current nesting level for temporaries. */
384 /* The FUNCTION_DECL node for the current function. */
385 static tree this_function_decl;
387 /* Callinfo pointer for the current function. */
388 static rtx this_function_callinfo;
390 /* The label in the bytecode file of this function's actual bytecode.  */
392 static char *this_function_bytecode;
394 /* The call description vector for the current function. */
395 static rtx this_function_calldesc;
397 /* Size of the local variables allocated for the current function. */
400 /* Current depth of the bytecode evaluation stack. */
403 /* Maximum depth of the evaluation stack in this function. */
406 /* Current depth in statement expressions. */
407 static int stmt_expr_depth;
409 /* Pointer to chain of `struct function' for containing functions. */
410 struct function *outer_function_chain;
412 /* Given a function decl for a containing function,
413 return the `struct function' for it. */
416 find_function_data (decl)
420 for (p = outer_function_chain; p; p = p->next)
426 /* Save the current context for compilation of a nested function.
427 This is called from language-specific code.
428 The caller is responsible for saving any language-specific status,
429 since this function knows only about language-independent variables. */
push_function_context ()
  /* Record everything about the function being suspended in a freshly
     malloc'd `struct function', pushed on outer_function_chain so that
     pop_function_context can restore it after the nested function is
     compiled.  */
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  /* Save all the `current_function_*' globals and the other
     per-function static variables declared earlier in this file.  */
  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  /* The saved context starts with no pending variable fixups; entries are
     queued by put_reg_into_stack while this context is suspended.  */
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  /* Let each compiler subsystem save its own per-function state.  */
  save_tree_status (p);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  /* Targets may install a hook to save machine-specific data.  */
  if (save_machine_status)
    (*save_machine_status) (p);
494 /* Restore the last saved context, at the end of a nested function.
495 This function is called from language-specific code. */
pop_function_context ()
  /* Pop the most recently saved context off outer_function_chain and
     restore every global saved by push_function_context.  */
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  /* We only get here after compiling a nested function, so the function
     being resumed necessarily contains (at least one) nested function.  */
  current_function_contains_functions = 1;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;

  /* Let each compiler subsystem restore its own per-function state.  */
  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  /* Targets may install a hook to restore machine-specific data.  */
  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  struct var_refs_queue *queue = p->fixup_var_refs_queue;
  for (; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
572 /* Allocate fixed slots in the stack frame of the current function. */
574 /* Return size needed for stack frame based on slots so far allocated.
575 This size counts from zero. It is not rounded to STACK_BOUNDARY;
576 the caller may have to do that. */
581 #ifdef FRAME_GROWS_DOWNWARD
582 return -frame_offset;
588 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
589 with machine mode MODE.
591 ALIGN controls the amount of alignment for the address of the slot:
592 0 means according to MODE,
593 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
594 positive specifies alignment boundary in bits.
596 We do not round to stack_boundary here. */
assign_stack_local (mode, size, align)
     enum machine_mode mode;
  register rtx x, addr;
  int bigend_correction = 0;

  /* Compute ALIGNMENT, in bytes, from the ALIGN argument:
     0 means use MODE's natural alignment; -1 means BIGGEST_ALIGNMENT
     and round SIZE up to a multiple of it; a positive ALIGN is an
     explicit boundary in bits.  */
    alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
    alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  else if (align == -1)
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
  frame_offset = CEIL_ROUND (frame_offset, alignment);

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;

  /* Build the MEM for the slot and record it on stack_slot_list
     for the sake of unshare_all_rtl.  */
  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
665 /* Assign a stack slot in a containing function.
666 First three arguments are same as in preceding function.
667 The last argument specifies the function to allocate in. */
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     struct function *function;
  register rtx x, addr;
  int bigend_correction = 0;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  /* Compute ALIGNMENT in bytes from ALIGN, exactly as in
     assign_stack_local above.  */
    alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
    alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
  else if (align == -1)
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
  /* NOTE(review): unlike assign_stack_local, the address here is always
     relative to virtual_stack_vars_rtx — this runs only during rtl
     generation for the containing function.  */
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;

  x = gen_rtx (MEM, mode, addr);

  /* Record the slot on the containing function's own stack_slot_list.  */
  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
732 /* Allocate a temporary stack slot and record it for possible later
735 MODE is the machine mode to be given to the returned rtx.
737 SIZE is the size in units of the space required. We do no rounding here
738 since assign_stack_local will do any required rounding.
740 KEEP is non-zero if this slot is to be retained after a call to
741 free_temp_slots. Automatic variables for a block are allocated with this
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
  struct temp_slot *p, *best_p = 0;

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))

  /* Make our best, if any, the one to use.  */
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
              /* Carve the tail of BEST_P off into a new slot covering
                 the leftover bytes.  */
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->size = best_p->size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
              p->next = temp_slots;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,

              best_p->size = rounded_size;

  /* If we still didn't find one, make a new temporary.  */
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      p->next = temp_slots;

  /* The slot is in use at the current temporary nesting level.  */
  p->level = temp_slot_level;
817 /* Combine temporary stack slots which are adjacent on the stack.
819 This allows for better use of already allocated stack space. This is only
820 done for BLKmode slots because we can be sure that we won't have alignment
821 problems in this case. */
combine_temp_slots ()
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  /* For every pair of free BLKmode slots whose addresses are adjacent,
     merge the later one into the earlier and delete it from the list.  */
  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
                if (rtx_equal_p (plus_constant (XEXP (p->slot, 0), p->size),
                    /* Q comes after P; combine Q into P.  */
                else if (rtx_equal_p (plus_constant (XEXP (q->slot, 0), q->size),
                    /* P comes after Q; combine P into Q.  */
            /* Either delete Q or advance past it.  */
              prev_q->next = q->next;
      /* Either delete P or advance past it.  */
          prev_p->next = p->next;
          temp_slots = p->next;

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
878 /* If X could be a reference to a temporary slot, mark that slot as belonging
879 to the to one level higher. If X matched one of our slots, just mark that
880 one. Otherwise, we can't easily predict which it is, so upgrade all of
881 them. Kept slots need not be touched.
883 This is called when an ({...}) construct occurs and a statement
884 returns a value in memory. */
preserve_temp_slots (x)
  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot; nothing to do.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))

  /* First see if we can find a match.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && x == p->slot)

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
911 /* Free all temporaries used so far. This is normally called at the end
912 of generating code for a statement. */
919 for (p = temp_slots; p; p = p->next)
920 if (p->in_use && p->level == temp_slot_level && ! p->keep)
923 combine_temp_slots ();
926 /* Push deeper into the nesting level for stack temporaries. */
934 /* Pop a temporary nesting level. All slots in use in the current level
942 for (p = temp_slots; p; p = p->next)
943 if (p->in_use && p->level == temp_slot_level)
946 combine_temp_slots ();
951 /* Retroactively move an auto variable from a register to a stack slot.
952 This is done when an address-reference to the variable is seen. */
put_var_into_stack (decl)
  enum machine_mode promoted_mode, decl_mode;
  /* FUNCTION stays 0 unless DECL belongs to an outer (suspended)
     function, in which case it is that function's saved context.  */
  struct function *function = 0;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
      decl_mode = promoted_mode = GET_MODE (reg);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode);
  else if (GET_CODE (reg) == CONCAT)
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef STACK_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 0),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 0),
                          part_type, part_mode, part_mode);
      put_reg_into_stack (function, XEXP (reg, 1),
                          part_type, part_mode, part_mode);

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1036 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1037 into the stack frame of FUNCTION (0 means the current function).
1038 DECL_MODE is the machine mode of the user-level data type.
1039 PROMOTED_MODE is the machine mode of the register. */
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode)
     struct function *function;
     enum machine_mode promoted_mode, decl_mode;
      /* Allocating in an outer function's frame: reuse the parm's
         preassigned stack home if REG was a parm copy, else make a new
         slot in that function's frame.  */
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
      /* Same, but in the current function's frame.  */
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);

  /* Turn the REG rtx in place into a MEM at the slot's address, so all
     existing references to it become memory references.  */
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  REG_USERVAR_P (reg) = 0;
  PUT_CODE (reg, MEM);
  PUT_MODE (reg, decl_mode);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg)
    = (TREE_CODE (type) == ARRAY_TYPE
       || TREE_CODE (type) == RECORD_TYPE
       || TREE_CODE (type) == UNION_TYPE
       || TREE_CODE (type) == QUAL_UNION_TYPE);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      /* Queue the fixup on FUNCTION; pop_function_context drains it.  */
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      /* Variable is local; fix it up now.  */
      fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
/* Fix up all references to VAR (now a MEM, formerly a pseudo of mode
   PROMOTED_MODE; UNSIGNEDP is its signedness) in the main insn chain,
   in every pending sequence on the sequence stack, and in every saved
   RTL_EXPR sequence.  NOTE(review): declaration lines and braces are
   missing from this copy (embedded line-number gaps).  */
1107 fixup_var_refs (var, promoted_mode, unsignedp)
1109 enum machine_mode promoted_mode;
1113 rtx first_insn = get_insns ();
1114 struct sequence_stack *stack = sequence_stack;
1115 tree rtl_exps = rtl_expr_chain;
1117 /* Must scan all insns for stack-refs that exceed the limit. */
/* TOPLEVEL (last arg) is nonzero only when no sequence is pending,
   i.e. the chain really is the function's main chain.  */
1118 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1120 /* Scan all pending sequences too. */
1121 for (; stack; stack = stack->next)
1123 push_to_sequence (stack->first);
1124 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1125 stack->first, stack->next != 0);
1126 /* Update remembered end of sequence
1127 in case we added an insn at the end. */
1128 stack->last = get_last_insn ();
1132 /* Scan all waiting RTL_EXPRs too. */
1133 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1135 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
/* const0_rtx marks an RTL_EXPR whose sequence was already emitted.  */
1136 if (seq != const0_rtx && seq != 0)
1138 push_to_sequence (seq);
1139 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1145 /* This structure is used by the following two functions to record MEMs or
1146 pseudos used to replace VAR, any SUBREGs of VAR, and any MEMs containing
1147 VAR as an address. We need to maintain this list in case two operands of
1148 an insn were required to match; in that case we must ensure we use the
1149 same replacement. */
1151 struct fixup_replacement
/* NOTE(review): the struct's `old' and `new' rtx fields are missing from
   this copy (line-number gap 1151 -> 1155); they are referenced below as
   p->old and p->new.  */
1155 struct fixup_replacement *next;
1158 /* REPLACEMENTS is a pointer to a list of the above structures and X is
1159 some part of an insn. Return a struct fixup_replacement whose OLD
1160 value is equal to X. Allocate a new structure if no such entry exists. */
1162 static struct fixup_replacement *
1163 find_fixup_replacement (replacements, x)
1164 struct fixup_replacement **replacements;
1167 struct fixup_replacement *p;
1169 /* See if we have already replaced this. */
1170 for (p = *replacements; p && p->old != x; p = p->next)
/* NOTE(review): lines 1171-1177 (loop body, the found-entry return, and
   initialization of the freshly oballoc'd entry's old/new fields) are
   missing from this copy; a new entry is pushed on *replacements.  */
1175 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1178 p->next = *replacements;
/* NOTE(review): embedded line-number gaps show that declarations, braces,
   and parts of several conditions are missing from this copy; comments
   below describe only what the visible code establishes.  */
1185 /* Scan the insn-chain starting with INSN for refs to VAR
1186 and fix them up. TOPLEVEL is nonzero if this chain is the
1187 main chain of insns for the current function. */
1190 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1192 enum machine_mode promoted_mode;
/* NEXT is captured up front: INSN may be turned into a NOTE below.  */
1201 rtx next = NEXT_INSN (insn);
1203 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1205 /* The insn to load VAR from a home in the arglist
1206 is now a no-op. When we see it, just delete it. */
1208 && GET_CODE (PATTERN (insn)) == SET
1209 && SET_DEST (PATTERN (insn)) == var
1210 /* If this represents the result of an insn group,
1211 don't delete the insn. */
1212 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1213 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1215 /* In unoptimized compilation, we shouldn't call delete_insn
1216 except in jump.c doing warnings. */
/* "Delete" by converting the insn into a deleted NOTE in place.  */
1217 PUT_CODE (insn, NOTE);
1218 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1219 NOTE_SOURCE_FILE (insn) = 0;
1220 if (insn == last_parm_insn)
1221 last_parm_insn = PREV_INSN (next);
1225 struct fixup_replacement *replacements = 0;
1226 rtx next_insn = NEXT_INSN (insn);
1228 #ifdef SMALL_REGISTER_CLASSES
1229 /* If the insn that copies the results of a CALL_INSN
1230 into a pseudo now references VAR, we have to use an
1231 intermediate pseudo since we want the life of the
1232 return value register to be only a single insn.
1234 If we don't use an intermediate pseudo, such things as
1235 address computations to make the address of VAR valid
1236 if it is not can be placed between the CALL_INSN and INSN.
1238 To make sure this doesn't happen, we record the destination
1239 of the CALL_INSN and see if the next insn uses both that
1242 if (call_dest != 0 && GET_CODE (insn) == INSN
1243 && reg_mentioned_p (var, PATTERN (insn))
1244 && reg_mentioned_p (call_dest, PATTERN (insn)))
1246 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1248 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1250 PATTERN (insn) = replace_rtx (PATTERN (insn),
/* Remember the value register set by a CALL_INSN (single SET or the
   first SET of a PARALLEL) for the check above on the next insn.  */
1254 if (GET_CODE (insn) == CALL_INSN
1255 && GET_CODE (PATTERN (insn)) == SET)
1256 call_dest = SET_DEST (PATTERN (insn));
1257 else if (GET_CODE (insn) == CALL_INSN
1258 && GET_CODE (PATTERN (insn)) == PARALLEL
1259 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1260 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1265 /* See if we have to do anything to INSN now that VAR is in
1266 memory. If it needs to be loaded into a pseudo, use a single
1267 pseudo for the entire insn in case there is a MATCH_DUP
1268 between two operands. We pass a pointer to the head of
1269 a list of struct fixup_replacements. If fixup_var_refs_1
1270 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1271 it will record them in this list.
1273 If it allocated a pseudo for any replacement, we copy into
1276 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1279 /* If this is last_parm_insn, and any instructions were output
1280 after it to fix it up, then we must set last_parm_insn to
1281 the last such instruction emitted. */
1282 if (insn == last_parm_insn)
1283 last_parm_insn = PREV_INSN (next_insn);
/* Emit a load (or conversion) into each replacement pseudo that
   fixup_var_refs_1 allocated for this insn.  */
1285 while (replacements)
1287 if (GET_CODE (replacements->new) == REG)
1292 /* OLD might be a (subreg (mem)). */
1293 if (GET_CODE (replacements->old) == SUBREG)
1295 = fixup_memory_subreg (replacements->old, insn, 0);
1298 = fixup_stack_1 (replacements->old, insn);
1300 /* We can not separate USE insns from the CALL_INSN
1301 that they belong to. If this is a CALL_INSN, insert
1302 the move insn before the USE insns preceding it
1303 instead of immediately before the insn. */
1304 if (GET_CODE (insn) == CALL_INSN)
1306 insert_before = insn;
1307 while (GET_CODE (PREV_INSN (insert_before)) == INSN
1308 && GET_CODE (PATTERN (PREV_INSN (insert_before))) == USE)
1309 insert_before = PREV_INSN (insert_before);
1312 insert_before = insn;
1314 /* If we are changing the mode, do a conversion.
1315 This might be wasteful, but combine.c will
1316 eliminate much of the waste. */
1318 if (GET_MODE (replacements->new)
1319 != GET_MODE (replacements->old))
1322 convert_move (replacements->new,
1323 replacements->old, unsignedp)
1324 seq = gen_sequence ();
1328 seq = gen_move_insn (replacements->new,
1331 emit_insn_before (seq, insert_before);
1334 replacements = replacements->next;
1338 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1339 But don't touch other insns referred to by reg-notes;
1340 we will get them elsewhere. */
1341 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1342 if (GET_CODE (note) != INSN_LIST)
1344 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
/* NOTE(review): this is the largest and most order-critical function in the
   chunk, and the embedded line-number gaps show that many lines (the switch
   statement and its case labels, braces, `return' statements, declarations)
   are missing from this copy.  The visible fragments correspond to the
   per-rtx-code cases: MEM, ZERO_EXTRACT/SIGN_EXTRACT, SUBREG, SET, and the
   generic operand recursion.  Comments below are confined to what the
   visible code shows.  */
1350 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1351 See if the rtx expression at *LOC in INSN needs to be changed.
1353 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1354 contain a list of original rtx's and replacements. If we find that we need
1355 to modify this insn by replacing a memory reference with a pseudo or by
1356 making a new MEM to implement a SUBREG, we consult that list to see if
1357 we have already chosen a replacement. If none has already been allocated,
1358 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1359 or the SUBREG, as appropriate, to the pseudo. */
1362 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1364 enum machine_mode promoted_mode;
1367 struct fixup_replacement **replacements;
1370 register rtx x = *loc;
1371 RTX_CODE code = GET_CODE (x);
1373 register rtx tem, tem1;
1374 struct fixup_replacement *replacement;
/* --- Case: *LOC is VAR itself (a MEM). --- */
1381 /* If we already have a replacement, use it. Otherwise,
1382 try to fix up this address in case it is invalid. */
1384 replacement = find_fixup_replacement (replacements, var);
1385 if (replacement->new)
1387 *loc = replacement->new;
1391 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1393 /* Unless we are forcing memory to register or we changed the mode,
1394 we can leave things the way they are if the insn is valid. */
1396 INSN_CODE (insn) = -1;
1397 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1398 && recog_memoized (insn) >= 0)
/* Not valid as a MEM: substitute a fresh pseudo; the caller will emit
   the load from VAR into it.  */
1401 *loc = replacement->new = gen_reg_rtx (promoted_mode);
/* --- VAR appears somewhere inside X. --- */
1405 /* If X contains VAR, we need to unshare it here so that we update
1406 each occurrence separately. But all identical MEMs in one insn
1407 must be replaced with the same rtx because of the possibility of
1410 if (reg_mentioned_p (var, x))
1412 replacement = find_fixup_replacement (replacements, x);
1413 if (replacement->new == 0)
1414 replacement->new = copy_most_rtx (x, var);
1416 *loc = x = replacement->new;
/* --- Case: ZERO_EXTRACT / SIGN_EXTRACT whose operand is VAR. --- */
1432 /* Note that in some cases those types of expressions are altered
1433 by optimize_bit_field, and do not survive to get here. */
1434 if (XEXP (x, 0) == var
1435 || (GET_CODE (XEXP (x, 0)) == SUBREG
1436 && SUBREG_REG (XEXP (x, 0)) == var))
1438 /* Get TEM as a valid MEM in the mode presently in the insn.
1440 We don't worry about the possibility of MATCH_DUP here; it
1441 is highly unlikely and would be tricky to handle. */
1444 if (GET_CODE (tem) == SUBREG)
1445 tem = fixup_memory_subreg (tem, insn, 1);
1446 tem = fixup_stack_1 (tem, insn);
1448 /* Unless we want to load from memory, get TEM into the proper mode
1449 for an extract from memory. This can only be done if the
1450 extract is at a constant position and length. */
1452 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1453 && GET_CODE (XEXP (x, 2)) == CONST_INT
1454 && ! mode_dependent_address_p (XEXP (tem, 0))
1455 && ! MEM_VOLATILE_P (tem))
1457 enum machine_mode wanted_mode = VOIDmode;
1458 enum machine_mode is_mode = GET_MODE (tem);
1459 int width = INTVAL (XEXP (x, 1));
1460 int pos = INTVAL (XEXP (x, 2));
/* Query the machine description for the mode the extv/extzv insn
   wants its memory operand in.  */
1463 if (GET_CODE (x) == ZERO_EXTRACT)
1464 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1467 if (GET_CODE (x) == SIGN_EXTRACT)
1468 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1470 /* If we have a narrower mode, we can do something. */
1471 if (wanted_mode != VOIDmode
1472 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1474 int offset = pos / BITS_PER_UNIT;
1475 rtx old_pos = XEXP (x, 2);
1478 /* If the bytes and bits are counted differently, we
1479 must adjust the offset. */
1480 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1481 offset = (GET_MODE_SIZE (is_mode)
1482 - GET_MODE_SIZE (wanted_mode) - offset);
1485 pos %= GET_MODE_BITSIZE (wanted_mode);
1487 newmem = gen_rtx (MEM, wanted_mode,
1488 plus_constant (XEXP (tem, 0), offset));
1489 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1490 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1491 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1493 /* Make the change and see if the insn remains valid. */
1494 INSN_CODE (insn) = -1;
1495 XEXP (x, 0) = newmem;
1496 XEXP (x, 2) = GEN_INT (pos);
1498 if (recog_memoized (insn) >= 0)
1501 /* Otherwise, restore old position. XEXP (x, 0) will be
1503 XEXP (x, 2) = old_pos;
1507 /* If we get here, the bitfield extract insn can't accept a memory
1508 reference. Copy the input into a register. */
1510 tem1 = gen_reg_rtx (GET_MODE (tem));
1511 emit_insn_before (gen_move_insn (tem1, tem), insn);
/* --- Case: SUBREG of VAR. --- */
1518 if (SUBREG_REG (x) == var)
1520 /* If this is a special SUBREG made because VAR was promoted
1521 from a wider mode, replace it with VAR and call ourself
1522 recursively, this time saying that the object previously
1523 had its current mode (by virtue of the SUBREG). */
1525 if (SUBREG_PROMOTED_VAR_P (x))
1528 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1532 /* If this SUBREG makes VAR wider, it has become a paradoxical
1533 SUBREG with VAR in memory, but these aren't allowed at this
1534 stage of the compilation. So load VAR into a pseudo and take
1535 a SUBREG of that pseudo. */
1536 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1538 replacement = find_fixup_replacement (replacements, var);
1539 if (replacement->new == 0)
1540 replacement->new = gen_reg_rtx (GET_MODE (var));
1541 SUBREG_REG (x) = replacement->new;
1545 /* See if we have already found a replacement for this SUBREG.
1546 If so, use it. Otherwise, make a MEM and see if the insn
1547 is recognized. If not, or if we should force MEM into a register,
1548 make a pseudo for this SUBREG. */
1549 replacement = find_fixup_replacement (replacements, x)
1550 if (replacement->new)
1552 *loc = replacement->new;
1556 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1558 INSN_CODE (insn) = -1;
1559 if (! flag_force_mem && recog_memoized (insn) >= 0)
1562 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
/* --- Case: SET.  Handle bit-field destinations, paradoxical SUBREG
   destinations, and simple copies of VAR in or out of a register. --- */
1568 /* First do special simplification of bit-field references. */
1569 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1570 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1571 optimize_bit_field (x, insn, 0);
1572 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1573 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1574 optimize_bit_field (x, insn, NULL_PTR);
1576 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1577 insn into a pseudo and store the low part of the pseudo into VAR. */
1578 if (GET_CODE (SET_DEST (x)) == SUBREG
1579 && SUBREG_REG (SET_DEST (x)) == var
1580 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1581 > GET_MODE_SIZE (GET_MODE (var))))
1583 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1584 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1591 rtx dest = SET_DEST (x);
1592 rtx src = SET_SRC (x);
1593 rtx outerdest = dest;
/* Strip wrappers to see whether VAR is the real destination.  */
1595 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1596 || GET_CODE (dest) == SIGN_EXTRACT
1597 || GET_CODE (dest) == ZERO_EXTRACT)
1598 dest = XEXP (dest, 0);
1600 if (GET_CODE (src) == SUBREG)
1601 src = XEXP (src, 0);
1603 /* If VAR does not appear at the top level of the SET
1604 just scan the lower levels of the tree. */
1606 if (src != var && dest != var)
1609 /* We will need to rerecognize this insn. */
1610 INSN_CODE (insn) = -1;
/* --- Bit-field store into VAR (ZERO_EXTRACT destination). --- */
1613 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1615 /* Since this case will return, ensure we fixup all the
1617 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1618 insn, replacements);
1619 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1620 insn, replacements);
1621 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1622 insn, replacements);
1624 tem = XEXP (outerdest, 0);
1626 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1627 that may appear inside a ZERO_EXTRACT.
1628 This was legitimate when the MEM was a REG. */
1629 if (GET_CODE (tem) == SUBREG
1630 && SUBREG_REG (tem) == var)
1631 tem = fixup_memory_subreg (tem, insn, 1);
1633 tem = fixup_stack_1 (tem, insn);
1635 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1636 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1637 && ! mode_dependent_address_p (XEXP (tem, 0))
1638 && ! MEM_VOLATILE_P (tem))
1640 enum machine_mode wanted_mode
1641 = insn_operand_mode[(int) CODE_FOR_insv][0];
1642 enum machine_mode is_mode = GET_MODE (tem);
1643 int width = INTVAL (XEXP (outerdest, 1));
1644 int pos = INTVAL (XEXP (outerdest, 2));
1646 /* If we have a narrower mode, we can do something. */
1647 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1649 int offset = pos / BITS_PER_UNIT;
1650 rtx old_pos = XEXP (outerdest, 2);
1653 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
1654 offset = (GET_MODE_SIZE (is_mode)
1655 - GET_MODE_SIZE (wanted_mode) - offset);
1658 pos %= GET_MODE_BITSIZE (wanted_mode);
1660 newmem = gen_rtx (MEM, wanted_mode,
1661 plus_constant (XEXP (tem, 0), offset));
1662 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1663 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1664 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1666 /* Make the change and see if the insn remains valid. */
1667 INSN_CODE (insn) = -1;
1668 XEXP (outerdest, 0) = newmem;
1669 XEXP (outerdest, 2) = GEN_INT (pos);
1671 if (recog_memoized (insn) >= 0)
1674 /* Otherwise, restore old position. XEXP (x, 0) will be
1676 XEXP (outerdest, 2) = old_pos;
1680 /* If we get here, the bit-field store doesn't allow memory
1681 or isn't located at a constant position. Load the value into
1682 a register, do the store, and put it back into memory. */
1684 tem1 = gen_reg_rtx (GET_MODE (tem));
1685 emit_insn_before (gen_move_insn (tem1, tem), insn);
1686 emit_insn_after (gen_move_insn (tem, tem1), insn);
1687 XEXP (outerdest, 0) = tem1;
1692 /* STRICT_LOW_PART is a no-op on memory references
1693 and it can cause combinations to be unrecognizable,
1696 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
1697 SET_DEST (x) = XEXP (SET_DEST (x), 0);
1699 /* A valid insn to copy VAR into or out of a register
1700 must be left alone, to avoid an infinite loop here.
1701 If the reference to VAR is by a subreg, fix that up,
1702 since SUBREG is not valid for a memref.
1703 Also fix up the address of the stack slot.
1705 Note that we must not try to recognize the insn until
1706 after we know that we have valid addresses and no
1707 (subreg (mem ...) ...) constructs, since these interfere
1708 with determining the validity of the insn. */
/* --- Copy VAR (or SUBREG of VAR) into a register. --- */
1710 if ((SET_SRC (x) == var
1711 || (GET_CODE (SET_SRC (x)) == SUBREG
1712 && SUBREG_REG (SET_SRC (x)) == var))
1713 && (GET_CODE (SET_DEST (x)) == REG
1714 || (GET_CODE (SET_DEST (x)) == SUBREG
1715 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
1716 && x == single_set (PATTERN (insn)))
1720 replacement = find_fixup_replacement (replacements, SET_SRC (x));
1721 if (replacement->new)
1722 SET_SRC (x) = replacement->new;
1723 else if (GET_CODE (SET_SRC (x)) == SUBREG)
1724 SET_SRC (x) = replacement->new
1725 = fixup_memory_subreg (SET_SRC (x), insn, 0);
1727 SET_SRC (x) = replacement->new
1728 = fixup_stack_1 (SET_SRC (x), insn);
1730 if (recog_memoized (insn) >= 0)
1733 /* INSN is not valid, but we know that we want to
1734 copy SET_SRC (x) to SET_DEST (x) in some way. So
1735 we generate the move and see whether it requires more
1736 than one insn. If it does, we emit those insns and
1737 delete INSN. Otherwise, we can just replace the pattern
1738 of INSN; we have already verified above that INSN has
1739 no other function than to do X. */
1741 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
1742 if (GET_CODE (pat) == SEQUENCE)
1744 emit_insn_after (pat, insn);
1745 PUT_CODE (insn, NOTE);
1746 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1747 NOTE_SOURCE_FILE (insn) = 0;
1750 PATTERN (insn) = pat;
/* --- Copy a register into VAR (or SUBREG of VAR). --- */
1755 if ((SET_DEST (x) == var
1756 || (GET_CODE (SET_DEST (x)) == SUBREG
1757 && SUBREG_REG (SET_DEST (x)) == var))
1758 && (GET_CODE (SET_SRC (x)) == REG
1759 || (GET_CODE (SET_SRC (x)) == SUBREG
1760 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
1761 && x == single_set (PATTERN (insn)))
1765 if (GET_CODE (SET_DEST (x)) == SUBREG)
1766 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
1768 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
1770 if (recog_memoized (insn) >= 0)
1773 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
1774 if (GET_CODE (pat) == SEQUENCE)
1776 emit_insn_after (pat, insn);
1777 PUT_CODE (insn, NOTE);
1778 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1779 NOTE_SOURCE_FILE (insn) = 0;
1782 PATTERN (insn) = pat;
1787 /* Otherwise, storing into VAR must be handled specially
1788 by storing into a temporary and copying that into VAR
1789 with a new insn after this one. Note that this case
1790 will be used when storing into a promoted scalar since
1791 the insn will now have different modes on the input
1792 and output and hence will be invalid (except for the case
1793 of setting it to a constant, which does not need any
1794 change if it is valid). We generate extra code in that case,
1795 but combine.c will eliminate it. */
1800 rtx fixeddest = SET_DEST (x);
1802 /* STRICT_LOW_PART can be discarded, around a MEM. */
1803 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
1804 fixeddest = XEXP (fixeddest, 0);
1805 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
1806 if (GET_CODE (fixeddest) == SUBREG)
1807 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
1809 fixeddest = fixup_stack_1 (fixeddest, insn);
/* VOIDmode source (e.g. a const_int) takes the destination's mode.  */
1811 temp = gen_reg_rtx (GET_MODE (SET_SRC (x)) == VOIDmode
1812 ? GET_MODE (fixeddest)
1813 : GET_MODE (SET_SRC (x)));
1815 emit_insn_after (gen_move_insn (fixeddest,
1816 gen_lowpart (GET_MODE (fixeddest),
1820 SET_DEST (x) = temp;
/* --- Default: recurse over all operands of X. --- */
1825 /* Nothing special about this RTX; fix its operands. */
1827 fmt = GET_RTX_FORMAT (code);
1828 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1831 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
1835 for (j = 0; j < XVECLEN (x, i); j++)
1836 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
1837 insn, replacements);
1842 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
1843 return an rtx (MEM:m1 newaddr) which is equivalent.
1844 If any insns must be emitted to compute NEWADDR, put them before INSN.
1846 UNCRITICAL nonzero means accept paradoxical subregs.
1847 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
1850 fixup_memory_subreg (x, insn, uncritical)
/* NOTE(review): parameter declarations, locals (`rtx result;`,
   `rtx saved;`), braces, and the tail of the paradoxical-subreg check
   (likely `&& ! uncritical` plus an abort) are missing from this copy.  */
1855 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
1856 rtx addr = XEXP (SUBREG_REG (x), 0);
1857 enum machine_mode mode = GET_MODE (x);
1860 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
1861 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
/* On big-endian targets the subword lives at the high end of the word,
   so the byte offset must be adjusted.  */
1865 #if BYTES_BIG_ENDIAN
1866 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
1867 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
1869 addr = plus_constant (addr, offset);
1870 if (!flag_force_addr && memory_address_p (mode, addr))
1871 /* Shortcut if no insns need be emitted. */
1872 return change_address (SUBREG_REG (x), mode, addr);
/* Otherwise let change_address emit address-legitimization insns into a
   sequence, which is placed before INSN.  */
1874 result = change_address (SUBREG_REG (x), mode, addr);
1875 emit_insn_before (gen_sequence (), insn);
1880 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
1881 Replace subexpressions of X in place.
1882 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
1883 Otherwise return X, with its contents possibly altered.
1885 If any insns must be emitted to compute NEWADDR, put them before INSN.
1887 UNCRITICAL is as in fixup_memory_subreg. */
1890 walk_fixup_memory_subreg (x, insn, uncritical)
/* NOTE(review): parameter declarations, locals (fmt, i, j), the x == 0
   early return, and braces are missing from this copy.  */
1895 register enum rtx_code code;
1902 code = GET_CODE (x);
1904 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
1905 return fixup_memory_subreg (x, insn, uncritical);
1907 /* Nothing special about this RTX; fix its operands. */
/* Generic recursion driven by the rtx format string: 'e' operands are
   single rtx's, 'E' operands are vectors.  */
1909 fmt = GET_RTX_FORMAT (code);
1910 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1913 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
1917 for (j = 0; j < XVECLEN (x, i); j++)
1919 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
1926 /* Fix up any references to stack slots that are invalid memory addresses
1927 because they exceed the maximum range of a displacement. */
1930 fixup_stack_slots ()
/* NOTE(review): the return type line and the `register rtx insn;`
   declaration are missing from this copy (line-number gaps).  */
1934 /* Did we generate a stack slot that is out of range
1935 or otherwise has an invalid address? */
1936 if (invalid_stack_slot)
1938 /* Yes. Must scan all insns for stack-refs that exceed the limit. */
/* Only real insns (INSN, CALL_INSN, JUMP_INSN) have patterns to fix;
   notes, labels and barriers are skipped.  */
1939 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1940 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
1941 || GET_CODE (insn) == JUMP_INSN)
1942 fixup_stack_1 (PATTERN (insn), insn);
1947 /* For each memory ref within X, if it refers to a stack slot
1948 with an out of range displacement, put the address in a temp register
1949 (emitting new insns before INSN to load these registers)
1950 and alter the memory ref to use that register.
1951 Replace each such MEM rtx with a copy, to avoid clobberage. */
1954 fixup_stack_1 (x, insn)
/* NOTE(review): parameter declarations, locals (fmt, i, ad, temp, seq),
   the MEM case label, and braces are missing from this copy.  */
1959 register RTX_CODE code = GET_CODE (x);
/* --- Case: X is a MEM; check its address. --- */
1964 register rtx ad = XEXP (x, 0);
1965 /* If we have address of a stack slot but it's not valid
1966 (displacement is too large), compute the sum in a register. */
/* Only frame-relative addresses are considered: (plus (reg) (const_int))
   where the reg is a virtual register or the internal arg pointer.  */
1967 if (GET_CODE (ad) == PLUS
1968 && GET_CODE (XEXP (ad, 0)) == REG
1969 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
1970 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
1971 || XEXP (ad, 0) == current_function_internal_arg_pointer)
1972 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
1975 if (memory_address_p (GET_MODE (x), ad))
/* Invalid displacement: load the full address into a register and
   emit the computation before INSN.  */
1979 temp = copy_to_reg (ad);
1980 seq = gen_sequence ();
1982 emit_insn_before (seq, insn);
1983 return change_address (x, VOIDmode, temp);
/* --- Default: recurse over all operands of X. --- */
1988 fmt = GET_RTX_FORMAT (code);
1989 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1992 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
1996 for (j = 0; j < XVECLEN (x, i); j++)
1997 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
/* NOTE(review): declarations, braces, #endif lines, and parts of several
   conditions are missing from this copy (embedded line-number gaps);
   comments below describe only what the visible code establishes.  */
2003 /* Optimization: a bit-field instruction whose field
2004 happens to be a byte or halfword in memory
2005 can be changed to a move instruction.
2007 We call here when INSN is an insn to examine or store into a bit-field.
2008 BODY is the SET-rtx to be altered.
2010 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2011 (Currently this is called only from function.c, and EQUIV_MEM
2015 optimize_bit_field (body, insn, equiv_mem)
2020 register rtx bitfield;
2023 enum machine_mode mode;
/* Work out whether the bit-field reference is the destination (a store,
   destflag = 1) or the source (an extract, destflag = 0).  */
2025 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2026 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2027 bitfield = SET_DEST (body), destflag = 1;
2029 bitfield = SET_SRC (body), destflag = 0;
2031 /* First check that the field being stored has constant size and position
2032 and is in fact a byte or halfword suitably aligned. */
2034 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2035 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2036 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2038 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2040 register rtx memref = 0;
2042 /* Now check that the containing word is memory, not a register,
2043 and that it is safe to change the machine mode. */
/* The container may be a MEM, a REG with a known memory equivalent in
   EQUIV_MEM, or a SUBREG of either.  */
2045 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2046 memref = XEXP (bitfield, 0);
2047 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2049 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2050 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2051 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2052 memref = SUBREG_REG (XEXP (bitfield, 0));
2053 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2055 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2056 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2059 && ! mode_dependent_address_p (XEXP (memref, 0))
2060 && ! MEM_VOLATILE_P (memref))
2062 /* Now adjust the address, first for any subreg'ing
2063 that we are now getting rid of,
2064 and then for which byte of the word is wanted. */
2066 register int offset = INTVAL (XEXP (bitfield, 2));
2067 /* Adjust OFFSET to count bits from low-address byte. */
2068 #if BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN
2069 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2070 - offset - INTVAL (XEXP (bitfield, 1)));
2072 /* Adjust OFFSET to count bytes from low-address byte. */
2073 offset /= BITS_PER_UNIT;
2074 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2076 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2077 #if BYTES_BIG_ENDIAN
2078 offset -= (MIN (UNITS_PER_WORD,
2079 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2080 - MIN (UNITS_PER_WORD,
2081 GET_MODE_SIZE (GET_MODE (memref))));
/* Build the narrow MEM that covers exactly the byte/halfword field.  */
2085 memref = change_address (memref, mode,
2086 plus_constant (XEXP (memref, 0), offset));
2088 /* Store this memory reference where
2089 we found the bit field reference. */
/* --- Store case: replace the bit-field destination with the MEM and
   narrow the source to match.  Changes are grouped (last arg 1) and
   committed together by apply_change_group below.  */
2093 validate_change (insn, &SET_DEST (body), memref, 1);
2094 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2096 rtx src = SET_SRC (body);
2097 while (GET_CODE (src) == SUBREG
2098 && SUBREG_WORD (src) == 0)
2099 src = SUBREG_REG (src);
2100 if (GET_MODE (src) != GET_MODE (memref))
2101 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2102 validate_change (insn, &SET_SRC (body), src, 1);
2104 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2105 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2106 /* This shouldn't happen because anything that didn't have
2107 one of these modes should have got converted explicitly
2108 and then referenced through a subreg.
2109 This is so because the original bit-field was
2110 handled by agg_mode and so its tree structure had
2111 the same mode that memref now has. */
/* --- Extract case: replace the source with the MEM, converting into
   the destination's mode when they differ.  */
2116 rtx dest = SET_DEST (body);
2118 while (GET_CODE (dest) == SUBREG
2119 && SUBREG_WORD (dest) == 0)
2120 dest = SUBREG_REG (dest);
2122 validate_change (insn, &SET_DEST (body), dest, 1);
2124 if (GET_MODE (dest) == GET_MODE (memref))
2125 validate_change (insn, &SET_SRC (body), memref, 1);
2128 /* Convert the mem ref to the destination mode. */
2129 rtx newreg = gen_reg_rtx (GET_MODE (dest));
/* Zero- vs sign-extension follows the original extract's code.  */
2132 convert_move (newreg, memref,
2133 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2137 validate_change (insn, &SET_SRC (body), newreg, 1);
2141 /* See if we can convert this extraction or insertion into
2142 a simple move insn. We might not be able to do so if this
2143 was, for example, part of a PARALLEL.
2145 If we succeed, write out any needed conversions. If we fail,
2146 it is hard to guess why we failed, so don't do anything
2147 special; just let the optimization be suppressed. */
2149 if (apply_change_group () && seq)
2150 emit_insns_before (seq, insn);
2155 /* These routines are responsible for converting virtual register references
2156 to the actual hard register references once RTL generation is complete.
2158 The following four variables are used for communication between the
2159 routines. They contain the offsets of the virtual registers from their
2160 respective hard registers. */
/* Offset of the incoming-arguments virtual register (set from
   FIRST_PARM_OFFSET in instantiate_virtual_regs).  */
2162 static int in_arg_offset;
/* Offset of the stack-variables virtual register (STARTING_FRAME_OFFSET).  */
2163 static int var_offset;
/* Offset of the dynamic-allocation virtual register
   (STACK_DYNAMIC_OFFSET).  */
2164 static int dynamic_offset;
/* Offset of the outgoing-arguments virtual register
   (STACK_POINTER_OFFSET).  */
2165 static int out_arg_offset;
2167 /* In most machines, the stack pointer register is equivalent to the bottom
2170 #ifndef STACK_POINTER_OFFSET
2171 #define STACK_POINTER_OFFSET 0
2174 /* If not defined, pick an appropriate default for the offset of dynamically
2175 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2176 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2178 #ifndef STACK_DYNAMIC_OFFSET
2180 #ifdef ACCUMULATE_OUTGOING_ARGS
2181 /* The bottom of the stack points to the actual arguments. If
2182 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2183 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2184 stack space for register parameters is not pushed by the caller, but
2185 rather part of the fixed stack areas and hence not included in
2186 `current_function_outgoing_args_size'. Nevertheless, we must allow
2187 for it when allocating stack dynamic objects. */
2189 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2190 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2191 (current_function_outgoing_args_size \
2192 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2195 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2196 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2200 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2204 /* Pass through the INSNS of function FNDECL and convert virtual register
2205 references to hard register references. */
/* NOTE(review): the return type and the declarations of FNDECL/INSNS are
   omitted from this excerpt -- presumably `tree fndecl; rtx insns;`; confirm
   against the full source. */
2208 instantiate_virtual_regs (fndecl, insns)
/* First fix the four communication offsets used by the _1 worker below. */
2214 /* Compute the offsets to use for this function. */
2215 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2216 var_offset = STARTING_FRAME_OFFSET;
2217 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2218 out_arg_offset = STACK_POINTER_OFFSET;
2220 /* Scan all variables and parameters of this function. For each that is
2221 in memory, instantiate all virtual registers if the result is a valid
2222 address. If not, we do it later. That will handle most uses of virtual
2223 regs on many machines. */
2224 instantiate_decls (fndecl, 1);
2226 /* Initialize recognition, indicating that volatile is OK. */
2229 /* Scan through all the insns, instantiating every virtual register still
/* Only real insns (INSN/JUMP_INSN/CALL_INSN) carry patterns to rewrite. */
2231 for (insn = insns; insn; insn = NEXT_INSN (insn))
2232 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2233 || GET_CODE (insn) == CALL_INSN)
/* EXTRA_INSNS == 1: the replacement must happen, emitting setup insns
   before INSN when the simple substitution is not valid. */
2235 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2236 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2239 /* Now instantiate the remaining register equivalences for debugging info.
2240 These will not be valid addresses. */
/* Second pass with VALID_ONLY == 0: force instantiation even when the
   resulting address would not be recognizable. */
2241 instantiate_decls (fndecl, 0);
2243 /* Indicate that, from now on, assign_stack_local should use
2244 frame_pointer_rtx. */
2245 virtuals_instantiated = 1;
2248 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2249 all virtual registers in their DECL_RTL's.
2251 If VALID_ONLY, do this only if the resulting address is still valid.
2252 Otherwise, always do it. */
/* NOTE(review): parameter declarations (presumably `tree fndecl; int
   valid_only;`) and the local `decl` are omitted from this excerpt. */
2255 instantiate_decls (fndecl, valid_only)
2261 if (DECL_INLINE (fndecl))
2262 /* When compiling an inline function, the obstack used for
2263 rtl allocation is the maybepermanent_obstack. Calling
2264 `resume_temporary_allocation' switches us back to that
2265 obstack while we process this function's parameters. */
2266 resume_temporary_allocation ();
2268 /* Process all parameters of the function. */
/* Each parameter has two RTLs to fix: its DECL_RTL (home during the
   function) and its DECL_INCOMING_RTL (where it arrived). */
2269 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2271 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2273 instantiate_decl (DECL_INCOMING_RTL (decl),
2274 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2277 /* Now process all variables defined in the function or its subblocks. */
2278 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2280 if (DECL_INLINE (fndecl))
2282 /* Save all rtl allocated for this function by raising the
2283 high-water mark on the maybepermanent_obstack. */
2285 /* All further rtl allocation is now done in the current_obstack. */
2286 rtl_in_current_obstack ();
2290 /* Subroutine of instantiate_decls: Process all decls in the given
2291 BLOCK node and all its subblocks. */
/* NOTE(review): declarations of LET (a BLOCK tree node), VALID_ONLY and the
   local `t` are omitted from this excerpt. */
2294 instantiate_decls_1 (let, valid_only)
/* Instantiate the DECL_RTL of every variable declared in this block. */
2300 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2301 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2304 /* Process all subblocks. */
/* Recurse depth-first over the block tree. */
2305 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2306 instantiate_decls_1 (t, valid_only);
2309 /* Subroutine of the preceding procedures: Given RTL representing a
2310 decl and the size of the object, do any instantiation required.
2312 If VALID_ONLY is non-zero, it means that the RTL should only be
2313 changed if the new address is valid. */
/* NOTE(review): parameter declarations and the local `addr` (the MEM's
   address, presumably XEXP (x, 0)) are omitted from this excerpt. */
2316 instantiate_decl (x, size, valid_only)
2321 enum machine_mode mode;
2324 /* If this is not a MEM, no need to do anything. Similarly if the
2325 address is a constant or a register that is not a virtual register. */
2327 if (x == 0 || GET_CODE (x) != MEM)
/* A constant address, or a hard/pseudo reg outside the virtual range,
   needs no rewriting. */
2331 if (CONSTANT_P (addr)
2332 || (GET_CODE (addr) == REG
2333 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2334 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2337 /* If we should only do this if the address is valid, copy the address.
2338 We need to do this so we can undo any changes that might make the
2339 address invalid. This copy is unfortunate, but probably can't be
2343 addr = copy_rtx (addr);
/* OBJECT == NULL_RTX and EXTRA_INSNS == 0: substitute in place, no new
   insns may be generated here. */
2345 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2350 /* Now verify that the resulting address is valid for every integer or
2351 floating-point mode up to and including SIZE bytes long. We do this
2352 since the object might be accessed in any mode and frame addresses
/* Walk integer modes narrowest-to-widest up to SIZE bytes. */
2355 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2356 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2357 mode = GET_MODE_WIDER_MODE (mode))
2358 if (! memory_address_p (mode, addr))
/* Same check over the floating-point modes. */
2361 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2362 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2363 mode = GET_MODE_WIDER_MODE (mode))
2364 if (! memory_address_p (mode, addr))
2367 /* Otherwise, put back the address, now that we have updated it and we
2368 know it is valid. */
2373 /* Given a pointer to a piece of rtx and an optional pointer to the
2374 containing object, instantiate any virtual registers present in it.
2376 If EXTRA_INSNS, we always do the replacement and generate
2377 any extra insns before OBJECT. If it zero, we do nothing if replacement
2380 Return 1 if we either had nothing to do or if we were able to do the
2381 needed replacement. Return 0 otherwise; we only return zero if
2382 EXTRA_INSNS is zero.
2384 We first try some simple transformations to avoid the creation of extra
/* NOTE(review): parameter/local declarations (LOC is rtx *, OBJECT an insn
   or 0, plus locals x, new, temp, seq, offset, code, fmt, i, j) are omitted
   from this excerpt -- confirm against the full source. */
2388 instantiate_virtual_regs_1 (loc, object, extra_insns)
2402 /* Re-start here to avoid recursion in common cases. */
2409 code = GET_CODE (x);
2411 /* Check for some special cases. */
/* NOTE(review): the switch statement and its first case label (presumably
   `case SET:`) are missing from this excerpt. */
2428 /* We are allowed to set the virtual registers. This means that
2429 that the actual register should receive the source minus the
2430 appropriate offset. This is used, for example, in the handling
2431 of non-local gotos. */
2432 if (SET_DEST (x) == virtual_incoming_args_rtx)
2433 new = arg_pointer_rtx, offset = - in_arg_offset;
2434 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2435 new = frame_pointer_rtx, offset = - var_offset;
2436 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2437 new = stack_pointer_rtx, offset = - dynamic_offset;
2438 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2439 new = stack_pointer_rtx, offset = - out_arg_offset;
2443 /* The only valid sources here are PLUS or REG. Just do
2444 the simplest possible thing to handle them. */
2445 if (GET_CODE (SET_SRC (x)) != REG
2446 && GET_CODE (SET_SRC (x)) != PLUS)
2450 if (GET_CODE (SET_SRC (x)) != REG)
2451 temp = force_operand (SET_SRC (x), NULL_RTX)
2454 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2458 emit_insns_before (seq, object);
2461 if (!validate_change (object, &SET_SRC (x), temp, 0)
/* The destination of the SET may itself contain virtual regs. */
2468 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
/* NOTE(review): presumably inside `case PLUS:` here -- the label is
   missing from this excerpt. */
2473 /* Handle special case of virtual register plus constant. */
2474 if (CONSTANT_P (XEXP (x, 1)))
2478 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2479 if (GET_CODE (XEXP (x, 0)) == PLUS)
2481 rtx inner = XEXP (XEXP (x, 0), 0);
2483 if (inner == virtual_incoming_args_rtx)
2484 new = arg_pointer_rtx, offset = in_arg_offset;
2485 else if (inner == virtual_stack_vars_rtx)
2486 new = frame_pointer_rtx, offset = var_offset;
2487 else if (inner == virtual_stack_dynamic_rtx)
2488 new = stack_pointer_rtx, offset = dynamic_offset;
2489 else if (inner == virtual_outgoing_args_rtx)
2490 new = stack_pointer_rtx, offset = out_arg_offset;
/* The non-virtual addend of the inner PLUS may itself need work. */
2497 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2499 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2502 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2503 new = arg_pointer_rtx, offset = in_arg_offset;
2504 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2505 new = frame_pointer_rtx, offset = var_offset;
2506 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2507 new = stack_pointer_rtx, offset = dynamic_offset;
2508 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2509 new = stack_pointer_rtx, offset = out_arg_offset;
2512 /* We know the second operand is a constant. Unless the
2513 first operand is a REG (which has been already checked),
2514 it needs to be checked. */
2515 if (GET_CODE (XEXP (x, 0)) != REG)
/* Fold OFFSET into the existing constant addend. */
2525 new = plus_constant (XEXP (x, 1), offset);
2527 /* If the new constant is zero, try to replace the sum with its
2529 if (new == const0_rtx
2530 && validate_change (object, loc, XEXP (x, 0), 0))
2533 /* Next try to replace constant with new one. */
2534 if (!validate_change (object, &XEXP (x, 1), new, 0))
2542 /* Otherwise copy the new constant into a register and replace
2543 constant with that register. */
2544 temp = gen_reg_rtx (Pmode);
2545 if (validate_change (object, &XEXP (x, 1), temp, 0))
2546 emit_insn_before (gen_move_insn (temp, new), object);
2549 /* If that didn't work, replace this expression with a
2550 register containing the sum. */
2552 new = gen_rtx (PLUS, Pmode, XEXP (x, 0), new);
2556 temp = force_operand (new, NULL_RTX);
2560 emit_insns_before (seq, object);
2561 if (! validate_change (object, loc, temp, 0)
2562 && ! validate_replace_rtx (x, temp, object))
2570 /* Fall through to generic two-operand expression case. */
2576 case DIV: case UDIV:
2577 case MOD: case UMOD:
2578 case AND: case IOR: case XOR:
2579 case LSHIFT: case ASHIFT: case ROTATE:
2580 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2582 case GE: case GT: case GEU: case GTU:
2583 case LE: case LT: case LEU: case LTU:
/* For two-operand codes, process operand 1 here; operand 0 is handled
   by the tail-recursive restart at the end of the function. */
2584 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2585 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
/* MEM case begins here (label missing from this excerpt). */
2590 /* Most cases of MEM that convert to valid addresses have already been
2591 handled by our scan of regno_reg_rtx. The only special handling we
2592 need here is to make a copy of the rtx to ensure it isn't being
2593 shared if we have to change it to a pseudo.
2595 If the rtx is a simple reference to an address via a virtual register,
2596 it can potentially be shared. In such cases, first try to make it
2597 a valid address, which can also be shared. Otherwise, copy it and
2600 First check for common cases that need no processing. These are
2601 usually due to instantiation already being done on a previous instance
2605 if (CONSTANT_ADDRESS_P (temp)
2606 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2607 || temp == arg_pointer_rtx
2609 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2610 || temp == hard_frame_pointer_rtx
2612 || temp == frame_pointer_rtx)
2615 if (GET_CODE (temp) == PLUS
2616 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2617 && (XEXP (temp, 0) == frame_pointer_rtx
2618 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2619 || XEXP (temp, 0) == hard_frame_pointer_rtx
2621 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2622 || XEXP (temp, 0) == arg_pointer_rtx
/* A (possibly shared) simple virtual-register address: try in-place
   substitution first so all sharers benefit. */
2627 if (temp == virtual_stack_vars_rtx
2628 || temp == virtual_incoming_args_rtx
2629 || (GET_CODE (temp) == PLUS
2630 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2631 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2632 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2634 /* This MEM may be shared. If the substitution can be done without
2635 the need to generate new pseudos, we want to do it in place
2636 so all copies of the shared rtx benefit. The call below will
2637 only make substitutions if the resulting address is still
2640 Note that we cannot pass X as the object in the recursive call
2641 since the insn being processed may not allow all valid
2642 addresses. However, if we were not passed on object, we can
2643 only modify X without copying it if X will have a valid
2646 ??? Also note that this can still lose if OBJECT is an insn that
2647 has less restrictions on an address that some other insn.
2648 In that case, we will modify the shared address. This case
2649 doesn't seem very likely, though. */
2651 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2652 object ? object : x, 0))
2655 /* Otherwise make a copy and process that copy. We copy the entire
2656 RTL expression since it might be a PLUS which could also be
2658 *loc = x = copy_rtx (x);
2661 /* Fall through to generic unary operation case. */
2665 case STRICT_LOW_PART:
2667 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2668 case SIGN_EXTEND: case ZERO_EXTEND:
2669 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2670 case FLOAT: case FIX:
2671 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2675 /* These case either have just one operand or we know that we need not
2676 check the rest of the operands. */
/* REG case (label missing from this excerpt): X itself is one of the
   four virtual registers. */
2681 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2682 in front of this insn and substitute the temporary. */
2683 if (x == virtual_incoming_args_rtx)
2684 new = arg_pointer_rtx, offset = in_arg_offset;
2685 else if (x == virtual_stack_vars_rtx)
2686 new = frame_pointer_rtx, offset = var_offset;
2687 else if (x == virtual_stack_dynamic_rtx)
2688 new = stack_pointer_rtx, offset = dynamic_offset;
2689 else if (x == virtual_outgoing_args_rtx)
2690 new = stack_pointer_rtx, offset = out_arg_offset;
2694 temp = plus_constant (new, offset);
2695 if (!validate_change (object, loc, temp, 0))
2701 temp = force_operand (temp, NULL_RTX);
2705 emit_insns_before (seq, object);
2706 if (! validate_change (object, loc, temp, 0)
2707 && ! validate_replace_rtx (x, temp, object))
/* Generic fallback: recurse into every operand per the rtx format. */
2715 /* Scan all subexpressions. */
2716 fmt = GET_RTX_FORMAT (code);
2717 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2720 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
2723 else if (*fmt == 'E')
2724 for (j = 0; j < XVECLEN (x, i); j++)
2725 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
2732 /* Optimization: assuming this function does not receive nonlocal gotos,
2733 delete the handlers for such, as well as the insns to establish
2734 and disestablish them. */
/* NOTE(review): the defining line of this routine is not visible in this
   excerpt; the body below scans the whole insn chain for handler-related
   labels and insns. */
2740 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2742 /* Delete the handler by turning off the flag that would
2743 prevent jump_optimize from deleting it.
2744 Also permit deletion of the nonlocal labels themselves
2745 if nothing local refers to them. */
2746 if (GET_CODE (insn) == CODE_LABEL)
2747 LABEL_PRESERVE_P (insn) = 0;
/* Insns mentioning the handler slot or saved stack level set up the
   nonlocal-goto machinery; mark/handle them for deletion here. */
2748 if (GET_CODE (insn) == INSN
2749 && ((nonlocal_goto_handler_slot != 0
2750 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
2751 || (nonlocal_goto_stack_level != 0
2752 && reg_mentioned_p (nonlocal_goto_stack_level,
2758 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
2759 of the current function. */
/* NOTE(review): return type (presumably rtx) and locals t/x are omitted
   from this excerpt. */
2762 nonlocal_label_rtx_list ()
/* Cons up an EXPR_LIST of the label rtx for each nonlocal label. */
2767 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
2768 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
2773 /* Output a USE for any register use in RTL.
2774 This is used with -noreg to mark the extent of lifespan
2775 of any registers used in a user-visible variable's DECL_RTL. */
/* NOTE(review): the defining line (presumably `use_variable (rtl)`) is not
   visible in this excerpt. */
2781 if (GET_CODE (rtl) == REG)
2782 /* This is a register variable. */
2783 emit_insn (gen_rtx (USE, VOIDmode, rtl));
/* A MEM addressed by a non-virtual reg other than the internal arg
   pointer: emit a USE of the address register instead. */
2784 else if (GET_CODE (rtl) == MEM
2785 && GET_CODE (XEXP (rtl, 0)) == REG
2786 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2787 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2788 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2789 /* This is a variable-sized structure. */
2790 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
2793 /* Like use_variable except that it outputs the USEs after INSN
2794 instead of at the end of the insn-chain. */
/* Same REG / variable-sized-MEM logic as use_variable, but using
   emit_insn_after so the USE lands right after INSN. */
2797 use_variable_after (rtl, insn)
2800 if (GET_CODE (rtl) == REG)
2801 /* This is a register variable. */
2802 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
2803 else if (GET_CODE (rtl) == MEM
2804 && GET_CODE (XEXP (rtl, 0)) == REG
2805 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
2806 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
2807 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
2808 /* This is a variable-sized structure. */
2809 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
2815 return max_parm_reg;
2818 /* Return the first insn following those generated by `assign_parms'. */
2821 get_first_nonparm_insn ()
/* If parm-setup insns were recorded, the first non-parm insn follows the
   last of them; otherwise every insn qualifies. */
2824 return NEXT_INSN (last_parm_insn);
2825 return get_insns ();
2828 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
2829 Crash if there is none. */
2832 get_first_block_beg ()
2834 register rtx searcher;
2835 register rtx insn = get_first_nonparm_insn ();
/* Linear scan from the first non-parm insn for the block-begin note. */
2837 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
2838 if (GET_CODE (searcher) == NOTE
2839 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
2842 abort (); /* Invalid call to this function. (See comments above.) */
2846 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2847 This means a type for which function calls must pass an address to the
2848 function or get an address back from the function.
2849 EXP may be a type node or an expression (whose type is tested). */
2852 aggregate_value_p (exp)
2855 int i, regno, nregs;
/* If EXP is a type node ('t' class), test it directly; otherwise test
   the type of the expression. */
2858 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
2861 type = TREE_TYPE (exp);
/* Target says this type is returned in memory. */
2863 if (RETURN_IN_MEMORY (type))
/* Under -fpcc-struct-return, all record-like types go in memory. */
2865 if (flag_pcc_struct_return
2866 && (TREE_CODE (type) == RECORD_TYPE
2867 || TREE_CODE (type) == UNION_TYPE
2868 || TREE_CODE (type) == QUAL_UNION_TYPE
2869 || TREE_CODE (type) == ARRAY_TYPE))
2871 /* Make sure we have suitable call-clobbered regs to return
2872 the value in; if not, we must return it in memory. */
2873 reg = hard_function_value (type, 0);
2874 regno = REGNO (reg);
2875 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
/* Any non-call-clobbered reg in the return window forces memory. */
2876 for (i = 0; i < nregs; i++)
2877 if (! call_used_regs[regno + i])
2882 /* Assign RTL expressions to the function's parameters.
2883 This may involve copying them into registers and using
2884 those registers as the RTL for them.
2886 If SECOND_TIME is non-zero it means that this function is being
2887 called a second time. This is done by integrate.c when a function's
2888 compilation is deferred. We need to come back here in case the
2889 FUNCTION_ARG macro computes items needed for the rest of the compilation
2890 (such as changing which registers are fixed or caller-saved). But suppress
2891 writing any insns or setting DECL_RTL of anything in this case. */
2894 assign_parms (fndecl, second_time)
2899 register rtx entry_parm = 0;
2900 register rtx stack_parm = 0;
2901 CUMULATIVE_ARGS args_so_far;
2902 enum machine_mode promoted_mode, passed_mode, nominal_mode;
2904 /* Total space needed so far for args on the stack,
2905 given as a constant and a tree-expression. */
2906 struct args_size stack_args_size;
2907 tree fntype = TREE_TYPE (fndecl);
2908 tree fnargs = DECL_ARGUMENTS (fndecl);
2909 /* This is used for the arg pointer when referring to stack args. */
2910 rtx internal_arg_pointer;
2911 /* This is a dummy PARM_DECL that we used for the function result if
2912 the function returns a structure. */
2913 tree function_result_decl = 0;
2914 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
2915 int varargs_setup = 0;
2916 rtx conversion_insns = 0;
2917 /* FUNCTION_ARG may look at this variable. Since this is not
2918 expanding a call it will always be zero in this function. */
2919 int current_call_is_indirect = 0;
2921 /* Nonzero if the last arg is named `__builtin_va_alist',
2922 which is used on some machines for old-fashioned non-ANSI varargs.h;
2923 this should be stuck onto the stack as if it had arrived there. */
2926 && (parm = tree_last (fnargs)) != 0
2928 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
2929 "__builtin_va_alist")));
2931 /* Nonzero if function takes extra anonymous args.
2932 This means the last named arg must be on the stack
2933 right before the anonymous ones. */
2935 = (TYPE_ARG_TYPES (fntype) != 0
2936 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2937 != void_type_node));
2939 /* If the reg that the virtual arg pointer will be translated into is
2940 not a fixed reg or is the stack pointer, make a copy of the virtual
2941 arg pointer, and address parms via the copy. The frame pointer is
2942 considered fixed even though it is not marked as such.
2944 The second time through, simply use ap to avoid generating rtx. */
2946 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2947 || ! (fixed_regs[ARG_POINTER_REGNUM]
2948 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
2950 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2952 internal_arg_pointer = virtual_incoming_args_rtx;
2953 current_function_internal_arg_pointer = internal_arg_pointer;
2955 stack_args_size.constant = 0;
2956 stack_args_size.var = 0;
2958 /* If struct value address is treated as the first argument, make it so. */
2959 if (aggregate_value_p (DECL_RESULT (fndecl))
2960 && ! current_function_returns_pcc_struct
2961 && struct_value_incoming_rtx == 0)
2963 tree type = build_pointer_type (fntype);
2965 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
2967 DECL_ARG_TYPE (function_result_decl) = type;
2968 TREE_CHAIN (function_result_decl) = fnargs;
2969 fnargs = function_result_decl;
2972 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
2973 bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx));
2975 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2976 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
2978 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
2981 /* We haven't yet found an argument that we must push and pretend the
2983 current_function_pretend_args_size = 0;
2985 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2988 = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE
2989 || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE
2990 || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE
2991 || TREE_CODE (TREE_TYPE (parm)) == QUAL_UNION_TYPE);
2992 struct args_size stack_offset;
2993 struct args_size arg_size;
2994 int passed_pointer = 0;
2995 tree passed_type = DECL_ARG_TYPE (parm);
2997 /* Set LAST_NAMED if this is last named arg before some
2998 anonymous args. We treat it as if it were anonymous too. */
2999 int last_named = ((TREE_CHAIN (parm) == 0
3000 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3001 && (vararg || stdarg));
3003 if (TREE_TYPE (parm) == error_mark_node
3004 /* This can happen after weird syntax errors
3005 or if an enum type is defined among the parms. */
3006 || TREE_CODE (parm) != PARM_DECL
3007 || passed_type == NULL)
3009 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3011 TREE_USED (parm) = 1;
3015 /* For varargs.h function, save info about regs and stack space
3016 used by the individual args, not including the va_alist arg. */
3017 if (vararg && last_named)
3018 current_function_args_info = args_so_far;
3020 /* Find mode of arg as it is passed, and mode of arg
3021 as it should be during execution of this function. */
3022 passed_mode = TYPE_MODE (passed_type);
3023 nominal_mode = TYPE_MODE (TREE_TYPE (parm));
3025 /* If the parm's mode is VOID, its value doesn't matter,
3026 and avoid the usual things like emit_move_insn that could crash. */
3027 if (nominal_mode == VOIDmode)
3029 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3033 /* See if this arg was passed by invisible reference. It is if
3034 it is an object whose size depends on the contents of the
3035 object itself or if the machine requires these objects be passed
3038 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3039 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3040 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3041 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3042 passed_type, ! last_named)
3046 passed_type = build_pointer_type (passed_type);
3048 passed_mode = nominal_mode = Pmode;
3051 promoted_mode = passed_mode;
3053 #ifdef PROMOTE_FUNCTION_ARGS
3054 /* Compute the mode in which the arg is actually extended to. */
3055 if (TREE_CODE (passed_type) == INTEGER_TYPE
3056 || TREE_CODE (passed_type) == ENUMERAL_TYPE
3057 || TREE_CODE (passed_type) == BOOLEAN_TYPE
3058 || TREE_CODE (passed_type) == CHAR_TYPE
3059 || TREE_CODE (passed_type) == REAL_TYPE
3060 || TREE_CODE (passed_type) == POINTER_TYPE
3061 || TREE_CODE (passed_type) == OFFSET_TYPE)
3063 unsignedp = TREE_UNSIGNED (passed_type);
3064 PROMOTE_MODE (promoted_mode, unsignedp, passed_type);
3068 /* Let machine desc say which reg (if any) the parm arrives in.
3069 0 means it arrives on the stack. */
3070 #ifdef FUNCTION_INCOMING_ARG
3071 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3072 passed_type, ! last_named);
3074 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3075 passed_type, ! last_named);
3079 passed_mode = promoted_mode;
3081 #ifdef SETUP_INCOMING_VARARGS
3082 /* If this is the last named parameter, do any required setup for
3083 varargs or stdargs. We need to know about the case of this being an
3084 addressable type, in which case we skip the registers it
3085 would have arrived in.
3087 For stdargs, LAST_NAMED will be set for two parameters, the one that
3088 is actually the last named, and the dummy parameter. We only
3089 want to do this action once.
3091 Also, indicate when RTL generation is to be suppressed. */
3092 if (last_named && !varargs_setup)
3094 SETUP_INCOMING_VARARGS (args_so_far, passed_mode, passed_type,
3095 current_function_pretend_args_size,
3101 /* Determine parm's home in the stack,
3102 in case it arrives in the stack or we should pretend it did.
3104 Compute the stack position and rtx where the argument arrives
3107 There is one complexity here: If this was a parameter that would
3108 have been passed in registers, but wasn't only because it is
3109 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3110 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3111 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3112 0 as it was the previous time. */
3114 locate_and_pad_parm (passed_mode, passed_type,
3115 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3118 #ifdef FUNCTION_INCOMING_ARG
3119 FUNCTION_INCOMING_ARG (args_so_far, passed_mode,
3122 || varargs_setup)) != 0,
3124 FUNCTION_ARG (args_so_far, passed_mode,
3126 ! last_named || varargs_setup) != 0,
3129 fndecl, &stack_args_size, &stack_offset, &arg_size);
3133 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3135 if (offset_rtx == const0_rtx)
3136 stack_parm = gen_rtx (MEM, passed_mode, internal_arg_pointer);
3138 stack_parm = gen_rtx (MEM, passed_mode,
3139 gen_rtx (PLUS, Pmode,
3140 internal_arg_pointer, offset_rtx));
3142 /* If this is a memory ref that contains aggregate components,
3143 mark it as such for cse and loop optimize. */
3144 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3147 /* If this parameter was passed both in registers and in the stack,
3148 use the copy on the stack. */
3149 if (MUST_PASS_IN_STACK (passed_mode, passed_type))
3152 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3153 /* If this parm was passed part in regs and part in memory,
3154 pretend it arrived entirely in memory
3155 by pushing the register-part onto the stack.
3157 In the special case of a DImode or DFmode that is split,
3158 we could put it together in a pseudoreg directly,
3159 but for now that's not worth bothering with. */
3163 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode,
3164 passed_type, ! last_named);
3168 current_function_pretend_args_size
3169 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3170 / (PARM_BOUNDARY / BITS_PER_UNIT)
3171 * (PARM_BOUNDARY / BITS_PER_UNIT));
3174 move_block_from_reg (REGNO (entry_parm),
3175 validize_mem (stack_parm), nregs,
3176 int_size_in_bytes (TREE_TYPE (parm)));
3177 entry_parm = stack_parm;
3182 /* If we didn't decide this parm came in a register,
3183 by default it came on the stack. */
3184 if (entry_parm == 0)
3185 entry_parm = stack_parm;
3187 /* Record permanently how this parm was passed. */
3189 DECL_INCOMING_RTL (parm) = entry_parm;
3191 /* If there is actually space on the stack for this parm,
3192 count it in stack_args_size; otherwise set stack_parm to 0
3193 to indicate there is no preallocated stack slot for the parm. */
3195 if (entry_parm == stack_parm
3196 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3197 /* On some machines, even if a parm value arrives in a register
3198 there is still an (uninitialized) stack slot allocated for it.
3200 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3201 whether this parameter already has a stack slot allocated,
3202 because an arg block exists only if current_function_args_size
3203 is larger than some threshhold, and we haven't calculated that
3204 yet. So, for now, we just assume that stack slots never exist
3206 || REG_PARM_STACK_SPACE (fndecl) > 0
3210 stack_args_size.constant += arg_size.constant;
3212 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3215 /* No stack slot was pushed for this parm. */
3218 /* Update info on where next arg arrives in registers. */
3220 FUNCTION_ARG_ADVANCE (args_so_far, passed_mode,
3221 passed_type, ! last_named);
3223 /* If this is our second time through, we are done with this parm. */
3227 /* If we can't trust the parm stack slot to be aligned enough
3228 for its ultimate type, don't use that slot after entry.
3229 We'll make another stack slot, if we need one. */
3231 int thisparm_boundary
3232 = FUNCTION_ARG_BOUNDARY (passed_mode, passed_type);
3234 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3238 /* If parm was passed in memory, and we need to convert it on entry,
3239 don't store it back in that same slot. */
3241 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3245 /* Now adjust STACK_PARM to the mode and precise location
3246 where this parameter should live during execution,
3247 if we discover that it must live in the stack during execution.
3248 To make debuggers happier on big-endian machines, we store
3249 the value in the last bytes of the space available. */
3251 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3256 #if BYTES_BIG_ENDIAN
3257 if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3258 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3259 - GET_MODE_SIZE (nominal_mode));
3262 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3263 if (offset_rtx == const0_rtx)
3264 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3266 stack_parm = gen_rtx (MEM, nominal_mode,
3267 gen_rtx (PLUS, Pmode,
3268 internal_arg_pointer, offset_rtx));
3270 /* If this is a memory ref that contains aggregate components,
3271 mark it as such for cse and loop optimize. */
3272 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3276 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3277 in the mode in which it arrives.
3278 STACK_PARM is an RTX for a stack slot where the parameter can live
3279 during the function (in case we want to put it there).
3280 STACK_PARM is 0 if no stack slot was pushed for it.
3282 Now output code if necessary to convert ENTRY_PARM to
3283 the type in which this function declares it,
3284 and store that result in an appropriate place,
3285 which may be a pseudo reg, may be STACK_PARM,
3286 or may be a local stack slot if STACK_PARM is 0.
3288 Set DECL_RTL to that place. */
3290 if (nominal_mode == BLKmode)
3292 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3293 if (GET_CODE (entry_parm) == REG)
3295 int size_stored = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3298 /* Note that we will be storing an integral number of words.
3299 So we have to be careful to ensure that we allocate an
3300 integral number of words. We do this below in the
3301 assign_stack_local if space was not allocated in the argument
3302 list. If it was, this will not work if PARM_BOUNDARY is not
3303 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3304 if it becomes a problem. */
3306 if (stack_parm == 0)
3309 = assign_stack_local (GET_MODE (entry_parm), size_stored, 0);
3310 /* If this is a memory ref that contains aggregate components,
3311 mark it as such for cse and loop optimize. */
3312 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3315 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3318 move_block_from_reg (REGNO (entry_parm),
3319 validize_mem (stack_parm),
3320 size_stored / UNITS_PER_WORD,
3321 int_size_in_bytes (TREE_TYPE (parm)));
3323 DECL_RTL (parm) = stack_parm;
3325 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3326 && ! DECL_INLINE (fndecl))
3327 /* layout_decl may set this. */
3328 || TREE_ADDRESSABLE (parm)
3329 || TREE_SIDE_EFFECTS (parm)
3330 /* If -ffloat-store specified, don't put explicit
3331 float variables into registers. */
3332 || (flag_float_store
3333 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3334 /* Always assign pseudo to structure return or item passed
3335 by invisible reference. */
3336 || passed_pointer || parm == function_result_decl)
3338 /* Store the parm in a pseudoregister during the function, but we
3339 may need to do it in a wider mode. */
3341 register rtx parmreg;
3343 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3344 if (TREE_CODE (TREE_TYPE (parm)) == INTEGER_TYPE
3345 || TREE_CODE (TREE_TYPE (parm)) == ENUMERAL_TYPE
3346 || TREE_CODE (TREE_TYPE (parm)) == BOOLEAN_TYPE
3347 || TREE_CODE (TREE_TYPE (parm)) == CHAR_TYPE
3348 || TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE
3349 || TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE
3350 || TREE_CODE (TREE_TYPE (parm)) == OFFSET_TYPE)
3352 PROMOTE_MODE (nominal_mode, unsignedp, TREE_TYPE (parm));
3355 parmreg = gen_reg_rtx (nominal_mode);
3356 REG_USERVAR_P (parmreg) = 1;
3358 /* If this was an item that we received a pointer to, set DECL_RTL
3362 DECL_RTL (parm) = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3363 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3366 DECL_RTL (parm) = parmreg;
3368 /* Copy the value into the register. */
3369 if (GET_MODE (parmreg) != GET_MODE (entry_parm))
3371 /* If ENTRY_PARM is a hard register, it might be in a register
3372 not valid for operating in its mode (e.g., an odd-numbered
3373 register for a DFmode). In that case, moves are the only
3374 thing valid, so we can't do a convert from there. This
3375 occurs when the calling sequence allow such misaligned
3378 In addition, the conversion may involve a call, which could
3379 clobber parameters which haven't been copied to pseudo
3380 registers yet. Therefore, we must first copy the parm to
3381 a pseudo reg here, and save the conversion until after all
3382 parameters have been moved. */
3384 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3386 emit_move_insn (tempreg, validize_mem (entry_parm));
3388 push_to_sequence (conversion_insns);
3389 convert_move (parmreg, tempreg, unsignedp);
3390 conversion_insns = get_insns ();
3394 emit_move_insn (parmreg, validize_mem (entry_parm));
3396 /* If we were passed a pointer but the actual value
3397 can safely live in a register, put it in one. */
3398 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3399 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3400 && ! DECL_INLINE (fndecl))
3401 /* layout_decl may set this. */
3402 || TREE_ADDRESSABLE (parm)
3403 || TREE_SIDE_EFFECTS (parm)
3404 /* If -ffloat-store specified, don't put explicit
3405 float variables into registers. */
3406 || (flag_float_store
3407 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3409 /* We can't use nominal_mode, because it will have been set to
3410 Pmode above. We must use the actual mode of the parm. */
3411 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3412 emit_move_insn (parmreg, DECL_RTL (parm));
3413 DECL_RTL (parm) = parmreg;
3415 #ifdef FUNCTION_ARG_CALLEE_COPIES
3416 /* If we are passed an arg by reference and it is our responsibility
3417 to make a copy, do it now.
3418 PASSED_TYPE and PASSED mode now refer to the pointer, not the
3419 original argument, so we must recreate them in the call to
3420 FUNCTION_ARG_CALLEE_COPIES. */
3421 /* ??? Later add code to handle the case that if the argument isn't
3422 modified, don't do the copy. */
3424 else if (passed_pointer
3425 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3426 TYPE_MODE (DECL_ARG_TYPE (parm)),
3427 DECL_ARG_TYPE (parm),
3431 tree type = DECL_ARG_TYPE (parm);
3433 /* This sequence may involve a library call perhaps clobbering
3434 registers that haven't been copied to pseudos yet. */
3436 push_to_sequence (conversion_insns);
3438 if (TYPE_SIZE (type) == 0
3439 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3441 /* This is a variable sized object. */
3442 /* ??? Can we use expr_size here? */
3443 rtx size_rtx = expand_expr (size_in_bytes (type), NULL_RTX,
3444 TYPE_MODE (sizetype), 0);
3446 copy = gen_rtx (MEM, BLKmode,
3447 allocate_dynamic_stack_space (size_rtx, NULL_RTX,
3448 TYPE_ALIGN (type)));
3452 int size = int_size_in_bytes (type);
3453 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
3456 store_expr (parm, copy, 0);
3457 emit_move_insn (parmreg, XEXP (copy, 0));
3458 conversion_insns = get_insns ();
3461 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3463 /* In any case, record the parm's desired stack location
3464 in case we later discover it must live in the stack. */
3465 if (REGNO (parmreg) >= nparmregs)
3468 int old_nparmregs = nparmregs;
3469 nparmregs = REGNO (parmreg) + 5;
3470 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3471 bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx));
3472 bzero (new + old_nparmregs, (nparmregs - old_nparmregs) * sizeof (rtx));
3473 parm_reg_stack_loc = new;
3475 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3477 /* Mark the register as eliminable if we did no conversion
3478 and it was copied from memory at a fixed offset,
3479 and the arg pointer was not copied to a pseudo-reg.
3480 If the arg pointer is a pseudo reg or the offset formed
3481 an invalid address, such memory-equivalences
3482 as we make here would screw up life analysis for it. */
3483 if (nominal_mode == passed_mode
3484 && GET_CODE (entry_parm) == MEM
3485 && entry_parm == stack_parm
3486 && stack_offset.var == 0
3487 && reg_mentioned_p (virtual_incoming_args_rtx,
3488 XEXP (entry_parm, 0)))
3489 REG_NOTES (get_last_insn ())
3490 = gen_rtx (EXPR_LIST, REG_EQUIV,
3491 entry_parm, REG_NOTES (get_last_insn ()));
3493 /* For pointer data type, suggest pointer register. */
3494 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3495 mark_reg_pointer (parmreg);
3499 /* Value must be stored in the stack slot STACK_PARM
3500 during function execution. */
3502 if (passed_mode != nominal_mode)
3504 /* Conversion is required. */
3505 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3507 emit_move_insn (tempreg, validize_mem (entry_parm));
3509 push_to_sequence (conversion_insns);
3510 entry_parm = convert_to_mode (nominal_mode, tempreg,
3511 TREE_UNSIGNED (TREE_TYPE (parm)));
3512 conversion_insns = get_insns ();
3516 if (entry_parm != stack_parm)
3518 if (stack_parm == 0)
3521 = assign_stack_local (GET_MODE (entry_parm),
3522 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3523 /* If this is a memory ref that contains aggregate components,
3524 mark it as such for cse and loop optimize. */
3525 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3528 if (passed_mode != nominal_mode)
3530 push_to_sequence (conversion_insns);
3531 emit_move_insn (validize_mem (stack_parm),
3532 validize_mem (entry_parm));
3533 conversion_insns = get_insns ();
3537 emit_move_insn (validize_mem (stack_parm),
3538 validize_mem (entry_parm));
3541 DECL_RTL (parm) = stack_parm;
3544 /* If this "parameter" was the place where we are receiving the
3545 function's incoming structure pointer, set up the result. */
3546 if (parm == function_result_decl)
3547 DECL_RTL (DECL_RESULT (fndecl))
3548 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (fndecl)), DECL_RTL (parm));
3550 if (TREE_THIS_VOLATILE (parm))
3551 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3552 if (TREE_READONLY (parm))
3553 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3556 /* Output all parameter conversion instructions (possibly including calls)
3557 now that all parameters have been copied out of hard registers. */
3558 emit_insns (conversion_insns);
3560 max_parm_reg = max_reg_num ();
3561 last_parm_insn = get_last_insn ();
3563 current_function_args_size = stack_args_size.constant;
3565 /* Adjust function incoming argument size for alignment and
3568 #ifdef REG_PARM_STACK_SPACE
3569 #ifndef MAYBE_REG_PARM_STACK_SPACE
3570 current_function_args_size = MAX (current_function_args_size,
3571 REG_PARM_STACK_SPACE (fndecl));
3575 #ifdef STACK_BOUNDARY
3576 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3578 current_function_args_size
3579 = ((current_function_args_size + STACK_BYTES - 1)
3580 / STACK_BYTES) * STACK_BYTES;
3583 #ifdef ARGS_GROW_DOWNWARD
3584 current_function_arg_offset_rtx
3585 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3586 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3587 size_int (-stack_args_size.constant)),
3588 NULL_RTX, VOIDmode, 0));
3590 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
3593 /* See how many bytes, if any, of its args a function should try to pop
3596 current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (fndecl),
3597 current_function_args_size);
3599 /* For stdarg.h function, save info about regs and stack space
3600 used by the named args. */
3603 current_function_args_info = args_so_far;
3605 /* Set the rtx used for the function return value. Put this in its
3606 own variable so any optimizers that need this information don't have
3607 to include tree.h. Do this here so it gets done when an inlined
3608 function gets output. */
3610 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
3613 /* Indicate whether REGNO is an incoming argument to the current function
3614 that was promoted to a wider mode. If so, return the RTX for the
3615 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3616 that REGNO is promoted from and whether the promotion was signed or
3619 #ifdef PROMOTE_FUNCTION_ARGS
/* Return the incoming RTL (a REG) for the parameter of the current function
   that arrives in hard register REGNO, if that parameter was widened by
   PROMOTE_MODE on arrival; otherwise the caller sees no match.  On success,
   *PMODE gets the parameter's original declared mode and *PUNSIGNEDP its
   signedness, so the caller can undo or reason about the promotion.
   NOTE(review): this listing appears to have lines elided (return type,
   `regno'/`punsignedp' declarations, opening brace, trailing `return 0;')
   -- confirm against the complete source.  */
3622 promoted_input_arg (regno, pmode, punsignedp)
3624 enum machine_mode *pmode;
/* Walk every parameter; only scalar types (integers, enums, reals,
   pointers, ...) are candidates, since PROMOTE_MODE applies only to
   those.  */
3629 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3630 arg = TREE_CHAIN (arg))
3631 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
3632 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3633 && (TREE_CODE (TREE_TYPE (arg)) == INTEGER_TYPE
3634 || TREE_CODE (TREE_TYPE (arg)) == ENUMERAL_TYPE
3635 || TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE
3636 || TREE_CODE (TREE_TYPE (arg)) == CHAR_TYPE
3637 || TREE_CODE (TREE_TYPE (arg)) == REAL_TYPE
3638 || TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE
3639 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE))
3641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3642 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
/* Recompute what PROMOTE_MODE would have produced; a real promotion
   occurred only if the incoming register carries the promoted mode AND
   that differs from the declared mode.  */
3644 PROMOTE_MODE (mode, unsignedp, TREE_TYPE (arg));
3645 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3646 && mode != DECL_MODE (arg))
3648 *pmode = DECL_MODE (arg);
3649 *punsignedp = unsignedp;
3650 return DECL_INCOMING_RTL (arg);
3659 /* Compute the size and offset from the start of the stacked arguments for a
3660 parm passed in mode PASSED_MODE and with type TYPE.
3662 INITIAL_OFFSET_PTR points to the current offset into the stacked
3665 The starting offset and size for this parm are returned in *OFFSET_PTR
3666 and *ARG_SIZE_PTR, respectively.
3668 IN_REGS is non-zero if the argument will be passed in registers. It will
3669 never be set if REG_PARM_STACK_SPACE is not defined.
3671 FNDECL is the function in which the argument was defined.
3673 There are two types of rounding that are done. The first, controlled by
3674 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3675 list to be aligned to the specific boundary (in bits). This rounding
3676 affects the initial and starting offsets, but not the argument size.
3678 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3679 optionally rounds the size of the parm to PARM_BOUNDARY. The
3680 initial offset is not affected by this rounding, while the size always
3681 is and the starting offset may be. */
3683 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
3684 initial_offset_ptr is positive because locate_and_pad_parm's
3685 callers pass in the total size of args so far as
3686 initial_offset_ptr. arg_size_ptr is always positive.*/
3688 static void pad_to_arg_alignment (), pad_below ();
/* Compute where a parm of mode PASSED_MODE and type TYPE lives among the
   stacked arguments; see the long block comment above for the full
   contract.  Results go in *OFFSET_PTR (starting offset) and
   *ARG_SIZE_PTR (rounded size).  NOTE(review): listing has elided lines
   (return type, some parameter declarations, `sizetree' declaration,
   braces) -- confirm against the complete source.  */
3691 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
3692 initial_offset_ptr, offset_ptr, arg_size_ptr)
3693 enum machine_mode passed_mode;
3697 struct args_size *initial_offset_ptr;
3698 struct args_size *offset_ptr;
3699 struct args_size *arg_size_ptr;
/* Size of the parm as a tree: from TYPE when we have one, else from the
   machine mode.  */
3702 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3703 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3704 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3705 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3706 int reg_parm_stack_space = 0;
3708 #ifdef REG_PARM_STACK_SPACE
3709 /* If we have found a stack parm before we reach the end of the
3710 area reserved for registers, skip that area. */
3713 #ifdef MAYBE_REG_PARM_STACK_SPACE
3714 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3716 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
/* Bump the running offset past the register-save area, handling both a
   variable and a constant current offset.  */
3718 if (reg_parm_stack_space > 0)
3720 if (initial_offset_ptr->var)
3722 initial_offset_ptr->var
3723 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3724 size_int (reg_parm_stack_space));
3725 initial_offset_ptr->constant = 0;
3727 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3728 initial_offset_ptr->constant = reg_parm_stack_space;
3731 #endif /* REG_PARM_STACK_SPACE */
3733 arg_size_ptr->var = 0;
3734 arg_size_ptr->constant = 0;
3736 #ifdef ARGS_GROW_DOWNWARD
/* Offsets are negated when args grow downward; see the sign convention
   noted in the comment above this function.  */
3737 if (initial_offset_ptr->var)
3739 offset_ptr->constant = 0;
3740 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
3741 initial_offset_ptr->var);
3745 offset_ptr->constant = - initial_offset_ptr->constant;
3746 offset_ptr->var = 0;
/* Round the size up to PARM_BOUNDARY when padding upward and the size is
   variable or not already a multiple of the boundary.  */
3748 if (where_pad == upward
3749 && (TREE_CODE (sizetree) != INTEGER_CST
3750 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3751 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3752 SUB_PARM_SIZE (*offset_ptr, sizetree);
3753 if (where_pad != downward)
3754 pad_to_arg_alignment (offset_ptr, boundary);
3755 if (initial_offset_ptr->var)
3757 arg_size_ptr->var = size_binop (MINUS_EXPR,
3758 size_binop (MINUS_EXPR,
3760 initial_offset_ptr->var),
3765 arg_size_ptr->constant = (- initial_offset_ptr->constant -
3766 offset_ptr->constant);
3768 /* ADD_PARM_SIZE (*arg_size_ptr, sizetree); */
3769 if (where_pad == downward)
3770 pad_below (arg_size_ptr, passed_mode, sizetree);
3771 #else /* !ARGS_GROW_DOWNWARD */
/* Upward-growing args: align the incoming offset first, then the parm
   starts right there.  */
3772 pad_to_arg_alignment (initial_offset_ptr, boundary);
3773 *offset_ptr = *initial_offset_ptr;
3775 #ifdef PUSH_ROUNDING
3776 if (passed_mode != BLKmode)
3777 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3780 if (where_pad != none
3781 && (TREE_CODE (sizetree) != INTEGER_CST
3782 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
3783 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3785 /* This must be done after rounding sizetree, so that it will subtract
3786 the same value that we explicitly add below. */
3787 if (where_pad == downward)
3788 pad_below (offset_ptr, passed_mode, sizetree);
3789 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
3790 #endif /* ARGS_GROW_DOWNWARD */
3793 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3794 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
/* Round the stack offset in *OFFSET_PTR to a multiple of BOUNDARY (bits,
   assumed a multiple of a storage unit): up when args grow upward, down
   (FLOOR_ROUND) when ARGS_GROW_DOWNWARD.  NOTE(review): listing has
   elided lines (return type, `boundary' declaration, the rounding call
   for the variable-offset case) -- confirm against the full source.  */
3797 pad_to_arg_alignment (offset_ptr, boundary)
3798 struct args_size *offset_ptr;
3801 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3803 if (boundary > BITS_PER_UNIT)
/* Variable offset: fold the rounding into the tree expression and clear
   the constant part.  */
3805 if (offset_ptr->var)
3808 #ifdef ARGS_GROW_DOWNWARD
3813 (ARGS_SIZE_TREE (*offset_ptr),
3814 boundary / BITS_PER_UNIT);
3815 offset_ptr->constant = 0; /*?*/
/* Constant offset: plain integer rounding in the appropriate
   direction.  */
3818 offset_ptr->constant =
3819 #ifdef ARGS_GROW_DOWNWARD
3820 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
3822 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
/* Add to *OFFSET_PTR the padding needed below a parm of mode PASSED_MODE
   and size SIZETREE so that the parm's total space is a whole number of
   PARM_BOUNDARY units.  NOTE(review): listing has elided lines (return
   type, `sizetree' declaration, braces) -- confirm against the full
   source.  */
3828 pad_below (offset_ptr, passed_mode, sizetree)
3829 struct args_size *offset_ptr;
3830 enum machine_mode passed_mode;
/* For a non-BLKmode parm the size comes from the mode: pad by the
   difference between the mode's size and that size rounded up to
   PARM_BOUNDARY.  */
3833 if (passed_mode != BLKmode)
3835 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3836 offset_ptr->constant
3837 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3838 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3839 - GET_MODE_SIZE (passed_mode));
/* BLKmode (or variable size): compute rounded-size minus size as tree
   arithmetic, via one ADD and one SUB on the args_size pair.  */
3843 if (TREE_CODE (sizetree) != INTEGER_CST
3844 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3846 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3847 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3849 ADD_PARM_SIZE (*offset_ptr, s2);
3850 SUB_PARM_SIZE (*offset_ptr, sizetree);
/* Return VALUE (a size tree) rounded DOWN to the nearest multiple of
   DIVISOR, as a tree expression: floor(value / divisor) * divisor.
   NOTE(review): return type and parameter declarations are elided in
   this listing.  */
3856 round_down (value, divisor)
3860 return size_binop (MULT_EXPR,
3861 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
3862 size_int (divisor));
3865 /* Walk the tree of blocks describing the binding levels within a function
3866 and warn about uninitialized variables.
3867 This is done after calling flow_analysis and before global_alloc
3868 clobbers the pseudo-regs to hard regs. */
/* Recursively walk BLOCK and its subblocks, warning about local variables
   that flow analysis says may be used uninitialized, and about variables
   that may be clobbered by `longjmp' or `vfork'.  Called after flow
   analysis and before global_alloc (see comment above).  */
3871 uninitialized_vars_warning (block)
3874 register tree decl, sub;
3875 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
/* Only simple scalar VAR_DECLs living in a pseudo-reg are checked;
   aggregates are skipped because piecewise initialization confuses
   flow.c (see comment below).  */
3877 if (TREE_CODE (decl) == VAR_DECL
3878 /* These warnings are unreliable for and aggregates
3879 because assigning the fields one by one can fail to convince
3880 flow.c that the entire aggregate was initialized.
3881 Unions are troublesome because members may be shorter. */
3882 && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
3883 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE
3884 && TREE_CODE (TREE_TYPE (decl)) != QUAL_UNION_TYPE
3885 && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
3886 && DECL_RTL (decl) != 0
3887 && GET_CODE (DECL_RTL (decl)) == REG
3888 && regno_uninitialized (REGNO (DECL_RTL (decl))))
3889 warning_with_decl (decl,
3890 "`%s' may be used uninitialized in this function");
/* Independently warn about register variables live across a setjmp.  */
3891 if (TREE_CODE (decl) == VAR_DECL
3892 && DECL_RTL (decl) != 0
3893 && GET_CODE (DECL_RTL (decl)) == REG
3894 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3895 warning_with_decl (decl,
3896 "variable `%s' may be clobbered by `longjmp' or `vfork'");
/* Recurse into nested binding levels.  */
3898 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3899 uninitialized_vars_warning (sub);
3902 /* Do the appropriate part of uninitialized_vars_warning
3903 but for arguments instead of local variables. */
/* Like uninitialized_vars_warning's setjmp check, but for the current
   function's arguments rather than block-local variables.  Warn for each
   argument living in a register that may be clobbered across a
   setjmp.  */
3906 setjmp_args_warning (block)
3910 for (decl = DECL_ARGUMENTS (current_function_decl);
3911 decl; decl = TREE_CHAIN (decl))
3912 if (DECL_RTL (decl) != 0
3913 && GET_CODE (DECL_RTL (decl)) == REG
3914 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3915 warning_with_decl (decl, "argument `%s' may be clobbered by `longjmp' or `vfork'");
3918 /* If this function call setjmp, put all vars into the stack
3919 unless they were declared `register'. */
/* The current function calls setjmp: recursively force every variable in
   BLOCK (and its subblocks) out of registers and into the stack, unless
   the user declared it `register' (or, with NON_SAVING_SETJMP,
   unconditionally -- longjmp would not restore the registers).
   NOTE(review): some lines of the condition are elided in this
   listing.  */
3922 setjmp_protect (block)
3925 register tree decl, sub;
3926 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3927 if ((TREE_CODE (decl) == VAR_DECL
3928 || TREE_CODE (decl) == PARM_DECL)
3929 && DECL_RTL (decl) != 0
3930 && GET_CODE (DECL_RTL (decl)) == REG
3931 /* If this variable came from an inline function, it must be
3932 that it's life doesn't overlap the setjmp. If there was a
3933 setjmp in the function, it would already be in memory. We
3934 must exclude such variable because their DECL_RTL might be
3935 set to strange things such as virtual_stack_vars_rtx. */
3936 && ! DECL_FROM_INLINE (decl)
3938 #ifdef NON_SAVING_SETJMP
3939 /* If longjmp doesn't restore the registers,
3940 don't put anything in them. */
3944 ! DECL_REGISTER (decl)))
3945 put_var_into_stack (decl);
/* Recurse into nested binding levels.  */
3946 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3947 setjmp_protect (sub);
3950 /* Like the previous function, but for args instead of local variables. */
/* Like setjmp_protect, but for the current function's arguments instead
   of block-local variables: push register-resident args into the stack
   unless declared `register' (or unconditionally under
   NON_SAVING_SETJMP).  NOTE(review): some condition lines are elided in
   this listing.  */
3953 setjmp_protect_args ()
3955 register tree decl, sub;
3956 for (decl = DECL_ARGUMENTS (current_function_decl);
3957 decl; decl = TREE_CHAIN (decl))
3958 if ((TREE_CODE (decl) == VAR_DECL
3959 || TREE_CODE (decl) == PARM_DECL)
3960 && DECL_RTL (decl) != 0
3961 && GET_CODE (DECL_RTL (decl)) == REG
3963 /* If longjmp doesn't restore the registers,
3964 don't put anything in them. */
3965 #ifdef NON_SAVING_SETJMP
3969 ! DECL_REGISTER (decl)))
3970 put_var_into_stack (decl);
3973 /* Return the context-pointer register corresponding to DECL,
3974 or 0 if it does not need one. */
/* Return the context-pointer RTX for the function containing DECL, or the
   frame pointer when DECL's context is the current (or currently-inlined)
   function.  NOTE(review): the `link' declaration and the fall-through
   return for a missing context appear elided in this listing.  */
3977 lookup_static_chain (decl)
3980 tree context = decl_function_context (decl);
3986 /* We treat inline_function_decl as an alias for the current function
3987 because that is the inline function whose vars, types, etc.
3988 are being merged into the current function.
3989 See expand_inline_function. */
3990 if (context == current_function_decl || context == inline_function_decl)
3991 return virtual_stack_vars_rtx;
/* Otherwise search the display of enclosing-function contexts for the
   RTL recorded for CONTEXT.  */
3993 for (link = context_display; link; link = TREE_CHAIN (link))
3994 if (TREE_PURPOSE (link) == context)
3995 return RTL_EXPR_RTL (TREE_VALUE (link));
4000 /* Convert a stack slot address ADDR for variable VAR
4001 (from a containing function)
4002 into an address valid in this function (using a static chain). */
/* Convert ADDR, a stack-slot address of variable VAR belonging to a
   containing function, into an address valid in the current function by
   going through the static chain.  Returns BASE + DISPLACEMENT.
   NOTE(review): several lines are elided in this listing (return type,
   local declarations such as `basereg'/`displacement'/`base', abort()
   branches) -- confirm against the full source.  */
4005 fix_lexical_addr (addr, var)
4011 tree context = decl_function_context (var);
4012 struct function *fp;
4015 /* If this is the present function, we need not do anything. */
4016 if (context == current_function_decl || context == inline_function_decl)
/* Find the `struct function' record of the containing function.  */
4019 for (fp = outer_function_chain; fp; fp = fp->next)
4020 if (fp->decl == context)
4026 /* Decode given address as base reg plus displacement. */
4027 if (GET_CODE (addr) == REG)
4028 basereg = addr, displacement = 0;
4029 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4030 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4034 /* We accept vars reached via the containing function's
4035 incoming arg pointer and via its stack variables pointer. */
4036 if (basereg == fp->internal_arg_pointer)
4038 /* If reached via arg pointer, get the arg pointer value
4039 out of that function's stack frame.
4041 There are two cases: If a separate ap is needed, allocate a
4042 slot in the outer function for it and dereference it that way.
4043 This is correct even if the real ap is actually a pseudo.
4044 Otherwise, just adjust the offset from the frame pointer to
4047 #ifdef NEED_SEPARATE_AP
/* Lazily create a save slot for the outer function's arg pointer, then
   load the ap through it (fixing that slot's own address first).  */
4050 if (fp->arg_pointer_save_area == 0)
4051 fp->arg_pointer_save_area
4052 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4054 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4055 addr = memory_address (Pmode, addr);
4057 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
/* No separate ap: rebase from arg pointer to frame pointer by adjusting
   the displacement, and use the static chain as the base.  */
4059 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4060 base = lookup_static_chain (var);
4064 else if (basereg == virtual_stack_vars_rtx)
4066 /* This is the same code as lookup_static_chain, duplicated here to
4067 avoid an extra call to decl_function_context. */
4070 for (link = context_display; link; link = TREE_CHAIN (link))
4071 if (TREE_PURPOSE (link) == context)
4073 base = RTL_EXPR_RTL (TREE_VALUE (link));
4081 /* Use same offset, relative to appropriate static chain or argument
4083 return plus_constant (base, displacement);
4086 /* Return the address of the trampoline for entering nested fn FUNCTION.
4087 If necessary, allocate a trampoline (in the stack frame)
4088 and emit rtl to initialize its contents (at entry to this function). */
/* Return the (rounded) address of the trampoline for entering nested
   function FUNCTION, reusing an existing trampoline when one was already
   recorded in this function or an outer one; otherwise allocate stack
   space for a new trampoline in the defining function's frame and record
   it for initialization by expand_function_end.  NOTE(review): several
   lines are elided in this listing (return type, declarations of
   `link'/`tramp'/`rtlexp'/`fn_context', some braces) -- confirm against
   the full source.  */
4091 trampoline_address (function)
4097 struct function *fp;
4100 /* Find an existing trampoline and return it. */
4101 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4102 if (TREE_PURPOSE (link) == function)
4103 return XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0);
/* Also search outer functions' trampoline lists; an outer trampoline's
   address must be made valid here via the static chain.  */
4104 for (fp = outer_function_chain; fp; fp = fp->next)
4105 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4106 if (TREE_PURPOSE (link) == function)
4108 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4110 return round_trampoline_addr (tramp);
4113 /* None exists; we must make one. */
4115 /* Find the `struct function' for the function containing FUNCTION. */
4117 fn_context = decl_function_context (function);
4118 if (fn_context != current_function_decl)
4119 for (fp = outer_function_chain; fp; fp = fp->next)
4120 if (fp->decl == fn_context)
4123 /* Allocate run-time space for this trampoline
4124 (usually in the defining function's stack frame). */
4125 #ifdef ALLOCATE_TRAMPOLINE
4126 tramp = ALLOCATE_TRAMPOLINE (fp);
4128 /* If rounding needed, allocate extra space
4129 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4130 #ifdef TRAMPOLINE_ALIGNMENT
4131 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE + TRAMPOLINE_ALIGNMENT - 1)
4133 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
/* Allocate in the outer function's frame when FUNCTION is defined there,
   else in the current frame.  */
4136 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4138 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4141 /* Record the trampoline for reuse and note it for later initialization
4142 by expand_function_end. */
/* Outer-function case: the list node must live on that function's
   obstack so it survives this function's compilation.  */
4145 push_obstacks (fp->function_maybepermanent_obstack,
4146 fp->function_maybepermanent_obstack);
4147 rtlexp = make_node (RTL_EXPR);
4148 RTL_EXPR_RTL (rtlexp) = tramp;
4149 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4154 /* Make the RTL_EXPR node temporary, not momentary, so that the
4155 trampoline_list doesn't become garbage. */
4156 int momentary = suspend_momentary ();
4157 rtlexp = make_node (RTL_EXPR);
4158 resume_momentary (momentary);
4160 RTL_EXPR_RTL (rtlexp) = tramp;
4161 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
/* Translate the slot's address into this function's frame and round it
   to the required trampoline alignment.  */
4164 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4165 return round_trampoline_addr (tramp);
4168 /* Given a trampoline address,
4169 round it to multiple of TRAMPOLINE_ALIGNMENT. */
/* Round TRAMP up to the next multiple of TRAMPOLINE_ALIGNMENT (a no-op
   when that macro is undefined) by emitting add/and RTL:
   (tramp + align-1) & -align.  NOTE(review): return type, parameter
   declaration, and final `return tramp;' appear elided in this
   listing.  */
4172 round_trampoline_addr (tramp)
4175 #ifdef TRAMPOLINE_ALIGNMENT
4176 /* Round address up to desired boundary. */
4177 rtx temp = gen_reg_rtx (Pmode);
4178 temp = expand_binop (Pmode, add_optab, tramp,
4179 GEN_INT (TRAMPOLINE_ALIGNMENT - 1),
4180 temp, 0, OPTAB_LIB_WIDEN);
4181 tramp = expand_binop (Pmode, and_optab, temp,
4182 GEN_INT (- TRAMPOLINE_ALIGNMENT),
4183 temp, 0, OPTAB_LIB_WIDEN);
4188 /* The functions identify_blocks and reorder_blocks provide a way to
4189 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4190 duplicate portions of the RTL code. Call identify_blocks before
4191 changing the RTL, and call reorder_blocks after. */
4193 static int all_blocks ();
4194 static tree blocks_nreverse ();
4196 /* Put all this function's BLOCK nodes into a vector, and return it.
4197 Also store in each NOTE for the beginning or end of a block
4198 the index of that block in the vector.
4199 The arguments are TOP_BLOCK, the top-level block of the function,
4200 and INSNS, the insn chain of the function. */
/* Put all of TOP_BLOCK's BLOCK nodes into a heap-allocated vector and
   return it, numbering each block-begin/end NOTE in INSNS with the index
   of its block in that vector (so reorder_blocks can rebuild the tree
   after RTL is shuffled).  Uses an explicit stack of block numbers to
   track nesting.  NOTE(review): return type and several declarations
   (`insn', `n_blocks', `block_vector', `block_stack', `depth') are elided
   in this listing.  */
4203 identify_blocks (top_block, insns)
4211 int next_block_number = 0;
4212 int current_block_number = 0;
/* First pass counts the blocks so the vector and stack can be sized;
   second call fills the vector.  */
4218 n_blocks = all_blocks (top_block, 0);
4219 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4220 block_stack = (int *) alloca (n_blocks * sizeof (int));
4222 all_blocks (top_block, block_vector);
4224 for (insn = insns; insn; insn = NEXT_INSN (insn))
4225 if (GET_CODE (insn) == NOTE)
/* BLOCK_BEG: push the enclosing block's number and assign this block
   the next index.  */
4227 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4229 block_stack[depth++] = current_block_number;
4230 current_block_number = next_block_number;
4231 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
/* BLOCK_END: pop back to the enclosing block's number.  */
4233 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4235 current_block_number = block_stack[--depth];
4236 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4240 return block_vector;
4243 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4244 and a revised instruction chain, rebuild the tree structure
4245 of BLOCK nodes to correspond to the new order of RTL.
4246 The new block tree is inserted below TOP_BLOCK.
4247 Returns the current top-level block. */
/* Given BLOCK_VECTOR from identify_blocks and the revised insn chain
   INSNS, rebuild the BLOCK tree under TOP_BLOCK to match the new RTL
   order, and return the current top-level block.  Blocks seen more than
   once (duplicated RTL) are copied.  NOTE(review): return type, some
   parameter declarations, and the early-return body for a null
   BLOCK_VECTOR are elided in this listing.  */
4250 reorder_blocks (block_vector, top_block, insns)
4255 tree current_block = top_block;
4258 if (block_vector == 0)
4261 /* Prune the old tree away, so that it doesn't get in the way. */
4262 BLOCK_SUBBLOCKS (current_block) = 0;
4264 for (insn = insns; insn; insn = NEXT_INSN (insn))
4265 if (GET_CODE (insn) == NOTE)
/* BLOCK_BEG: splice the block (or a copy, if already emitted) in as
   the newest subblock of the current block and descend into it.
   TREE_ASM_WRITTEN marks blocks already placed.  */
4267 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4269 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4270 /* If we have seen this block before, copy it. */
4271 if (TREE_ASM_WRITTEN (block))
4272 block = copy_node (block);
4273 BLOCK_SUBBLOCKS (block) = 0;
4274 TREE_ASM_WRITTEN (block) = 1;
4275 BLOCK_SUPERCONTEXT (block) = current_block;
4276 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4277 BLOCK_SUBBLOCKS (current_block) = block;
4278 current_block = block;
4279 NOTE_SOURCE_FILE (insn) = 0;
/* BLOCK_END: subblocks were accumulated in reverse; restore source
   order and pop back to the parent.  */
4281 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4283 BLOCK_SUBBLOCKS (current_block)
4284 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4285 current_block = BLOCK_SUPERCONTEXT (current_block);
4286 NOTE_SOURCE_FILE (insn) = 0;
4290 return current_block;
4293 /* Reverse the order of elements in the chain T of blocks,
4294 and return the new head of the chain (old last element). */
/* Body of blocks_nreverse: destructively reverse a chain of BLOCK nodes
   linked by BLOCK_CHAIN, returning the new head (old last element).
   NOTE(review): the function header line (`static tree blocks_nreverse
   (t)'), its parameter declaration, and the final `return prev;' are
   elided in this listing.  */
4300 register tree prev = 0, decl, next;
4301 for (decl = t; decl; decl = next)
4303 next = BLOCK_CHAIN (decl);
4304 BLOCK_CHAIN (decl) = prev;
4310 /* Count the subblocks of BLOCK, and list them all into the vector VECTOR.
4311 Also clear TREE_ASM_WRITTEN in all blocks. */
/* Recursively count BLOCK and its subblocks, storing each into VECTOR
   (when VECTOR is non-null) and clearing TREE_ASM_WRITTEN on every block;
   returns the number of blocks recorded.  NOTE(review): return type,
   parameter declarations, the line that stores BLOCK into VECTOR, and the
   final return are elided in this listing.  */
4314 all_blocks (block, vector)
4321 TREE_ASM_WRITTEN (block) = 0;
4322 /* Record this block. */
4326 /* Record the subblocks, and their subblocks. */
4327 for (subblocks = BLOCK_SUBBLOCKS (block);
4328 subblocks; subblocks = BLOCK_CHAIN (subblocks))
4329 n_blocks += all_blocks (subblocks, vector ? vector + n_blocks : 0);
4334 /* Build bytecode call descriptor for function SUBR. */
/* Build the bytecode call descriptor for function SUBR: a constant vector
   of [arg count, return type code+size, per-argument type code+size
   pairs], output as a constant and returned via output_constant_def.
   NOTE(review): return type, `nargs' declaration, and its increment in
   the loop are elided in this listing.  */
4336 bc_build_calldesc (subr)
4339 tree calldesc = 0, arg;
4342 /* Build the argument description vector in reverse order. */
/* The tree_cons prepends, so walk args reversed to end up in order;
   the list is restored afterwards.  */
4343 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4346 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4350 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4351 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4354 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4356 /* Prepend the function's return type. */
4357 calldesc = tree_cons ((tree) 0,
4358 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4361 calldesc = tree_cons ((tree) 0,
4362 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4365 /* Prepend the arg count. */
4366 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4368 /* Output the call description vector and get its address. */
4369 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4370 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4371 build_index_type (build_int_2 (nargs * 2, 0)));
4373 return output_constant_def (calldesc);
4377 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4378 and initialize static variables for generating RTL for the statements
/* NOTE(review): listing has gaps — the parameter declarations, function
   braces, early-return for the bytecode path, several initializer calls
   (e.g. around lines 4425-4433), #else/#endif lines, and the varargs
   condition near the end are not visible here.  */
4382 init_function_start (subr, filename, line)
/* Bytecode path: set up per-function bytecode bookkeeping instead of RTL.  */
4389 if (output_bytecode)
4391 this_function_decl = subr;
4392 this_function_calldesc = bc_build_calldesc (subr);
4393 local_vars_size = 0;
4395 max_stack_depth = 0;
4396 stmt_expr_depth = 0;
/* RTL path: reset all per-function state to a clean slate.  */
4400 init_stmt_for_function ();
4402 cse_not_expected = ! optimize;
4404 /* Caller save not needed yet. */
4405 caller_save_needed = 0;
4407 /* No stack slots have been made yet. */
4408 stack_slot_list = 0;
4410 /* There is no stack slot for handling nonlocal gotos. */
4411 nonlocal_goto_handler_slot = 0;
4412 nonlocal_goto_stack_level = 0;
4414 /* No labels have been declared for nonlocal use. */
4415 nonlocal_labels = 0;
4417 /* No function calls so far in this function. */
4418 function_call_count = 0;
4420 /* No parm regs have been allocated.
4421 (This is important for output_inline_function.) */
4422 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4424 /* Initialize the RTL mechanism. */
4427 /* Initialize the queue of pending postincrement and postdecrements,
4428 and some other info in expr.c. */
4431 /* We haven't done register allocation yet. */
4434 init_const_rtx_hash_table ();
4436 current_function_name = (*decl_printable_name) (subr, &junk);
4438 /* Nonzero if this is a nested function that uses a static chain. */
4440 current_function_needs_context
4441 = (decl_function_context (current_function_decl) != 0);
4443 /* Set if a call to setjmp is seen. */
4444 current_function_calls_setjmp = 0;
4446 /* Set if a call to longjmp is seen. */
4447 current_function_calls_longjmp = 0;
4449 current_function_calls_alloca = 0;
4450 current_function_has_nonlocal_label = 0;
4451 current_function_has_nonlocal_goto = 0;
4452 current_function_contains_functions = 0;
4454 current_function_returns_pcc_struct = 0;
4455 current_function_returns_struct = 0;
4456 current_function_epilogue_delay_list = 0;
4457 current_function_uses_const_pool = 0;
4458 current_function_uses_pic_offset_table = 0;
4460 /* We have not yet needed to make a label to jump to for tail-recursion. */
4461 tail_recursion_label = 0;
4463 /* We haven't had a need to make a save area for ap yet. */
4465 arg_pointer_save_area = 0;
4467 /* No stack slots allocated yet. */
4470 /* No SAVE_EXPRs in this function yet. */
4473 /* No RTL_EXPRs in this function yet. */
4476 /* We have not allocated any temporaries yet. */
4478 temp_slot_level = 0;
4480 /* Within function body, compute a type's size as soon it is laid out. */
4481 immediate_size_expand++;
4483 /* We haven't made any trampolines for this function yet. */
4484 trampoline_list = 0;
4486 init_pending_stack_adjust ();
4487 inhibit_defer_pop = 0;
4489 current_function_outgoing_args_size = 0;
4491 /* Initialize the insn lengths. */
4492 init_insn_lengths ();
4494 /* Prevent ever trying to delete the first instruction of a function.
4495 Also tell final how to output a linenum before the function prologue. */
4496 emit_line_note (filename, line);
4498 /* Make sure first insn is a note even if we don't want linenums.
4499 This makes sure the first insn will never be deleted.
4500 Also, final expects a note to appear there. */
4501 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4503 /* Set flags used by final.c. */
/* Aggregate return values go in memory; which flag is set depends on
   whether the target uses PCC-style static struct return.  */
4504 if (aggregate_value_p (DECL_RESULT (subr)))
4506 #ifdef PCC_STATIC_STRUCT_RETURN
4507 current_function_returns_pcc_struct = 1;
4509 current_function_returns_struct = 1;
4512 /* Warn if this value is an aggregate type,
4513 regardless of which calling convention we are using for it. */
4514 if (warn_aggregate_return
4515 && (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == RECORD_TYPE
4516 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == UNION_TYPE
4517 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == QUAL_UNION_TYPE
4518 || TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == ARRAY_TYPE))
4519 warning ("function returns an aggregate");
4521 current_function_returns_pointer
4522 = (TREE_CODE (TREE_TYPE (DECL_RESULT (subr))) == POINTER_TYPE);
4524 /* Indicate that we need to distinguish between the return value of the
4525 present function and the return value of a function being called. */
4526 rtx_equal_function_value_matters = 1;
4528 /* Indicate that we have not instantiated virtual registers yet. */
4529 virtuals_instantiated = 0;
4531 /* Indicate we have no need of a frame pointer yet. */
4532 frame_pointer_needed = 0;
4534 /* By default assume not varargs. */
4535 current_function_varargs = 0;
4538 /* Indicate that the current function uses extra args
4539 not explicitly mentioned in the argument list in any fashion. */
/* NOTE(review): the condition guarding this assignment (lines ~4540-4543)
   is missing from the listing.  */
4544 current_function_varargs = 1;
4547 /* Expand a call to __main at the beginning of a possible main function. */
/* NOTE(review): listing has gaps — the return type, braces, and the full
   argument list of the emit_library_call are not visible here.  */
4550 expand_main_function ()
/* Only the RTL path calls __main; the bytecode path emits nothing here.  */
4552 if (!output_bytecode)
4554 /* The zero below avoids a possible parse error */
/* __main is called only when the target has no .init section mechanism,
   or explicitly requests it via INVOKE__main.  */
4556 #if !defined (INIT_SECTION_ASM_OP) || defined (INVOKE__main)
4557 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
4559 #endif /* not INIT_SECTION_ASM_OP or INVOKE__main */
4563 extern struct obstack permanent_obstack;
4565 /* Expand start of bytecode function. See comment at
4566 expand_function_start below for details. */
/* NOTE(review): listing has gaps — the return type, declarations of
   thisarg/argsz/nlab, braces, the #endif for DEBUG_PRINT_CODE, and the
   label-counter setup are not visible here.  */
4569 bc_expand_function_start (subr, parms_have_cleanups)
4571 int parms_have_cleanups;
4573 char label[20], *name;
4578 if (TREE_PUBLIC (subr))
4579 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
4581 #ifdef DEBUG_PRINT_CODE
4582 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
/* Lay out the arguments: each fixed-size arg gets an offset rtx and
   advances argsz by its size in bits/bytes as stored in DECL_SIZE.  */
4585 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
4587 if (DECL_RTL (thisarg))
4588 abort (); /* Should be NULL here I think. */
4589 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
4591 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
4592 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
4596 /* Variable-sized objects are pointers to their storage. */
4597 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
4598 argsz += POINTER_SIZE;
4602 bc_begin_function (bc_xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
/* Generate an internal label naming this function's call-info record;
   copy it to permanent storage since `label' is a stack buffer.  */
4604 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
4607 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
4608 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
4609 this_function_bytecode =
4610 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
4614 /* Expand end of bytecode function. See details the comment of
4615 expand_function_end(), below. */
/* NOTE(review): listing has gaps — the return type, braces, and the
   declaration of `ptrconsts' are not visible here.  */
4618 bc_expand_function_end ()
/* Emit an implicit null return in case control falls off the end.  */
4622 expand_null_return ();
4624 /* Emit any fixup code. This must be done before the call to
4625 to BC_END_FUNCTION (), since that will cause the bytecode
4626 segment to be finished off and closed. */
4628 fixup_gotos (0, 0, 0, 0, 0);
4630 ptrconsts = bc_end_function ();
4632 bc_align_const (2 /* INT_ALIGN */);
4634 /* If this changes also make sure to change bc-interp.h! */
/* Emit the call-info record: stack depth, local size, then label
   references to the bytecode, pointer constants, and call descriptor.
   The layout must stay in sync with bc-interp.h (see comment above).  */
4636 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
4637 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
4638 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
4639 bc_emit_const_labelref (this_function_bytecode, 0);
4640 bc_emit_const_labelref (ptrconsts, 0);
4641 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
4645 /* Start the RTL for a new function, and set variables used for
4647 SUBR is the FUNCTION_DECL node.
4648 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4649 the function's parameters, which must be run at any return statement. */
/* NOTE(review): listing has gaps — parameter/local declarations, braces,
   the early return after the bytecode dispatch, several #else/#endif
   lines, and parts of conditionals are not visible here.  */
4652 expand_function_start (subr, parms_have_cleanups)
4654 int parms_have_cleanups;
/* Bytecode compilation takes a separate path entirely.  */
4660 if (output_bytecode)
4662 bc_expand_function_start (subr, parms_have_cleanups);
4666 /* Make sure volatile mem refs aren't considered
4667 valid operands of arithmetic insns. */
4668 init_recog_no_volatile ();
4670 /* If function gets a static chain arg, store it in the stack frame.
4671 Do this first, so it gets the first stack slot offset. */
4672 if (current_function_needs_context)
4674 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4675 emit_move_insn (last_ptr, static_chain_incoming_rtx);
4678 /* If the parameters of this function need cleaning up, get a label
4679 for the beginning of the code which executes those cleanups. This must
4680 be done before doing anything with return_label. */
4681 if (parms_have_cleanups)
4682 cleanup_label = gen_label_rtx ();
4686 /* Make the label for return statements to jump to, if this machine
4687 does not have a one-instruction return and uses an epilogue,
4688 or if it returns a structure, or if it has parm cleanups. */
/* NOTE(review): the two gen_label_rtx calls below sit in different arms
   of a conditional whose #else (or intermediate lines) are missing.  */
4690 if (cleanup_label == 0 && HAVE_return
4691 && ! current_function_returns_pcc_struct
4692 && ! (current_function_returns_struct && ! optimize))
4695 return_label = gen_label_rtx ();
4697 return_label = gen_label_rtx ();
4700 /* Initialize rtx used to return the value. */
4701 /* Do this before assign_parms so that we copy the struct value address
4702 before any library calls that assign parms might generate. */
4704 /* Decide whether to return the value in memory or in a register. */
4705 if (aggregate_value_p (DECL_RESULT (subr)))
4707 /* Returning something that won't go in a register. */
4708 register rtx value_address;
4710 #ifdef PCC_STATIC_STRUCT_RETURN
4711 if (current_function_returns_pcc_struct)
/* PCC style: return value lives in static storage.  */
4713 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4714 value_address = assemble_static_space (size);
4719 /* Expect to be passed the address of a place to store the value.
4720 If it is passed as an argument, assign_parms will take care of
4722 if (struct_value_incoming_rtx)
4724 value_address = gen_reg_rtx (Pmode);
4725 emit_move_insn (value_address, struct_value_incoming_rtx);
4729 DECL_RTL (DECL_RESULT (subr))
4730 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)),
4733 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4734 /* If return mode is void, this decl rtl should not be used. */
4735 DECL_RTL (DECL_RESULT (subr)) = 0;
4736 else if (parms_have_cleanups)
4738 /* If function will end with cleanup code for parms,
4739 compute the return values into a pseudo reg,
4740 which we will copy into the true return register
4741 after the cleanups are done. */
4743 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
4744 #ifdef PROMOTE_FUNCTION_RETURN
4745 tree type = TREE_TYPE (DECL_RESULT (subr));
4746 int unsignedp = TREE_UNSIGNED (type);
/* Promote narrow scalar return types to the mode the target's calling
   convention actually uses (PROMOTE_MODE may widen `mode').  */
4748 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
4749 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
4750 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
4751 || TREE_CODE (type) == OFFSET_TYPE)
4753 PROMOTE_MODE (mode, unsignedp, type);
4757 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
4760 /* Scalar, returned in a register. */
4762 #ifdef FUNCTION_OUTGOING_VALUE
4763 DECL_RTL (DECL_RESULT (subr))
4764 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4766 DECL_RTL (DECL_RESULT (subr))
4767 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
4770 /* Mark this reg as the function's return value. */
4771 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
4773 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
4774 /* Needed because we may need to move this to memory
4775 in case it's a named return value whose address is taken. */
4776 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4780 /* Initialize rtx for parameters and local variables.
4781 In some cases this requires emitting insns. */
4783 assign_parms (subr, 0);
4785 /* The following was moved from init_function_start.
4786 The move is supposed to make sdb output more accurate. */
4787 /* Indicate the beginning of the function body,
4788 as opposed to parm setup. */
4789 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
4791 /* If doing stupid allocation, mark parms as born here. */
4793 if (GET_CODE (get_last_insn ()) != NOTE)
4794 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4795 parm_birth_insn = get_last_insn ();
/* Keep parm pseudo-regs (and the internal arg pointer) live via USE insns
   — presumably only under stupid (non-optimizing) register allocation;
   the guarding condition is missing from the listing.  */
4799 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4800 use_variable (regno_reg_rtx[i]);
4802 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4803 use_variable (current_function_internal_arg_pointer);
4806 /* Fetch static chain values for containing functions. */
4807 tem = decl_function_context (current_function_decl);
4808 /* If not doing stupid register allocation, then start off with the static
4809 chain pointer in a pseudo register. Otherwise, we use the stack
4810 address that was generated above. */
4811 if (tem && ! obey_regdecls)
4812 last_ptr = copy_to_reg (static_chain_incoming_rtx);
4813 context_display = 0;
/* Build the context display: one RTL_EXPR per enclosing function,
   recording the frame pointer of that lexical level.  */
4816 tree rtlexp = make_node (RTL_EXPR);
4818 RTL_EXPR_RTL (rtlexp) = last_ptr;
4819 context_display = tree_cons (tem, rtlexp, context_display);
4820 tem = decl_function_context (tem);
4823 /* Chain thru stack frames, assuming pointer to next lexical frame
4824 is found at the place we always store it. */
4825 #ifdef FRAME_GROWS_DOWNWARD
4826 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
4828 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
4829 memory_address (Pmode, last_ptr)));
4832 /* After the display initializations is where the tail-recursion label
4833 should go, if we end up needing one. Ensure we have a NOTE here
4834 since some things (like trampolines) get placed before this. */
4835 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
4837 /* Evaluate now the sizes of any types declared among the arguments. */
4838 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
4839 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
4841 /* Make sure there is a line number after the function entry setup code. */
4842 force_next_line_note ();
4845 /* Generate RTL for the end of the current function.
4846 FILENAME and LINE are the current position in the source file. */
4848 /* It is up to language-specific callers to do cleanups for parameters. */
/* NOTE(review): listing has gaps — parameter/local declarations, braces,
   several #else/#endif lines, the guards around some loops, and parts of
   conditionals are not visible here.  */
4851 expand_function_end (filename, line)
/* Shared across calls: the template used to initialize all trampolines.  */
4858 static rtx initial_trampoline;
/* Bytecode compilation takes a separate path entirely.  */
4860 if (output_bytecode)
4862 bc_expand_function_end ();
4866 #ifdef NON_SAVING_SETJMP
4867 /* Don't put any variables in registers if we call setjmp
4868 on a machine that fails to restore the registers. */
4869 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
4871 setjmp_protect (DECL_INITIAL (current_function_decl));
4872 setjmp_protect_args ();
4876 /* Save the argument pointer if a save area was made for it. */
4877 if (arg_pointer_save_area)
4879 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
4880 emit_insn_before (x, tail_recursion_reentry);
4883 /* Initialize any trampolines required by this function. */
4884 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4886 tree function = TREE_PURPOSE (link);
4887 rtx context = lookup_static_chain (function);
4888 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
4891 /* First make sure this compilation has a template for
4892 initializing trampolines. */
4893 if (initial_trampoline == 0)
4895 end_temporary_allocation ();
4897 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
4898 resume_temporary_allocation ();
4901 /* Generate insns to initialize the trampoline. */
/* NOTE(review): `tramp' is reassigned below from `initial_trampoline';
   intermediate lines (presumably saving the original address) are
   missing from the listing — verify against the full source.  */
4903 tramp = change_address (initial_trampoline, BLKmode,
4904 round_trampoline_addr (XEXP (tramp, 0)));
4905 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
4906 FUNCTION_BOUNDARY / BITS_PER_UNIT);
4907 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
4908 XEXP (DECL_RTL (function), 0), context);
4912 /* Put those insns at entry to the containing function (this one). */
4913 emit_insns_before (seq, tail_recursion_reentry);
4916 #if 0 /* I think unused parms are legitimate enough. */
4917 /* Warn about unused parms. */
4922 for (decl = DECL_ARGUMENTS (current_function_decl);
4923 decl; decl = TREE_CHAIN (decl))
4924 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
4925 warning_with_decl (decl, "unused parameter `%s'");
4929 /* Delete handlers for nonlocal gotos if nothing uses them. */
4930 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
4933 /* End any sequences that failed to be closed due to syntax errors. */
4934 while (in_sequence_p ())
4937 /* Outside function body, can't compute type's actual size
4938 until next function's body starts. */
4939 immediate_size_expand--;
4941 /* If doing stupid register allocation,
4942 mark register parms as dying here. */
4947 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
4948 use_variable (regno_reg_rtx[i]);
4950 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
4952 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
4954 use_variable (XEXP (tem, 0));
4955 use_variable_after (XEXP (tem, 0), parm_birth_insn);
4958 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
4959 use_variable (current_function_internal_arg_pointer);
4962 clear_pending_stack_adjust ();
4963 do_pending_stack_adjust ();
4965 /* Mark the end of the function body.
4966 If control reaches this insn, the function can drop through
4967 without returning a value. */
4968 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
4970 /* Output a linenumber for the end of the function.
4971 SDB depends on this. */
4972 emit_line_note_force (filename, line);
4974 /* Output the label for the actual return from the function,
4975 if one is expected. This happens either because a function epilogue
4976 is used instead of a return instruction, or because a return was done
4977 with a goto in order to run local cleanups, or because of pcc-style
4978 structure returning. */
/* NOTE(review): the condition guarding emit_label (presumably
   `if (return_label)') is missing from the listing.  */
4981 emit_label (return_label);
4983 /* If we had calls to alloca, and this machine needs
4984 an accurate stack pointer to exit the function,
4985 insert some code to save and restore the stack pointer. */
4986 #ifdef EXIT_IGNORE_STACK
4987 if (! EXIT_IGNORE_STACK)
4989 if (current_function_calls_alloca)
4993 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4994 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4997 /* If scalar return value was computed in a pseudo-reg,
4998 copy that to the hard return register. */
4999 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5000 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5001 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5002 >= FIRST_PSEUDO_REGISTER))
5004 rtx real_decl_result;
5006 #ifdef FUNCTION_OUTGOING_VALUE
5008 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5009 current_function_decl);
5012 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5013 current_function_decl);
5015 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5016 emit_move_insn (real_decl_result,
5017 DECL_RTL (DECL_RESULT (current_function_decl)));
5018 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5021 /* If returning a structure, arrange to return the address of the value
5022 in a place where debuggers expect to find it.
5024 If returning a structure PCC style,
5025 the caller also depends on this value.
5026 And current_function_returns_pcc_struct is not necessarily set. */
5027 if (current_function_returns_struct
5028 || current_function_returns_pcc_struct)
5030 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5031 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5032 #ifdef FUNCTION_OUTGOING_VALUE
5034 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5035 current_function_decl);
5038 = FUNCTION_VALUE (build_pointer_type (type),
5039 current_function_decl);
5042 /* Mark this as a function return value so integrate will delete the
5043 assignment and USE below when inlining this function. */
5044 REG_FUNCTION_VALUE_P (outgoing) = 1;
5046 emit_move_insn (outgoing, value_address);
5047 use_variable (outgoing);
5050 /* Output a return insn if we are using one.
5051 Otherwise, let the rtl chain end here, to drop through
5052 into the epilogue. */
5057 emit_jump_insn (gen_return ());
5062 /* Fix up any gotos that jumped out to the outermost
5063 binding level of the function.
5064 Must follow emitting RETURN_LABEL. */
5066 /* If you have any cleanups to do at this point,
5067 and they need to create temporary variables,
5068 then you will lose. */
5069 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, get_insns (), 0);
5072 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5074 static int *prologue;
5075 static int *epilogue;
5077 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5078 or a single insn). */
/* NOTE(review): listing has gaps — the return type, the declaration of
   `vec' and loop index, the loop recording each SEQUENCE element, the
   sentinel assignment for the single-insn case, and the final return
   are not visible here.  Result appears to be a UID array terminated by
   a sentinel slot (vec[len] / vec[1]) — TODO confirm.  */
5081 record_insns (insns)
/* A SEQUENCE gets one slot per element plus a terminator slot.  */
5086 if (GET_CODE (insns) == SEQUENCE)
5088 int len = XVECLEN (insns, 0);
5089 vec = (int *) oballoc ((len + 1) * sizeof (int));
5092 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
/* A single insn gets a two-slot array: its UID plus a terminator.  */
5096 vec = (int *) oballoc (2 * sizeof (int));
5097 vec[0] = INSN_UID (insns);
5103 /* Determine how many INSN_UIDs in VEC are part of INSN. */
/* NOTE(review): listing has gaps — the return type, parameter and local
   declarations (i, j, count), braces, the count increments, and the
   return statements are not visible here.  VEC is presumably a
   zero-terminated UID array produced by record_insns.  */
5106 contains (insn, vec)
/* If INSN is itself a SEQUENCE, check every element against VEC.  */
5112 if (GET_CODE (insn) == INSN
5113 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5116 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5117 for (j = 0; vec[j]; j++)
5118 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
/* Otherwise just look up INSN's own UID.  */
5124 for (j = 0; vec[j]; j++)
5125 if (INSN_UID (insn) == vec[j])
5131 /* Generate the prologe and epilogue RTL if the machine supports it. Thread
5132 this into place with notes indicating where the prologue ends and where
5133 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): listing has gaps — the return type, parameter declaration,
   braces, the #else/#endif pairs, and the declarations of tail/tem/
   first_use/last_use are not visible here.  */
5136 thread_prologue_and_epilogue_insns (f)
5139 #ifdef HAVE_prologue
5142 rtx head, seq, insn;
5144 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5145 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5146 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5147 seq = gen_prologue ();
5148 head = emit_insn_after (seq, f);
5150 /* Include the new prologue insns in the first block. Ignore them
5151 if they form a basic block unto themselves. */
5152 if (basic_block_head && n_basic_blocks
5153 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5154 basic_block_head[0] = NEXT_INSN (f);
5156 /* Retain a map of the prologue insns. */
5157 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5163 #ifdef HAVE_epilogue
5166 rtx insn = get_last_insn ();
5167 rtx prev = prev_nonnote_insn (insn);
5169 /* If we end with a BARRIER, we don't need an epilogue. */
5170 if (! (prev && GET_CODE (prev) == BARRIER))
5176 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5177 epilogue insns, the USE insns at the end of a function,
5178 the jump insn that returns, and then a BARRIER. */
5180 /* Move the USE insns at the end of a function onto a list. */
/* Unlink each trailing USE insn from the chain, collecting them so they
   can be re-inserted just before the return insn below.  */
5182 && GET_CODE (prev) == INSN
5183 && GET_CODE (PATTERN (prev)) == USE)
5186 prev = prev_nonnote_insn (prev);
5188 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5189 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5193 NEXT_INSN (last_use) = tem;
5198 emit_barrier_after (insn);
5200 seq = gen_epilogue ();
5201 tail = emit_jump_insn_after (seq, insn);
5203 /* Insert the USE insns immediately before the return insn, which
5204 must be the first instruction before the final barrier. */
5207 tem = prev_nonnote_insn (get_last_insn ());
5208 NEXT_INSN (PREV_INSN (tem)) = first_use;
5209 PREV_INSN (first_use) = PREV_INSN (tem);
5210 PREV_INSN (tem) = last_use;
5211 NEXT_INSN (last_use) = tem;
5214 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5216 /* Include the new epilogue insns in the last block. Ignore
5217 them if they form a basic block unto themselves. */
5218 if (basic_block_end && n_basic_blocks
5219 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5220 basic_block_end[n_basic_blocks - 1] = tail;
5222 /* Retain a map of the epilogue insns. */
5223 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5231 /* Reposition the prologue-end and epilogue-begin notes after instruction
5232 scheduling and delayed branch scheduling. */
/* NOTE(review): listing has gaps — the return type, parameter declaration,
   braces, the guards testing `prologue'/`epilogue' non-null, the `len'/
   `next'/`prev' declarations, and several closing lines are missing.  */
5235 reposition_prologue_and_epilogue_notes (f)
5238 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5239 /* Reposition the prologue and epilogue notes. */
5247 register rtx insn, note = 0;
5249 /* Scan from the beginning until we reach the last prologue insn.
5250 We apparently can't depend on basic_block_{head,end} after
/* Count the recorded prologue insns (array is sentinel-terminated).  */
5252 for (len = 0; prologue[len]; len++)
5254 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5256 if (GET_CODE (insn) == NOTE)
5258 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
/* Decrement by however many recorded prologue insns this insn covers;
   when the count hits zero, `insn' is the last prologue insn.  */
5261 else if ((len -= contains (insn, prologue)) == 0)
5263 /* Find the prologue-end note if we haven't already, and
5264 move it to just after the last prologue insn. */
5267 for (note = insn; note = NEXT_INSN (note);)
5268 if (GET_CODE (note) == NOTE
5269 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
/* Unlink the note from its current position, then re-insert it
   directly after the last prologue insn.  */
5272 next = NEXT_INSN (note);
5273 prev = PREV_INSN (note);
5275 NEXT_INSN (prev) = next;
5277 PREV_INSN (next) = prev;
5278 add_insn_after (note, insn);
5285 register rtx insn, note = 0;
5287 /* Scan from the end until we reach the first epilogue insn.
5288 We apparently can't depend on basic_block_{head,end} after
5290 for (len = 0; epilogue[len]; len++)
5292 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5294 if (GET_CODE (insn) == NOTE)
5296 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5299 else if ((len -= contains (insn, epilogue)) == 0)
5301 /* Find the epilogue-begin note if we haven't already, and
5302 move it to just before the first epilogue insn. */
5305 for (note = insn; note = PREV_INSN (note);)
5306 if (GET_CODE (note) == NOTE
5307 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
/* Unlink the note, then re-insert it just before the first
   epilogue insn (i.e. after that insn's predecessor).  */
5310 next = NEXT_INSN (note);
5311 prev = PREV_INSN (note);
5313 NEXT_INSN (prev) = next;
5315 PREV_INSN (next) = prev;
5316 add_insn_after (note, PREV_INSN (insn));
5321 #endif /* HAVE_prologue or HAVE_epilogue */