#include "recog.h"
#include "output.h"
#include "basic-block.h"
-#include "obstack.h"
#include "toplev.h"
-#include "hash.h"
+#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
must define both, or neither. */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
-#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
post-instantiation libcalls. */
int virtuals_instantiated;
+/* Assign unique numbers to labels generated for profiling, debugging, etc. */
+static int funcdef_no;
+
/* These variables hold pointers to functions to create and destroy
target specific, per-function data structures. */
-void (*init_machine_status) PARAMS ((struct function *));
-void (*free_machine_status) PARAMS ((struct function *));
-/* This variable holds a pointer to a function to register any
- data items in the target specific, per-function data structure
- that will need garbage collection. */
-void (*mark_machine_status) PARAMS ((struct function *));
-
-/* Likewise, but for language-specific data. */
-void (*init_lang_status) PARAMS ((struct function *));
-void (*save_lang_status) PARAMS ((struct function *));
-void (*restore_lang_status) PARAMS ((struct function *));
-void (*mark_lang_status) PARAMS ((struct function *));
-void (*free_lang_status) PARAMS ((struct function *));
+struct machine_function * (*init_machine_status) PARAMS ((void));
/* The FUNCTION_DECL for an inline function currently being expanded. */
tree inline_function_decl;
struct function *cfun = 0;
/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
-static varray_type prologue;
-static varray_type epilogue;
+static GTY(()) varray_type prologue;
+static GTY(()) varray_type epilogue;
/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
in this function. */
-static varray_type sibcall_epilogue;
+static GTY(()) varray_type sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
structures in memory, we need to temporarily allocate stack locations.
level where they are defined. They are marked a "kept" so that
free_temp_slots will not free them. */
-struct temp_slot
+struct temp_slot GTY(())
{
/* Points to next temporary slot. */
struct temp_slot *next;
tree type;
/* The value of `sequence_rtl_expr' when this temporary is allocated. */
tree rtl_expr;
- /* Non-zero if this temporary is currently in use. */
+ /* Nonzero if this temporary is currently in use. */
char in_use;
- /* Non-zero if this temporary has its address taken. */
+ /* Nonzero if this temporary has its address taken. */
char addr_taken;
/* Nesting level at which this slot is being used. */
int level;
- /* Non-zero if this should survive a call to free_temp_slots. */
+ /* Nonzero if this should survive a call to free_temp_slots. */
int keep;
/* The offset of the slot from the frame_pointer, including extra space
for alignment. This info is for combine_temp_slots. */
maintain this list in case two operands of an insn were required to match;
in that case we must ensure we use the same replacement. */
-struct fixup_replacement
+struct fixup_replacement GTY(())
{
rtx old;
rtx new;
struct insns_for_mem_entry
{
- /* The KEY in HE will be a MEM. */
- struct hash_entry he;
- /* These are the INSNS which reference the MEM. */
+ /* A MEM. */
+ rtx key;
+ /* These are the INSNs which reference the MEM. */
rtx insns;
};
static void put_reg_into_stack PARAMS ((struct function *, rtx, tree,
enum machine_mode, enum machine_mode,
int, unsigned int, int,
- struct hash_table *));
+ htab_t));
static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
enum machine_mode,
- struct hash_table *));
+ htab_t));
static void fixup_var_refs PARAMS ((rtx, enum machine_mode, int, rtx,
- struct hash_table *));
+ htab_t));
static struct fixup_replacement
*find_fixup_replacement PARAMS ((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
int, int, rtx));
static void fixup_var_refs_insns_with_hash
- PARAMS ((struct hash_table *, rtx,
+ PARAMS ((htab_t, rtx,
enum machine_mode, int, rtx));
static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
int, int, rtx));
static void fixup_var_refs_1 PARAMS ((rtx, enum machine_mode, rtx *, rtx,
struct fixup_replacement **, rtx));
-static rtx fixup_memory_subreg PARAMS ((rtx, rtx, int));
-static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, int));
+static rtx fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode, int));
+static rtx walk_fixup_memory_subreg PARAMS ((rtx, rtx, enum machine_mode,
+ int));
static rtx fixup_stack_1 PARAMS ((rtx, rtx));
static void optimize_bit_field PARAMS ((rtx, rtx, rtx *));
static void instantiate_decls PARAMS ((tree, int));
static void delete_handlers PARAMS ((void));
static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
struct args_size *));
-#ifndef ARGS_GROW_DOWNWARD
static void pad_below PARAMS ((struct args_size *, enum machine_mode,
tree));
-#endif
static rtx round_trampoline_addr PARAMS ((rtx));
static rtx adjust_trampoline_addr PARAMS ((rtx));
static tree *identify_blocks_1 PARAMS ((rtx, tree *, tree *, tree *));
#ifdef HAVE_return
static void emit_return_into_block PARAMS ((basic_block, rtx));
#endif
-static void put_addressof_into_stack PARAMS ((rtx, struct hash_table *));
+static void put_addressof_into_stack PARAMS ((rtx, htab_t));
static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
- struct hash_table *));
+ htab_t));
static void purge_single_hard_subreg_set PARAMS ((rtx));
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed PARAMS ((rtx));
#endif
static int is_addressof PARAMS ((rtx *, void *));
-static struct hash_entry *insns_for_mem_newfunc PARAMS ((struct hash_entry *,
- struct hash_table *,
- hash_table_key));
-static unsigned long insns_for_mem_hash PARAMS ((hash_table_key));
-static bool insns_for_mem_comp PARAMS ((hash_table_key, hash_table_key));
+static hashval_t insns_for_mem_hash PARAMS ((const void *));
+static int insns_for_mem_comp PARAMS ((const void *, const void *));
static int insns_for_mem_walk PARAMS ((rtx *, void *));
-static void compute_insns_for_mem PARAMS ((rtx, rtx, struct hash_table *));
-static void mark_function_status PARAMS ((struct function *));
-static void maybe_mark_struct_function PARAMS ((void *));
+static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
static void prepare_function_start PARAMS ((void));
static void do_clobber_return_reg PARAMS ((rtx, void *));
static void do_use_return_reg PARAMS ((rtx, void *));
\f
/* Pointer to chain of `struct function' for containing functions. */
-static struct function *outer_function_chain;
+static GTY(()) struct function *outer_function_chain;
/* Given a function decl for a containing function,
return the `struct function' for it. */
/* Save the current context for compilation of a nested function.
This is called from language-specific code. The caller should use
- the save_lang_status callback to save any language-specific state,
+ the enter_nested langhook to save any language-specific state,
since this function knows only about language-independent
variables. */
outer_function_chain = p;
p->fixup_var_refs_queue = 0;
- if (save_lang_status)
- (*save_lang_status) (p);
+ (*lang_hooks.function.enter_nested) (p);
cfun = 0;
}
restore_emit_status (p);
- if (restore_lang_status)
- (*restore_lang_status) (p);
+ (*lang_hooks.function.leave_nested) (p);
/* Finish doing put_var_into_stack for any of our variables which became
addressable during the nested function. If only one entry has to be
/* f->varasm is used by code generation. */
/* f->eh->eh_return_stub_label is used by code generation. */
- if (free_lang_status)
- (*free_lang_status) (f);
- free_stmt_status (f);
+ (*lang_hooks.function.final) (f);
+ f->stmt = NULL;
}
/* Clear out all parts of the state in F that can safely be discarded
free_after_compilation (f)
struct function *f;
{
- free_eh_status (f);
- free_expr_status (f);
- free_emit_status (f);
- free_varasm_status (f);
-
- if (free_machine_status)
- (*free_machine_status) (f);
-
- if (f->x_parm_reg_stack_loc)
- free (f->x_parm_reg_stack_loc);
+ f->eh = NULL;
+ f->expr = NULL;
+ f->emit = NULL;
+ f->varasm = NULL;
+ f->machine = NULL;
f->x_temp_slots = NULL;
f->arg_offset_rtx = NULL;
{
unsigned int align;
struct temp_slot *p, *best_p = 0;
+ rtx slot;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
of a variable size. */
p->keep = keep;
}
- /* We may be reusing an old slot, so clear any MEM flags that may have been
- set from before. */
- RTX_UNCHANGING_P (p->slot) = 0;
- MEM_IN_STRUCT_P (p->slot) = 0;
- MEM_SCALAR_P (p->slot) = 0;
- MEM_VOLATILE_P (p->slot) = 0;
- set_mem_alias_set (p->slot, 0);
+
+ /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
+ slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
+ stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
/* If we know the alias set for the memory that will be used, use
it. If there's no TYPE, then we don't know anything about the
alias set for the memory. */
- set_mem_alias_set (p->slot, type ? get_alias_set (type) : 0);
- set_mem_align (p->slot, align);
+ set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
+ set_mem_align (slot, align);
/* If a type is specified, set the relevant flags. */
if (type != 0)
{
- RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
- MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
- MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
+ RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
+ && TYPE_READONLY (type));
+ MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
+ MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
}
- return p->slot;
+ return slot;
}
/* Allocate a temporary stack slot and record it for possible later
/* The size of the temporary may be too large to fit into an integer. */
/* ??? Not sure this should happen except for user silliness, so limit
- this to things that aren't compiler-generated temporaries. The
+ this to things that aren't compiler-generated temporaries. The
rest of the time we'll abort in assign_stack_temp_for_type. */
if (decl && size == -1
&& TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
context = decl_function_context (decl);
/* Get the current rtl used for this object and its original mode. */
- reg = (TREE_CODE (decl) == SAVE_EXPR
- ? SAVE_EXPR_RTL (decl)
+ reg = (TREE_CODE (decl) == SAVE_EXPR
+ ? SAVE_EXPR_RTL (decl)
: DECL_RTL_IF_SET (decl));
/* No need to do anything if decl has no rtx yet
int volatile_p;
unsigned int original_regno;
int used_p;
- struct hash_table *ht;
+ htab_t ht;
{
struct function *func = function ? function : cfun;
rtx new = 0;
rtx reg;
tree type;
enum machine_mode promoted_mode;
- struct hash_table *ht;
+ htab_t ht;
{
int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
rtx var;
enum machine_mode promoted_mode;
int unsignedp;
- struct hash_table *ht;
+ htab_t ht;
rtx may_share;
{
tree pending;
static void
fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
- struct hash_table *ht;
+ htab_t ht;
rtx var;
enum machine_mode promoted_mode;
int unsignedp;
rtx may_share;
{
- struct insns_for_mem_entry *ime
- = (struct insns_for_mem_entry *) hash_lookup (ht, var,
- /*create=*/0, /*copy=*/0);
+ struct insns_for_mem_entry tmp;
+ struct insns_for_mem_entry *ime;
rtx insn_list;
+ tmp.key = var;
+ ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
if (INSN_P (XEXP (insn_list, 0)))
fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
/* OLD might be a (subreg (mem)). */
if (GET_CODE (replacements->old) == SUBREG)
replacements->old
- = fixup_memory_subreg (replacements->old, insn, 0);
+ = fixup_memory_subreg (replacements->old, insn,
+ promoted_mode, 0);
else
replacements->old
= fixup_stack_1 (replacements->old, insn);
start_sequence ();
convert_move (replacements->new,
replacements->old, unsignedp);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
}
else
{
if (GET_CODE (note) != INSN_LIST)
XEXP (note, 0)
- = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
+ = walk_fixup_memory_subreg (XEXP (note, 0), insn,
+ promoted_mode, 1);
note = XEXP (note, 1);
}
}
start_sequence ();
new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
if (recog_memoized (new_insn) < 0)
sub = force_operand (sub, y);
if (sub != y)
emit_insn (gen_move_insn (y, sub));
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
}
return;
}
else
- tem = fixup_memory_subreg (tem, insn, 0);
+ tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
}
else
tem = fixup_stack_1 (tem, insn);
return;
}
- replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
+ replacement->new = *loc = fixup_memory_subreg (x, insn,
+ promoted_mode, 0);
INSN_CODE (insn) = -1;
if (! flag_force_mem && recog_memoized (insn) >= 0)
This was legitimate when the MEM was a REG. */
if (GET_CODE (tem) == SUBREG
&& SUBREG_REG (tem) == var)
- tem = fixup_memory_subreg (tem, insn, 0);
+ tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
else
tem = fixup_stack_1 (tem, insn);
SET_SRC (x) = replacement->new;
else if (GET_CODE (SET_SRC (x)) == SUBREG)
SET_SRC (x) = replacement->new
- = fixup_memory_subreg (SET_SRC (x), insn, 0);
+ = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
+ 0);
else
SET_SRC (x) = replacement->new
= fixup_stack_1 (SET_SRC (x), insn);
no other function that to do X. */
pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
- if (GET_CODE (pat) == SEQUENCE)
+ if (NEXT_INSN (pat) != NULL_RTX)
{
last = emit_insn_before (pat, insn);
delete_insn (last);
}
else
- PATTERN (insn) = pat;
+ PATTERN (insn) = PATTERN (pat);
return;
}
rtx pat, last;
if (GET_CODE (SET_DEST (x)) == SUBREG)
- SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
+ SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
+ promoted_mode, 0);
else
SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
return;
pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
- if (GET_CODE (pat) == SEQUENCE)
+ if (NEXT_INSN (pat) != NULL_RTX)
{
last = emit_insn_before (pat, insn);
delete_insn (last);
}
else
- PATTERN (insn) = pat;
+ PATTERN (insn) = PATTERN (pat);
return;
}
{
rtx temp;
rtx fixeddest = SET_DEST (x);
+ enum machine_mode temp_mode;
/* STRICT_LOW_PART can be discarded, around a MEM. */
if (GET_CODE (fixeddest) == STRICT_LOW_PART)
/* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
if (GET_CODE (fixeddest) == SUBREG)
{
- fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
- promoted_mode = GET_MODE (fixeddest);
+ fixeddest = fixup_memory_subreg (fixeddest, insn,
+ promoted_mode, 0);
+ temp_mode = GET_MODE (fixeddest);
}
else
- fixeddest = fixup_stack_1 (fixeddest, insn);
+ {
+ fixeddest = fixup_stack_1 (fixeddest, insn);
+ temp_mode = promoted_mode;
+ }
- temp = gen_reg_rtx (promoted_mode);
+ temp = gen_reg_rtx (temp_mode);
emit_insn_after (gen_move_insn (fixeddest,
gen_lowpart (GET_MODE (fixeddest),
}
}
\f
-/* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
- return an rtx (MEM:m1 newaddr) which is equivalent.
- If any insns must be emitted to compute NEWADDR, put them before INSN.
+/* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
+ The REG was placed on the stack, so X now has the form (SUBREG:m1
+ (MEM:m2 ...)).
+
+ Return an rtx (MEM:m1 newaddr) which is equivalent. If any insns
+ must be emitted to compute NEWADDR, put them before INSN.
UNCRITICAL nonzero means accept paradoxical subregs.
This is used for subregs found inside REG_NOTES. */
static rtx
-fixup_memory_subreg (x, insn, uncritical)
+fixup_memory_subreg (x, insn, promoted_mode, uncritical)
rtx x;
rtx insn;
+ enum machine_mode promoted_mode;
int uncritical;
{
- int offset = SUBREG_BYTE (x);
- rtx addr = XEXP (SUBREG_REG (x), 0);
+ int offset;
+ rtx mem = SUBREG_REG (x);
+ rtx addr = XEXP (mem, 0);
enum machine_mode mode = GET_MODE (x);
- rtx result;
+ rtx result, seq;
/* Paradoxical SUBREGs are usually invalid during RTL generation. */
- if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
- && ! uncritical)
+ if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
abort ();
+ offset = SUBREG_BYTE (x);
+ if (BYTES_BIG_ENDIAN)
+ /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
+ the offset so that it points to the right location within the
+ MEM. */
+ offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
+
if (!flag_force_addr
&& memory_address_p (mode, plus_constant (addr, offset)))
/* Shortcut if no insns need be emitted. */
- return adjust_address (SUBREG_REG (x), mode, offset);
+ return adjust_address (mem, mode, offset);
start_sequence ();
- result = adjust_address (SUBREG_REG (x), mode, offset);
- emit_insn_before (gen_sequence (), insn);
+ result = adjust_address (mem, mode, offset);
+ seq = get_insns ();
end_sequence ();
+
+ emit_insn_before (seq, insn);
return result;
}
If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
Otherwise return X, with its contents possibly altered.
- If any insns must be emitted to compute NEWADDR, put them before INSN.
-
- UNCRITICAL is as in fixup_memory_subreg. */
+ INSN, PROMOTED_MODE and UNCRITICAL are as for
+ fixup_memory_subreg. */
static rtx
-walk_fixup_memory_subreg (x, insn, uncritical)
+walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
rtx x;
rtx insn;
+ enum machine_mode promoted_mode;
int uncritical;
{
enum rtx_code code;
code = GET_CODE (x);
if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
- return fixup_memory_subreg (x, insn, uncritical);
+ return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
/* Nothing special about this RTX; fix its operands. */
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
if (fmt[i] == 'e')
- XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
+ XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
+ promoted_mode, uncritical);
else if (fmt[i] == 'E')
{
int j;
for (j = 0; j < XVECLEN (x, i); j++)
XVECEXP (x, i, j)
- = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
+ = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
+ promoted_mode, uncritical);
}
}
return x;
start_sequence ();
temp = copy_to_reg (ad);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
emit_insn_before (seq, insn);
return replace_equiv_address (x, temp);
memref = adjust_address (memref, mode, offset);
insns = get_insns ();
end_sequence ();
- emit_insns_before (insns, insn);
+ emit_insn_before (insns, insn);
/* Store this memory reference where
we found the bit field reference. */
special; just let the optimization be suppressed. */
if (apply_change_group () && seq)
- emit_insns_before (seq, insn);
+ emit_insn_before (seq, insn);
}
}
}
static void
put_addressof_into_stack (r, ht)
rtx r;
- struct hash_table *ht;
+ htab_t ht;
{
tree decl, type;
int volatile_p, used_p;
rtx *loc;
rtx insn;
int force, store;
- struct hash_table *ht;
+ htab_t ht;
{
rtx x;
RTX_CODE code;
put_addressof_into_stack (x, ht);
return true;
}
-
+
/* We must create a copy of the rtx because it was created by
overwriting a REG rtx which is always shared. */
sub = copy_rtx (XEXP (XEXP (x, 0), 0));
&& ! validate_replace_rtx (x, sub, insn))
abort ();
- insns = gen_sequence ();
+ insns = get_insns ();
end_sequence ();
emit_insn_before (insns, insn);
return true;
end_sequence ();
goto give_up;
}
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
emit_insn_before (seq, insn);
compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
might have created. */
unshare_all_rtl_again (get_insns ());
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
p = emit_insn_after (seq, insn);
if (NEXT_INSN (insn))
goto give_up;
}
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
emit_insn_before (seq, insn);
compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
return result;
}
-/* Return a new hash table entry in HT. */
-
-static struct hash_entry *
-insns_for_mem_newfunc (he, ht, k)
- struct hash_entry *he;
- struct hash_table *ht;
- hash_table_key k ATTRIBUTE_UNUSED;
-{
- struct insns_for_mem_entry *ifmhe;
- if (he)
- return he;
-
- ifmhe = ((struct insns_for_mem_entry *)
- hash_allocate (ht, sizeof (struct insns_for_mem_entry)));
- ifmhe->insns = NULL_RTX;
-
- return &ifmhe->he;
-}
-
/* Return a hash value for K, a REG. */
-static unsigned long
+static hashval_t
insns_for_mem_hash (k)
- hash_table_key k;
+ const void * k;
{
- /* K is really a RTX. Just use the address as the hash value. */
- return (unsigned long) k;
+ /* Use the address of the key for the hash value. */
+ struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
+ return htab_hash_pointer (m->key);
}
-/* Return non-zero if K1 and K2 (two REGs) are the same. */
+/* Return nonzero if K1 and K2 (two REGs) are the same. */
-static bool
+static int
insns_for_mem_comp (k1, k2)
- hash_table_key k1;
- hash_table_key k2;
+ const void * k1;
+ const void * k2;
{
- return k1 == k2;
+ struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
+ struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
+ return m1->key == m2->key;
}
struct insns_for_mem_walk_info
{
/* The hash table that we are using to record which INSNs use which
MEMs. */
- struct hash_table *ht;
+ htab_t ht;
/* The INSN we are currently processing. */
rtx insn;
{
struct insns_for_mem_walk_info *ifmwi
= (struct insns_for_mem_walk_info *) data;
+ struct insns_for_mem_entry tmp;
+ tmp.insns = NULL_RTX;
if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
&& GET_CODE (XEXP (*r, 0)) == REG)
- hash_lookup (ifmwi->ht, XEXP (*r, 0), /*create=*/1, /*copy=*/0);
+ {
+ PTR *e;
+ tmp.key = XEXP (*r, 0);
+ e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
+ if (*e == NULL)
+ {
+ *e = ggc_alloc (sizeof (tmp));
+ memcpy (*e, &tmp, sizeof (tmp));
+ }
+ }
else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
{
- /* Lookup this MEM in the hashtable, creating it if necessary. */
- struct insns_for_mem_entry *ifme
- = (struct insns_for_mem_entry *) hash_lookup (ifmwi->ht,
- *r,
- /*create=*/0,
- /*copy=*/0);
+ struct insns_for_mem_entry *ifme;
+ tmp.key = *r;
+ ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
/* If we have not already recorded this INSN, do so now. Since
we process the INSNs in order, we know that if we have
compute_insns_for_mem (insns, last_insn, ht)
rtx insns;
rtx last_insn;
- struct hash_table *ht;
+ htab_t ht;
{
rtx insn;
struct insns_for_mem_walk_info ifmwi;
rtx insns;
{
rtx insn;
- struct hash_table ht;
+ htab_t ht;
/* When we actually purge ADDRESSOFs, we turn REGs into MEMs. That
requires a fixup pass over the instruction stream to correct
mentioned in very many instructions. So, we speed up the process
by pre-calculating which REGs occur in which INSNs; that allows
us to perform the fixup passes much more quickly. */
- hash_table_init (&ht,
- insns_for_mem_newfunc,
- insns_for_mem_hash,
- insns_for_mem_comp);
- compute_insns_for_mem (insns, NULL_RTX, &ht);
+ ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
+ compute_insns_for_mem (insns, NULL_RTX, ht);
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
|| GET_CODE (insn) == CALL_INSN)
{
if (! purge_addressof_1 (&PATTERN (insn), insn,
- asm_noperands (PATTERN (insn)) > 0, 0, &ht))
+ asm_noperands (PATTERN (insn)) > 0, 0, ht))
/* If we could not replace the ADDRESSOFs in the insn,
something is wrong. */
abort ();
- if (! purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, &ht))
+ if (! purge_addressof_1 (®_NOTES (insn), NULL_RTX, 0, 0, ht))
{
/* If we could not replace the ADDRESSOFs in the insn's notes,
we can just remove the offending notes instead. */
}
/* Clean up. */
- hash_table_free (&ht);
purge_bitfield_addressof_replacements = 0;
purge_addressof_replacements = 0;
reg = SUBREG_REG (reg);
}
-
+
if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
{
reg = gen_rtx_REG (mode, REGNO (reg) + offset);
case SET:
if (GET_CODE (SET_DEST (pattern)) == SUBREG)
purge_single_hard_subreg_set (pattern);
- break;
+ break;
case PARALLEL:
{
int j;
for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
if (DECL_RTL_SET_P (t))
- instantiate_decl (DECL_RTL (t),
+ instantiate_decl (DECL_RTL (t),
int_size_in_bytes (TREE_TYPE (t)),
valid_only);
/* Subroutine of the preceding procedures: Given RTL representing a
decl and the size of the object, do any instantiation required.
- If VALID_ONLY is non-zero, it means that the RTL should only be
+ If VALID_ONLY is nonzero, it means that the RTL should only be
changed if the new address is valid. */
static void
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, object);
+ emit_insn_before (seq, object);
SET_DEST (x) = new;
if (! validate_change (object, &SET_SRC (x), temp, 0)
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, object);
+ emit_insn_before (seq, object);
if (! validate_change (object, loc, temp, 0)
&& ! validate_replace_rtx (x, temp, object))
abort ();
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, object);
+ emit_insn_before (seq, object);
if (! validate_change (object, loc, temp, 0)
&& ! validate_replace_rtx (x, temp, object))
abort ();
rtx conversion_insns = 0;
struct args_size alignment_pad;
- /* Nonzero if the last arg is named `__builtin_va_alist',
- which is used on some machines for old-fashioned non-ANSI varargs.h;
- this should be stuck onto the stack as if it had arrived there. */
- int hide_last_arg
- = (current_function_varargs
- && fnargs
- && (parm = tree_last (fnargs)) != 0
- && DECL_NAME (parm)
- && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
- "__builtin_va_alist")));
-
/* Nonzero if function takes extra anonymous args.
This means the last named arg must be on the stack
right before the anonymous ones. */
}
max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
- parm_reg_stack_loc = (rtx *) xcalloc (max_parm_reg, sizeof (rtx));
+ parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
/* Set LAST_NAMED if this is last named arg before last
anonymous args. */
- if (stdarg || current_function_varargs)
+ if (stdarg)
{
tree tem;
continue;
}
- /* For varargs.h function, save info about regs and stack space
- used by the individual args, not including the va_alist arg. */
- if (hide_last_arg && last_named)
- current_function_args_info = args_so_far;
-
/* Find mode of arg as it is passed, and mode of arg
as it should be during execution of this function. */
passed_mode = TYPE_MODE (passed_type);
passed_pointer = 1;
passed_mode = nominal_mode = Pmode;
}
+ /* See if the frontend wants to pass this by invisible reference. */
+ else if (passed_type != nominal_type
+ && POINTER_TYPE_P (passed_type)
+ && TREE_TYPE (passed_type) == nominal_type)
+ {
+ nominal_type = passed_type;
+ passed_pointer = 1;
+ passed_mode = nominal_mode = Pmode;
+ }
promoted_mode = passed_mode;
to indicate there is no preallocated stack slot for the parm. */
if (entry_parm == stack_parm
- || (GET_CODE (entry_parm) == PARALLEL
- && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
+ || (GET_CODE (entry_parm) == PARALLEL
+ && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
#if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
/* On some machines, even if a parm value arrives in a register
there is still an (uninitialized) stack slot allocated for it.
if (passed_pointer)
{
rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
- parmreg);
+ parmreg);
set_mem_attributes (x, parm, 1);
SET_DECL_RTL (parm, x);
}
SET_DECL_RTL (parm, parmreg);
maybe_set_unchanging (DECL_RTL (parm), parm);
}
-
+
/* Copy the value into the register. */
if (nominal_mode != passed_mode
|| promoted_nominal_mode != promoted_mode)
push_to_sequence (conversion_insns);
emit_move_insn (tempreg, DECL_RTL (parm));
SET_DECL_RTL (parm,
- convert_to_mode (GET_MODE (parmreg),
+ convert_to_mode (GET_MODE (parmreg),
tempreg,
unsigned_p));
emit_move_insn (parmreg, DECL_RTL (parm));
but it's also rare and we need max_parm_reg to be
precisely correct. */
max_parm_reg = regno + 1;
- new = (rtx *) xrealloc (parm_reg_stack_loc,
+ new = (rtx *) ggc_realloc (parm_reg_stack_loc,
max_parm_reg * sizeof (rtx));
memset ((char *) (new + old_max_parm_reg), 0,
(max_parm_reg - old_max_parm_reg) * sizeof (rtx));
/* Output all parameter conversion instructions (possibly including calls)
now that all parameters have been copied out of hard registers. */
- emit_insns (conversion_insns);
+ emit_insn (conversion_insns);
last_parm_insn = get_last_insn ();
/* For stdarg.h function, save info about
regs and stack space used by the named args. */
- if (!hide_last_arg)
- current_function_args_info = args_so_far;
+ current_function_args_info = args_so_far;
/* Set the rtx used for the function return value. Put this in its
own variable so any optimizers that need this information don't have
The starting offset and size for this parm are returned in *OFFSET_PTR
and *ARG_SIZE_PTR, respectively.
- IN_REGS is non-zero if the argument will be passed in registers. It will
+ IN_REGS is nonzero if the argument will be passed in registers. It will
never be set if REG_PARM_STACK_SPACE is not defined.
FNDECL is the function in which the argument was defined.
= type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
+#ifdef ARGS_GROW_DOWNWARD
+ tree s2 = sizetree;
+#endif
#ifdef REG_PARM_STACK_SPACE
/* If we have found a stack parm before we reach the end of the
offset_ptr->constant = -initial_offset_ptr->constant;
offset_ptr->var = 0;
}
+
if (where_pad != none
&& (!host_integerp (sizetree, 1)
|| (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
- sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
- SUB_PARM_SIZE (*offset_ptr, sizetree);
- if (where_pad != downward)
+ s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
+ SUB_PARM_SIZE (*offset_ptr, s2);
+
+ if (!in_regs
+#ifdef REG_PARM_STACK_SPACE
+ || REG_PARM_STACK_SPACE (fndecl) > 0
+#endif
+ )
pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);
+
if (initial_offset_ptr->var)
arg_size_ptr->var = size_binop (MINUS_EXPR,
size_binop (MINUS_EXPR,
arg_size_ptr->constant = (-initial_offset_ptr->constant
- offset_ptr->constant);
+ /* Pad_below needs the pre-rounded size to know how much to pad below.
+ We only pad parameters which are not in registers as they have their
+ padding done elsewhere. */
+ if (where_pad == downward
+ && !in_regs)
+ pad_below (offset_ptr, passed_mode, sizetree);
+
#else /* !ARGS_GROW_DOWNWARD */
if (!in_regs
#ifdef REG_PARM_STACK_SPACE
(ARGS_SIZE_TREE (*offset_ptr),
boundary / BITS_PER_UNIT);
offset_ptr->constant = 0; /*?*/
- if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
- alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
+ if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
+ alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
save_var);
}
else
}
}
-#ifndef ARGS_GROW_DOWNWARD
static void
pad_below (offset_ptr, passed_mode, sizetree)
struct args_size *offset_ptr;
}
}
}
-#endif
\f
/* Walk the tree of blocks describing the binding levels within a function
and warn about uninitialized variables.
for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
{
if (warn_uninitialized
- && TREE_CODE (decl) == VAR_DECL
+ && TREE_CODE (decl) == VAR_DECL
/* These warnings are unreliable for and aggregates
because assigning the fields one by one can fail to convince
flow.c that the entire aggregate was initialized.
warning_with_decl (decl,
"`%s' might be used uninitialized in this function");
if (extra_warnings
- && TREE_CODE (decl) == VAR_DECL
+ && TREE_CODE (decl) == VAR_DECL
&& DECL_RTL (decl) != 0
&& GET_CODE (DECL_RTL (decl)) == REG
&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
#else
/* If rounding needed, allocate extra space
to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
-#ifdef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_REAL_SIZE \
(TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
-#else
-#define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
-#endif
tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
fp ? fp : cfun);
#endif
round_trampoline_addr (tramp)
rtx tramp;
{
-#ifdef TRAMPOLINE_ALIGNMENT
/* Round address up to desired boundary. */
rtx temp = gen_reg_rtx (Pmode);
rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
temp, 0, OPTAB_LIB_WIDEN);
tramp = expand_simple_binop (Pmode, AND, temp, mask,
temp, 0, OPTAB_LIB_WIDEN);
-#endif
+
return tramp;
}
/* Remove deleted blocks from the block fragment chains. */
reorder_fix_fragments (block);
-
- VARRAY_FREE (block_stack);
}
/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
static void
reorder_fix_fragments (block)
- tree block;
+ tree block;
{
while (block)
{
if (! TREE_ASM_WRITTEN (dup_origin))
{
new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
-
+
/* Find the first of the remaining fragments. There must
be at least one -- the current block. */
while (! TREE_ASM_WRITTEN (new_origin))
/* Indicate we have no need of a frame pointer yet. */
frame_pointer_needed = 0;
- /* By default assume not varargs or stdarg. */
- current_function_varargs = 0;
+ /* By default assume not stdarg. */
current_function_stdarg = 0;
/* We haven't made any trampolines for this function yet. */
current_function_outgoing_args_size = 0;
- if (init_lang_status)
- (*init_lang_status) (cfun);
+ current_function_funcdef_no = funcdef_no++;
+
+  cfun->arc_profile = profile_arc_flag || flag_test_coverage;
+
+ cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
+
+ cfun->max_jumptable_ents = 0;
+
+ (*lang_hooks.function.init) (cfun);
if (init_machine_status)
- (*init_machine_status) (cfun);
+ cfun->machine = (*init_machine_status) ();
}
/* Initialize the rtl expansion mechanism so that we can do simple things
VARRAY_GROW (sibcall_epilogue, 0);
}
-/* Indicate that the current function uses extra args
- not explicitly mentioned in the argument list in any fashion. */
-
-void
-mark_varargs ()
-{
- current_function_varargs = 1;
-}
-
/* Expand a call to __main at the beginning of a possible main function. */
#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#endif
if (tmp != stack_pointer_rtx)
emit_move_insn (stack_pointer_rtx, tmp);
-
+
/* Enlist allocate_dynamic_stack_space to pick up the pieces. */
tmp = force_reg (Pmode, const0_rtx);
allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
#endif
}
\f
-extern struct obstack permanent_obstack;
-
/* The PENDING_SIZES represent the sizes of variable-sized types.
Create RTL for the various sizes now (using temporary variables),
so that we can refer to the sizes from the RTL we are generating
avoid conflicts with the parameter passing registers. */
if (SMALL_REGISTER_CLASSES && current_function_needs_context)
- if (GET_CODE (static_chain_incoming_rtx) != REG)
- emit_move_insn (last_ptr, static_chain_incoming_rtx);
+ if (GET_CODE (static_chain_incoming_rtx) != REG)
+ emit_move_insn (last_ptr, static_chain_incoming_rtx);
/* The following was moved from init_function_start.
The move is supposed to make sdb output more accurate. */
Pmode);
}
-#ifdef PROFILE_HOOK
if (current_function_profile)
- PROFILE_HOOK (profile_label_no);
+ {
+#ifdef PROFILE_HOOK
+ PROFILE_HOOK (current_function_funcdef_no);
#endif
+ }
/* After the display initializations is where the tail-recursion label
should go, if we end up needing one. Ensure we have a NOTE here
diddle_return_value (do_use_return_reg, NULL);
}
+static GTY(()) rtx initial_trampoline;
+
/* Generate RTL for the end of the current function.
FILENAME and LINE are the current position in the source file.
tree link;
rtx clobber_after;
-#ifdef TRAMPOLINE_TEMPLATE
- static rtx initial_trampoline;
-#endif
-
finish_expr_for_function ();
/* If arg_pointer_save_area was referenced only from a nested
initial_trampoline
= gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
-
- ggc_add_rtx_root (&initial_trampoline, 1);
}
#endif
#ifdef TRAMPOLINE_TEMPLATE
blktramp = replace_equiv_address (initial_trampoline, tramp);
emit_block_move (blktramp, initial_trampoline,
- GEN_INT (TRAMPOLINE_SIZE));
+ GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
#endif
INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
seq = get_insns ();
end_sequence ();
/* Put those insns at entry to the containing function (this one). */
- emit_insns_before (seq, tail_recursion_reentry);
+ emit_insn_before (seq, tail_recursion_reentry);
}
/* If we are doing stack checking and this function makes calls,
GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, tail_recursion_reentry);
+ emit_insn_before (seq, tail_recursion_reentry);
break;
}
}
/* If this is a BLKmode structure being returned in registers,
then use the mode computed in expand_return. Note that if
- decl_rtl is memory, then its mode may have been changed,
+ decl_rtl is memory, then its mode may have been changed,
but that current_function_return_rtx has not. */
if (GET_MODE (real_decl_rtl) == BLKmode)
PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
/* If a named return value dumped decl_return to memory, then
- we may need to re-do the PROMOTE_MODE signed/unsigned
+ we may need to re-do the PROMOTE_MODE signed/unsigned
extension. */
if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
{
/* Emit the actual code to clobber return register. */
{
rtx seq, after;
-
+
start_sequence ();
clobber_return_register ();
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
after = emit_insn_after (seq, clobber_after);
-
+
if (clobber_after != after)
cfun->x_clobber_return_insn = after;
}
{
rtx seq;
- /* Save the arg pointer at the beginning of the function. The
+ /* Save the arg pointer at the beginning of the function. The
generated stack slot may not be a valid memory address, so we
have to check it and fix it if necessary. */
start_sequence ();
emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
push_topmost_sequence ();
return ret;
}
\f
-/* Extend a vector that records the INSN_UIDs of INSNS (either a
- sequence or a single insn). */
+/* Extend a vector that records the INSN_UIDs of INSNS
+ (a list of one or more insns). */
static void
record_insns (insns, vecp)
rtx insns;
varray_type *vecp;
{
- if (GET_CODE (insns) == SEQUENCE)
- {
- int len = XVECLEN (insns, 0);
- int i = VARRAY_SIZE (*vecp);
+ int i, len;
+ rtx tmp;
- VARRAY_GROW (*vecp, i + len);
- while (--len >= 0)
- {
- VARRAY_INT (*vecp, i) = INSN_UID (XVECEXP (insns, 0, len));
- ++i;
- }
+ tmp = insns;
+ len = 0;
+ while (tmp != NULL_RTX)
+ {
+ len++;
+ tmp = NEXT_INSN (tmp);
}
- else
+
+ i = VARRAY_SIZE (*vecp);
+ VARRAY_GROW (*vecp, i + len);
+ tmp = insns;
+ while (tmp != NULL_RTX)
{
- int i = VARRAY_SIZE (*vecp);
- VARRAY_GROW (*vecp, i + 1);
- VARRAY_INT (*vecp, i) = INSN_UID (insns);
+ VARRAY_INT (*vecp, i) = INSN_UID (tmp);
+ i++;
+ tmp = NEXT_INSN (tmp);
}
}
-/* Determine how many INSN_UIDs in VEC are part of INSN. */
+/* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
+ be running after reorg, SEQUENCE rtl is possible. */
static int
contains (insn, vec)
static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
static void emit_equiv_load PARAMS ((struct epi_info *));
-/* Modify SEQ, a SEQUENCE that is part of the epilogue, to no modifications
- to the stack pointer. Return the new sequence. */
+/* Modify INSN, a list of one or more insns that is part of the epilogue, to
+   make no modifications to the stack pointer.  Return the new list of insns. */
static rtx
-keep_stack_depressed (seq)
- rtx seq;
+keep_stack_depressed (insns)
+ rtx insns;
{
- int i, j;
+ int j;
struct epi_info info;
+ rtx insn, next;
  /* If the epilogue is just a single instruction, it must be OK as is. */
- if (GET_CODE (seq) != SEQUENCE)
- return seq;
+ if (NEXT_INSN (insns) == NULL_RTX)
+ return insns;
/* Otherwise, start a sequence, initialize the information we have, and
process all the insns we were given. */
info.sp_offset = 0;
info.equiv_reg_src = 0;
- for (i = 0; i < XVECLEN (seq, 0); i++)
+ insn = insns;
+ next = NULL_RTX;
+ while (insn != NULL_RTX)
{
- rtx insn = XVECEXP (seq, 0, i);
+ next = NEXT_INSN (insn);
if (!INSN_P (insn))
{
add_insn (insn);
+ insn = next;
continue;
}
{
emit_equiv_load (&info);
add_insn (insn);
+ insn = next;
continue;
}
else if (GET_CODE (retaddr) == MEM
info.sp_equiv_reg = info.new_sp_equiv_reg;
info.sp_offset = info.new_sp_offset;
+
+ insn = next;
}
- seq = gen_sequence ();
+ insns = get_insns ();
end_sequence ();
- return seq;
+ return insns;
}
/* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
structure that contains information about what we've seen so far. We
- process this SET by either updating that data or by emitting one or
+ process this SET by either updating that data or by emitting one or
more insns. */
static void
emit_insn (seq);
/* Retain a map of the prologue insns. */
- if (GET_CODE (seq) != SEQUENCE)
- seq = get_insns ();
record_insns (seq, &prologue);
prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
/* Can't deal with multiple successors of the entry block
emit_jump_insn (seq);
/* Retain a map of the epilogue insns. */
- if (GET_CODE (seq) != SEQUENCE)
- seq = get_insns ();
record_insns (seq, &epilogue);
- seq = gen_sequence ();
+ seq = get_insns ();
end_sequence ();
insert_insn_on_edge (seq, e);
continue;
start_sequence ();
- seq = gen_sibcall_epilogue ();
+ emit_insn (gen_sibcall_epilogue ());
+ seq = get_insns ();
end_sequence ();
+ /* Retain a map of the epilogue insns. Used in life analysis to
+ avoid getting rid of sibcall epilogue insns. Do this before we
+ actually emit the sequence. */
+ record_insns (seq, &sibcall_epilogue);
+
i = PREV_INSN (insn);
newinsn = emit_insn_before (seq, insn);
-
- /* Retain a map of the epilogue insns. Used in life analysis to
- avoid getting rid of sibcall epilogue insns. */
- record_insns (GET_CODE (seq) == SEQUENCE
- ? seq : newinsn, &sibcall_epilogue);
}
#endif
note before the end of the first basic block, if there isn't
one already there.
- ??? This behaviour is completely broken when dealing with
+ ??? This behavior is completely broken when dealing with
multiple entry functions. We simply place the note always
into first basic block and let alternate entry points
to be missed.
}
/* Find the last line number note in the first block. */
- for (insn = BASIC_BLOCK (0)->end;
+ for (insn = ENTRY_BLOCK_PTR->next_bb->end;
insn != prologue_end && insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
break;
}
}
-
+
if (last)
{
rtx next;
#endif /* HAVE_prologue or HAVE_epilogue */
}
-/* Mark P for GC. */
-
-static void
-mark_function_status (p)
- struct function *p;
-{
- struct var_refs_queue *q;
- struct temp_slot *t;
- int i;
- rtx *r;
-
- if (p == 0)
- return;
-
- ggc_mark_rtx (p->arg_offset_rtx);
-
- if (p->x_parm_reg_stack_loc)
- for (i = p->x_max_parm_reg, r = p->x_parm_reg_stack_loc;
- i > 0; --i, ++r)
- ggc_mark_rtx (*r);
-
- ggc_mark_rtx (p->return_rtx);
- ggc_mark_rtx (p->x_cleanup_label);
- ggc_mark_rtx (p->x_return_label);
- ggc_mark_rtx (p->x_save_expr_regs);
- ggc_mark_rtx (p->x_stack_slot_list);
- ggc_mark_rtx (p->x_parm_birth_insn);
- ggc_mark_rtx (p->x_tail_recursion_label);
- ggc_mark_rtx (p->x_tail_recursion_reentry);
- ggc_mark_rtx (p->internal_arg_pointer);
- ggc_mark_rtx (p->x_arg_pointer_save_area);
- ggc_mark_tree (p->x_rtl_expr_chain);
- ggc_mark_rtx (p->x_last_parm_insn);
- ggc_mark_tree (p->x_context_display);
- ggc_mark_tree (p->x_trampoline_list);
- ggc_mark_rtx (p->epilogue_delay_list);
- ggc_mark_rtx (p->x_clobber_return_insn);
-
- for (t = p->x_temp_slots; t != 0; t = t->next)
- {
- ggc_mark (t);
- ggc_mark_rtx (t->slot);
- ggc_mark_rtx (t->address);
- ggc_mark_tree (t->rtl_expr);
- ggc_mark_tree (t->type);
- }
-
- for (q = p->fixup_var_refs_queue; q != 0; q = q->next)
- {
- ggc_mark (q);
- ggc_mark_rtx (q->modified);
- }
-
- ggc_mark_rtx (p->x_nonlocal_goto_handler_slots);
- ggc_mark_rtx (p->x_nonlocal_goto_handler_labels);
- ggc_mark_rtx (p->x_nonlocal_goto_stack_level);
- ggc_mark_tree (p->x_nonlocal_labels);
-
- mark_hard_reg_initial_vals (p);
-}
-
-/* Mark the struct function pointed to by *ARG for GC, if it is not
- NULL. This is used to mark the current function and the outer
- function chain. */
-
-static void
-maybe_mark_struct_function (arg)
- void *arg;
-{
- struct function *f = *(struct function **) arg;
-
- if (f == 0)
- return;
-
- ggc_mark_struct_function (f);
-}
-
-/* Mark a struct function * for GC. This is called from ggc-common.c. */
-
-void
-ggc_mark_struct_function (f)
- struct function *f;
-{
- ggc_mark (f);
- ggc_mark_tree (f->decl);
-
- mark_function_status (f);
- mark_eh_status (f->eh);
- mark_stmt_status (f->stmt);
- mark_expr_status (f->expr);
- mark_emit_status (f->emit);
- mark_varasm_status (f->varasm);
-
- if (mark_machine_status)
- (*mark_machine_status) (f);
- if (mark_lang_status)
- (*mark_lang_status) (f);
-
- if (f->original_arg_vector)
- ggc_mark_rtvec ((rtvec) f->original_arg_vector);
- if (f->original_decl_initial)
- ggc_mark_tree (f->original_decl_initial);
- if (f->outer)
- ggc_mark_struct_function (f->outer);
-}
-
/* Called once, at initialization, to initialize function.c. */
void
init_function_once ()
{
- ggc_add_root (&cfun, 1, sizeof cfun, maybe_mark_struct_function);
- ggc_add_root (&outer_function_chain, 1, sizeof outer_function_chain,
- maybe_mark_struct_function);
-
VARRAY_INT_INIT (prologue, 0, "prologue");
VARRAY_INT_INIT (epilogue, 0, "epilogue");
VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
}
+
+#include "gt-function.h"