1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
31 #include "hard-reg-set.h"
34 #include "basic-block.h"
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162 (spill_reg_order prevents these registers from being used to start a
164 static HARD_REG_SET bad_spill_regs;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
206 static char spill_indirect_levels;
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
212 char indirect_symref_ok;
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
216 char double_reg_address_ok;
218 /* Record the stack slot for each spilled hard register. */
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
222 /* Width allocated so far for that stack slot. */
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
231 char *basic_block_needs[N_REG_CLASSES];
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
240 int caller_save_needed;
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
245 int reload_in_progress = 0;
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
258 struct obstack reload_obstack;
259 char *reload_firstobj;
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
272 static struct elim_table
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
296 #ifdef ELIMINABLE_REGS
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
322 /* Number of labels in the current function. */
324 static int num_labels;
326 struct hard_reg_n_uses { int regno; int uses; };
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static void reload_as_needed PROTO((rtx, int));
349 static void forget_old_reloads_1 PROTO((rtx, rtx));
350 static int reload_reg_class_lower PROTO((short *, short *));
351 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 static int reload_reg_free_p PROTO((int, int, enum reload_type));
356 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
357 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
358 static int allocate_reload_reg PROTO((int, rtx, int, int));
359 static void choose_reload_regs PROTO((rtx, rtx));
360 static void merge_assigned_reloads PROTO((rtx));
361 static void emit_reload_insns PROTO((rtx));
362 static void delete_output_reload PROTO((rtx, int, rtx));
363 static void inc_for_reload PROTO((rtx, rtx, int));
364 static int constraint_accepts_reg_p PROTO((char *, rtx));
365 static int count_occurrences PROTO((rtx, rtx));
367 /* Initialize the reload pass once per compilation. */
374 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
375 Set spill_indirect_levels to the number of levels such addressing is
376 permitted, zero if it is not permitted at all. */
379 = gen_rtx (MEM, Pmode,
380 gen_rtx (PLUS, Pmode,
381 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
383 spill_indirect_levels = 0;
385 while (memory_address_p (QImode, tem))
387 spill_indirect_levels++;
388 tem = gen_rtx (MEM, Pmode, tem);
391 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
393 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
394 indirect_symref_ok = memory_address_p (QImode, tem);
396 /* See if reg+reg is a valid (and offsettable) address. */
398 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
400 tem = gen_rtx (PLUS, Pmode,
401 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
402 gen_rtx (REG, Pmode, i));
403 /* This way, we make sure that reg+reg is an offsettable address. */
404 tem = plus_constant (tem, 4);
406 if (memory_address_p (QImode, tem))
408 double_reg_address_ok = 1;
413 /* Initialize obstack for our rtl allocation. */
414 gcc_obstack_init (&reload_obstack);
415 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
418 /* Main entry point for the reload pass.
420 FIRST is the first insn of the function being compiled.
422 GLOBAL nonzero means we were called from global_alloc
423 and should attempt to reallocate any pseudoregs that we
424 displace from hard regs we will use for reloads.
425 If GLOBAL is zero, we do not have enough information to do that,
426 so any pseudo reg that is spilled must go to the stack.
428 DUMPFILE is the global-reg debugging dump file stream, or 0.
429 If it is nonzero, messages are written to it to describe
430 which registers are seized as reload regs, which pseudo regs
431 are spilled from them, and where the pseudo regs are reallocated to.
433 Return value is nonzero if reload failed
434 and we must not do any more for this function. */
437 reload (first, global, dumpfile)
445 register struct elim_table *ep;
447 int something_changed;
448 int something_needs_reloads;
449 int something_needs_elimination;
450 int new_basic_block_needs;
451 enum reg_class caller_save_spill_class = NO_REGS;
452 int caller_save_group_size = 1;
454 /* Nonzero means we couldn't get enough spill regs. */
457 /* The basic block number currently being processed for INSN. */
460 /* Make sure even insns with volatile mem refs are recognizable. */
463 /* Enable find_equiv_reg to distinguish insns made by reload. */
464 reload_first_uid = get_max_uid ();
466 for (i = 0; i < N_REG_CLASSES; i++)
467 basic_block_needs[i] = 0;
469 #ifdef SECONDARY_MEMORY_NEEDED
470 /* Initialize the secondary memory table. */
471 clear_secondary_mem ();
474 /* Remember which hard regs appear explicitly
475 before we merge into `regs_ever_live' the ones in which
476 pseudo regs have been allocated. */
477 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
479 /* We don't have a stack slot for any spill reg yet. */
480 bzero (spill_stack_slot, sizeof spill_stack_slot);
481 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
483 /* Initialize the save area information for caller-save, in case some
487 /* Compute which hard registers are now in use
488 as homes for pseudo registers.
489 This is done here rather than (eg) in global_alloc
490 because this point is reached even if not optimizing. */
492 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 for (i = 0; i < scratch_list_length; i++)
497 mark_scratch_live (scratch_list[i]);
499 /* Make sure that the last insn in the chain
500 is not something that needs reloading. */
501 emit_note (NULL_PTR, NOTE_INSN_DELETED);
503 /* Find all the pseudo registers that didn't get hard regs
504 but do have known equivalent constants or memory slots.
505 These include parameters (known equivalent to parameter slots)
506 and cse'd or loop-moved constant memory addresses.
508 Record constant equivalents in reg_equiv_constant
509 so they will be substituted by find_reloads.
510 Record memory equivalents in reg_mem_equiv so they can
511 be substituted eventually by altering the REG-rtx's. */
513 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
514 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
515 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
517 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
519 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero (reg_equiv_init, max_regno * sizeof (rtx));
521 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero (reg_equiv_address, max_regno * sizeof (rtx));
523 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
524 bzero (reg_max_ref_width, max_regno * sizeof (int));
525 cannot_omit_stores = (char *) alloca (max_regno);
526 bzero (cannot_omit_stores, max_regno);
528 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
529 Also find all paradoxical subregs
530 and find largest such for each pseudo. */
532 for (insn = first; insn; insn = NEXT_INSN (insn))
534 rtx set = single_set (insn);
536 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
538 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
540 #ifdef LEGITIMATE_PIC_OPERAND_P
541 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
542 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
546 rtx x = XEXP (note, 0);
547 i = REGNO (SET_DEST (set));
548 if (i > LAST_VIRTUAL_REGISTER)
550 if (GET_CODE (x) == MEM)
551 reg_equiv_memory_loc[i] = x;
552 else if (CONSTANT_P (x))
554 if (LEGITIMATE_CONSTANT_P (x))
555 reg_equiv_constant[i] = x;
557 reg_equiv_memory_loc[i]
558 = force_const_mem (GET_MODE (SET_DEST (set)), x);
563 /* If this register is being made equivalent to a MEM
564 and the MEM is not SET_SRC, the equivalencing insn
565 is one with the MEM as a SET_DEST and it occurs later.
566 So don't mark this insn now. */
567 if (GET_CODE (x) != MEM
568 || rtx_equal_p (SET_SRC (set), x))
569 reg_equiv_init[i] = insn;
574 /* If this insn is setting a MEM from a register equivalent to it,
575 this is the equivalencing insn. */
576 else if (set && GET_CODE (SET_DEST (set)) == MEM
577 && GET_CODE (SET_SRC (set)) == REG
578 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
579 && rtx_equal_p (SET_DEST (set),
580 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
581 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
583 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
584 scan_paradoxical_subregs (PATTERN (insn));
587 /* Does this function require a frame pointer? */
589 frame_pointer_needed = (! flag_omit_frame_pointer
590 #ifdef EXIT_IGNORE_STACK
591 /* ?? If EXIT_IGNORE_STACK is set, we will not save
592 and restore sp for alloca. So we can't eliminate
593 the frame pointer in that case. At some point,
594 we should improve this by emitting the
595 sp-adjusting insns for this case. */
596 || (current_function_calls_alloca
597 && EXIT_IGNORE_STACK)
599 || FRAME_POINTER_REQUIRED);
603 /* Initialize the table of registers to eliminate. The way we do this
604 depends on how the eliminable registers were defined. */
605 #ifdef ELIMINABLE_REGS
606 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
608 ep->can_eliminate = ep->can_eliminate_previous
609 = (CAN_ELIMINATE (ep->from, ep->to)
610 && (ep->from != HARD_FRAME_POINTER_REGNUM
611 || ! frame_pointer_needed));
614 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
615 = ! frame_pointer_needed;
618 /* Count the number of eliminable registers and build the FROM and TO
619 REG rtx's. Note that code in gen_rtx will cause, e.g.,
620 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
621 We depend on this. */
622 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
624 num_eliminable += ep->can_eliminate;
625 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
626 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
629 num_labels = max_label_num () - get_first_label_num ();
631 /* Allocate the tables used to store offset information at labels. */
632 offsets_known_at = (char *) alloca (num_labels);
634 = (int (*)[NUM_ELIMINABLE_REGS])
635 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
637 offsets_known_at -= get_first_label_num ();
638 offsets_at -= get_first_label_num ();
640 /* Alter each pseudo-reg rtx to contain its hard reg number.
641 Assign stack slots to the pseudos that lack hard regs or equivalents.
642 Do not touch virtual registers. */
644 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
647 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
648 because the stack size may be a part of the offset computation for
649 register elimination. */
650 assign_stack_local (BLKmode, 0, 0);
652 /* If we have some registers we think can be eliminated, scan all insns to
653 see if there is an insn that sets one of these registers to something
654 other than itself plus a constant. If so, the register cannot be
655 eliminated. Doing this scan here eliminates an extra pass through the
656 main reload loop in the most common case where register elimination
658 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
659 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
660 || GET_CODE (insn) == CALL_INSN)
661 note_stores (PATTERN (insn), mark_not_eliminable);
663 #ifndef REGISTER_CONSTRAINTS
664 /* If all the pseudo regs have hard regs,
665 except for those that are never referenced,
666 we know that no reloads are needed. */
667 /* But that is not true if there are register constraints, since
668 in that case some pseudos might be in the wrong kind of hard reg. */
670 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
671 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
674 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
678 /* Compute the order of preference for hard registers to spill.
679 Store them by decreasing preference in potential_reload_regs. */
681 order_regs_for_reload ();
683 /* So far, no hard regs have been spilled. */
685 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
686 spill_reg_order[i] = -1;
688 /* On most machines, we can't use any register explicitly used in the
689 rtl as a spill register. But on some, we have to. Those will have
690 taken care to keep the life of hard regs as short as possible. */
692 #ifdef SMALL_REGISTER_CLASSES
693 CLEAR_HARD_REG_SET (forbidden_regs);
695 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
698 /* Spill any hard regs that we know we can't eliminate. */
699 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
700 if (! ep->can_eliminate)
702 spill_hard_reg (ep->from, global, dumpfile, 1);
703 regs_ever_live[ep->from] = 1;
707 for (i = 0; i < N_REG_CLASSES; i++)
709 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
710 bzero (basic_block_needs[i], n_basic_blocks);
713 /* From now on, we need to emit any moves without making new pseudos. */
714 reload_in_progress = 1;
716 /* This loop scans the entire function each go-round
717 and repeats until one repetition spills no additional hard regs. */
719 /* This flag is set when a pseudo reg is spilled,
720 to require another pass. Note that getting an additional reload
721 reg does not necessarily imply any pseudo reg was spilled;
722 sometimes we find a reload reg that no pseudo reg was allocated in. */
723 something_changed = 1;
724 /* This flag is set if there are any insns that require reloading. */
725 something_needs_reloads = 0;
726 /* This flag is set if there are any insns that require register
728 something_needs_elimination = 0;
729 while (something_changed)
733 /* For each class, number of reload regs needed in that class.
734 This is the maximum over all insns of the needs in that class
735 of the individual insn. */
736 int max_needs[N_REG_CLASSES];
737 /* For each class, size of group of consecutive regs
738 that is needed for the reloads of this class. */
739 int group_size[N_REG_CLASSES];
740 /* For each class, max number of consecutive groups needed.
741 (Each group contains group_size[CLASS] consecutive registers.) */
742 int max_groups[N_REG_CLASSES];
743 /* For each class, max number needed of regs that don't belong
744 to any of the groups. */
745 int max_nongroups[N_REG_CLASSES];
746 /* For each class, the machine mode which requires consecutive
747 groups of regs of that class.
748 If two different modes ever require groups of one class,
749 they must be the same size and equally restrictive for that class,
750 otherwise we can't handle the complexity. */
751 enum machine_mode group_mode[N_REG_CLASSES];
752 /* Record the insn where each maximum need is first found. */
753 rtx max_needs_insn[N_REG_CLASSES];
754 rtx max_groups_insn[N_REG_CLASSES];
755 rtx max_nongroups_insn[N_REG_CLASSES];
757 int starting_frame_size = get_frame_size ();
758 static char *reg_class_names[] = REG_CLASS_NAMES;
760 something_changed = 0;
761 bzero (max_needs, sizeof max_needs);
762 bzero (max_groups, sizeof max_groups);
763 bzero (max_nongroups, sizeof max_nongroups);
764 bzero (max_needs_insn, sizeof max_needs_insn);
765 bzero (max_groups_insn, sizeof max_groups_insn);
766 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
767 bzero (group_size, sizeof group_size);
768 for (i = 0; i < N_REG_CLASSES; i++)
769 group_mode[i] = VOIDmode;
771 /* Keep track of which basic blocks are needing the reloads. */
774 /* Remember whether any element of basic_block_needs
775 changes from 0 to 1 in this pass. */
776 new_basic_block_needs = 0;
778 /* Reset all offsets on eliminable registers to their initial values. */
779 #ifdef ELIMINABLE_REGS
780 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
782 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
783 ep->previous_offset = ep->offset
784 = ep->max_offset = ep->initial_offset;
787 #ifdef INITIAL_FRAME_POINTER_OFFSET
788 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
790 if (!FRAME_POINTER_REQUIRED)
792 reg_eliminate[0].initial_offset = 0;
794 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
795 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
798 num_not_at_initial_offset = 0;
800 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
802 /* Set a known offset for each forced label to be at the initial offset
803 of each elimination. We do this because we assume that all
804 computed jumps occur from a location where each elimination is
805 at its initial offset. */
807 for (x = forced_labels; x; x = XEXP (x, 1))
809 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
811 /* For each pseudo register that has an equivalent location defined,
812 try to eliminate any eliminable registers (such as the frame pointer)
813 assuming initial offsets for the replacement register, which
816 If the resulting location is directly addressable, substitute
817 the MEM we just got directly for the old REG.
819 If it is not addressable but is a constant or the sum of a hard reg
820 and constant, it is probably not addressable because the constant is
821 out of range, in that case record the address; we will generate
822 hairy code to compute the address in a register each time it is
823 needed. Similarly if it is a hard register, but one that is not
824 valid as an address register.
826 If the location is not addressable, but does not have one of the
827 above forms, assign a stack slot. We have to do this to avoid the
828 potential of producing lots of reloads if, e.g., a location involves
829 a pseudo that didn't get a hard register and has an equivalent memory
830 location that also involves a pseudo that didn't get a hard register.
832 Perhaps at some point we will improve reload_when_needed handling
833 so this problem goes away. But that's very hairy. */
835 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
836 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
838 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
840 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
842 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
843 else if (CONSTANT_P (XEXP (x, 0))
844 || (GET_CODE (XEXP (x, 0)) == REG
845 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
846 || (GET_CODE (XEXP (x, 0)) == PLUS
847 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
848 && (REGNO (XEXP (XEXP (x, 0), 0))
849 < FIRST_PSEUDO_REGISTER)
850 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
851 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
854 /* Make a new stack slot. Then indicate that something
855 changed so we go back and recompute offsets for
856 eliminable registers because the allocation of memory
857 below might change some offset. reg_equiv_{mem,address}
858 will be set up for this pseudo on the next pass around
860 reg_equiv_memory_loc[i] = 0;
861 reg_equiv_init[i] = 0;
863 something_changed = 1;
867 /* If we allocated another pseudo to the stack, redo elimination
869 if (something_changed)
872 /* If caller-saves needs a group, initialize the group to include
873 the size and mode required for caller-saves. */
875 if (caller_save_group_size > 1)
877 group_mode[(int) caller_save_spill_class] = Pmode;
878 group_size[(int) caller_save_spill_class] = caller_save_group_size;
881 /* Compute the most additional registers needed by any instruction.
882 Collect information separately for each class of regs. */
884 for (insn = first; insn; insn = NEXT_INSN (insn))
886 if (global && this_block + 1 < n_basic_blocks
887 && insn == basic_block_head[this_block+1])
890 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
891 might include REG_LABEL), we need to see what effects this
892 has on the known offsets at labels. */
894 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
895 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
896 && REG_NOTES (insn) != 0))
897 set_label_offsets (insn, insn, 0);
899 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
901 /* Nonzero means don't use a reload reg that overlaps
902 the place where a function value can be returned. */
903 rtx avoid_return_reg = 0;
905 rtx old_body = PATTERN (insn);
906 int old_code = INSN_CODE (insn);
907 rtx old_notes = REG_NOTES (insn);
908 int did_elimination = 0;
909 int max_total_input_groups = 0, max_total_output_groups = 0;
911 /* To compute the number of reload registers of each class
912 needed for an insn, we must simulate what choose_reload_regs
913 can do. We do this by splitting an insn into an "input" and
914 an "output" part. RELOAD_OTHER reloads are used in both.
915 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
916 which must be live over the entire input section of reloads,
917 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
918 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
921 The registers needed for output are RELOAD_OTHER and
922 RELOAD_FOR_OUTPUT, which are live for the entire output
923 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
924 reloads for each operand.
926 The total number of registers needed is the maximum of the
927 inputs and outputs. */
929 /* These just count RELOAD_OTHER. */
930 int insn_needs[N_REG_CLASSES];
931 int insn_groups[N_REG_CLASSES];
932 int insn_total_groups = 0;
934 /* Count RELOAD_FOR_INPUT reloads. */
935 int insn_needs_for_inputs[N_REG_CLASSES];
936 int insn_groups_for_inputs[N_REG_CLASSES];
937 int insn_total_groups_for_inputs = 0;
939 /* Count RELOAD_FOR_OUTPUT reloads. */
940 int insn_needs_for_outputs[N_REG_CLASSES];
941 int insn_groups_for_outputs[N_REG_CLASSES];
942 int insn_total_groups_for_outputs = 0;
944 /* Count RELOAD_FOR_INSN reloads. */
945 int insn_needs_for_insn[N_REG_CLASSES];
946 int insn_groups_for_insn[N_REG_CLASSES];
947 int insn_total_groups_for_insn = 0;
949 /* Count RELOAD_FOR_OTHER_ADDRESS reloads. */
950 int insn_needs_for_other_addr[N_REG_CLASSES];
951 int insn_groups_for_other_addr[N_REG_CLASSES];
952 int insn_total_groups_for_other_addr = 0;
954 /* Count RELOAD_FOR_INPUT_ADDRESS reloads. */
955 int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
956 int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
957 int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];
959 /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads. */
960 int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
961 int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
962 int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];
964 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
965 int insn_needs_for_op_addr[N_REG_CLASSES];
966 int insn_groups_for_op_addr[N_REG_CLASSES];
967 int insn_total_groups_for_op_addr = 0;
969 #if 0 /* This wouldn't work nowadays, since optimize_bit_field
970 looks for non-strict memory addresses. */
971 /* Optimization: a bit-field instruction whose field
972 happens to be a byte or halfword in memory
973 can be changed to a move instruction. */
975 if (GET_CODE (PATTERN (insn)) == SET)
977 rtx dest = SET_DEST (PATTERN (insn));
978 rtx src = SET_SRC (PATTERN (insn));
980 if (GET_CODE (dest) == ZERO_EXTRACT
981 || GET_CODE (dest) == SIGN_EXTRACT)
982 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
983 if (GET_CODE (src) == ZERO_EXTRACT
984 || GET_CODE (src) == SIGN_EXTRACT)
985 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
989 /* If needed, eliminate any eliminable registers. */
991 did_elimination = eliminate_regs_in_insn (insn, 0);
993 #ifdef SMALL_REGISTER_CLASSES
994 /* Set avoid_return_reg if this is an insn
995 that might use the value of a function call. */
996 if (GET_CODE (insn) == CALL_INSN)
998 if (GET_CODE (PATTERN (insn)) == SET)
999 after_call = SET_DEST (PATTERN (insn));
1000 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1001 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1002 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1006 else if (after_call != 0
1007 && !(GET_CODE (PATTERN (insn)) == SET
1008 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1010 if (reg_mentioned_p (after_call, PATTERN (insn)))
1011 avoid_return_reg = after_call;
1014 #endif /* SMALL_REGISTER_CLASSES */
1016 /* Analyze the instruction. */
1017 find_reloads (insn, 0, spill_indirect_levels, global,
1020 /* Remember for later shortcuts which insns had any reloads or
1021 register eliminations.
1023 One might think that it would be worthwhile to mark insns
1024 that need register replacements but not reloads, but this is
1025 not safe because find_reloads may do some manipulation of
1026 the insn (such as swapping commutative operands), which would
1027 be lost when we restore the old pattern after register
1028 replacement. So the actions of find_reloads must be redone in
1029 subsequent passes or in reload_as_needed.
1031 However, it is safe to mark insns that need reloads
1032 but not register replacement. */
1034 PUT_MODE (insn, (did_elimination ? QImode
1035 : n_reloads ? HImode
1036 : GET_MODE (insn) == DImode ? DImode
1039 /* Discard any register replacements done. */
1040 if (did_elimination)
1042 obstack_free (&reload_obstack, reload_firstobj);
1043 PATTERN (insn) = old_body;
1044 INSN_CODE (insn) = old_code;
1045 REG_NOTES (insn) = old_notes;
1046 something_needs_elimination = 1;
1049 /* If this insn has no reloads, we need not do anything except
1050 in the case of a CALL_INSN when we have caller-saves and
1051 caller-save needs reloads. */
1054 && ! (GET_CODE (insn) == CALL_INSN
1055 && caller_save_spill_class != NO_REGS))
1058 something_needs_reloads = 1;
1060 for (i = 0; i < N_REG_CLASSES; i++)
1062 insn_needs[i] = 0, insn_groups[i] = 0;
1063 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1064 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1065 insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
1066 insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
1067 insn_needs_for_other_addr[i] = 0;
1068 insn_groups_for_other_addr[i] = 0;
1071 for (i = 0; i < reload_n_operands; i++)
1073 insn_total_groups_for_in_addr[i] = 0;
1074 insn_total_groups_for_out_addr[i] = 0;
1076 for (j = 0; j < N_REG_CLASSES; j++)
1078 insn_needs_for_in_addr[i][j] = 0;
1079 insn_needs_for_out_addr[i][j] = 0;
1080 insn_groups_for_in_addr[i][j] = 0;
1081 insn_groups_for_out_addr[i][j] = 0;
1085 /* Count each reload once in every class
1086 containing the reload's own class. */
1088 for (i = 0; i < n_reloads; i++)
1090 register enum reg_class *p;
1091 enum reg_class class = reload_reg_class[i];
1093 enum machine_mode mode;
1096 int *this_total_groups;
1098 /* Don't count the dummy reloads, for which one of the
1099 regs mentioned in the insn can be used for reloading.
1100 Don't count optional reloads.
1101 Don't count reloads that got combined with others. */
1102 if (reload_reg_rtx[i] != 0
1103 || reload_optional[i] != 0
1104 || (reload_out[i] == 0 && reload_in[i] == 0
1105 && ! reload_secondary_p[i]))
1108 /* Show that a reload register of this class is needed
1109 in this basic block. We do not use insn_needs and
1110 insn_groups because they are overly conservative for
1112 if (global && ! basic_block_needs[(int) class][this_block])
1114 basic_block_needs[(int) class][this_block] = 1;
1115 new_basic_block_needs = 1;
1118 /* Decide which time-of-use to count this reload for. */
1119 switch (reload_when_needed[i])
1122 this_needs = insn_needs;
1123 this_groups = insn_groups;
1124 this_total_groups = &insn_total_groups;
1127 case RELOAD_FOR_INPUT:
1128 this_needs = insn_needs_for_inputs;
1129 this_groups = insn_groups_for_inputs;
1130 this_total_groups = &insn_total_groups_for_inputs;
1133 case RELOAD_FOR_OUTPUT:
1134 this_needs = insn_needs_for_outputs;
1135 this_groups = insn_groups_for_outputs;
1136 this_total_groups = &insn_total_groups_for_outputs;
1139 case RELOAD_FOR_INSN:
1140 this_needs = insn_needs_for_insn;
1141 this_groups = insn_groups_for_insn;
1142 this_total_groups = &insn_total_groups_for_insn;
1145 case RELOAD_FOR_OTHER_ADDRESS:
1146 this_needs = insn_needs_for_other_addr;
1147 this_groups = insn_groups_for_other_addr;
1148 this_total_groups = &insn_total_groups_for_other_addr;
1151 case RELOAD_FOR_INPUT_ADDRESS:
1152 this_needs = insn_needs_for_in_addr[reload_opnum[i]];
1153 this_groups = insn_groups_for_in_addr[reload_opnum[i]];
1155 = &insn_total_groups_for_in_addr[reload_opnum[i]];
1158 case RELOAD_FOR_OUTPUT_ADDRESS:
1159 this_needs = insn_needs_for_out_addr[reload_opnum[i]];
1160 this_groups = insn_groups_for_out_addr[reload_opnum[i]];
1162 = &insn_total_groups_for_out_addr[reload_opnum[i]];
1165 case RELOAD_FOR_OPERAND_ADDRESS:
1166 this_needs = insn_needs_for_op_addr;
1167 this_groups = insn_groups_for_op_addr;
1168 this_total_groups = &insn_total_groups_for_op_addr;
1172 mode = reload_inmode[i];
1173 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1174 mode = reload_outmode[i];
1175 size = CLASS_MAX_NREGS (class, mode);
1178 enum machine_mode other_mode, allocate_mode;
1180 /* Count number of groups needed separately from
1181 number of individual regs needed. */
1182 this_groups[(int) class]++;
1183 p = reg_class_superclasses[(int) class];
1184 while (*p != LIM_REG_CLASSES)
1185 this_groups[(int) *p++]++;
1186 (*this_total_groups)++;
1188 /* Record size and mode of a group of this class. */
1189 /* If more than one size group is needed,
1190 make all groups the largest needed size. */
1191 if (group_size[(int) class] < size)
1193 other_mode = group_mode[(int) class];
1194 allocate_mode = mode;
1196 group_size[(int) class] = size;
1197 group_mode[(int) class] = mode;
1202 allocate_mode = group_mode[(int) class];
1205 /* Crash if two dissimilar machine modes both need
1206 groups of consecutive regs of the same class. */
1208 if (other_mode != VOIDmode
1209 && other_mode != allocate_mode
1210 && ! modes_equiv_for_class_p (allocate_mode,
1217 this_needs[(int) class] += 1;
1218 p = reg_class_superclasses[(int) class];
1219 while (*p != LIM_REG_CLASSES)
1220 this_needs[(int) *p++] += 1;
1226 /* All reloads have been counted for this insn;
1227 now merge the various times of use.
1228 This sets insn_needs, etc., to the maximum total number
1229 of registers needed at any point in this insn. */
1231 for (i = 0; i < N_REG_CLASSES; i++)
1233 int in_max, out_max;
1235 for (in_max = 0, out_max = 0, j = 0;
1236 j < reload_n_operands; j++)
1238 in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
1239 out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
1242 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1243 and operand addresses but not things used to reload them.
1244 Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
1245 conflict with things needed to reload inputs or
1248 in_max = MAX (in_max, insn_needs_for_op_addr[i]);
1249 out_max = MAX (out_max, insn_needs_for_insn[i]);
1251 insn_needs_for_inputs[i]
1252 = MAX (insn_needs_for_inputs[i]
1253 + insn_needs_for_op_addr[i]
1254 + insn_needs_for_insn[i],
1255 in_max + insn_needs_for_inputs[i]);
1257 insn_needs_for_outputs[i] += out_max;
1258 insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
1259 insn_needs_for_outputs[i]),
1260 insn_needs_for_other_addr[i]);
1262 for (in_max = 0, out_max = 0, j = 0;
1263 j < reload_n_operands; j++)
1265 in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
1266 out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
1269 in_max = MAX (in_max, insn_groups_for_op_addr[i]);
1270 out_max = MAX (out_max, insn_groups_for_insn[i]);
1272 insn_groups_for_inputs[i]
1273 = MAX (insn_groups_for_inputs[i]
1274 + insn_groups_for_op_addr[i]
1275 + insn_groups_for_insn[i],
1276 in_max + insn_groups_for_inputs[i]);
1278 insn_groups_for_outputs[i] += out_max;
1279 insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
1280 insn_groups_for_outputs[i]),
1281 insn_groups_for_other_addr[i]);
1284 for (i = 0; i < reload_n_operands; i++)
1286 max_total_input_groups
1287 = MAX (max_total_input_groups,
1288 insn_total_groups_for_in_addr[i]);
1289 max_total_output_groups
1290 = MAX (max_total_output_groups,
1291 insn_total_groups_for_out_addr[i]);
1294 max_total_input_groups = MAX (max_total_input_groups,
1295 insn_total_groups_for_op_addr);
1296 max_total_output_groups = MAX (max_total_output_groups,
1297 insn_total_groups_for_insn);
1299 insn_total_groups_for_inputs
1300 = MAX (max_total_input_groups + insn_total_groups_for_op_addr
1301 + insn_total_groups_for_insn,
1302 max_total_input_groups + insn_total_groups_for_inputs);
1304 insn_total_groups_for_outputs += max_total_output_groups;
1306 insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
1307 insn_total_groups_for_inputs),
1308 insn_total_groups_for_other_addr);
1310 /* If this is a CALL_INSN and caller-saves will need
1311 a spill register, act as if the spill register is
1312 needed for this insn. However, the spill register
1313 can be used by any reload of this insn, so we only
1314 need do something if no need for that class has
1317 The assumption that every CALL_INSN will trigger a
1318 caller-save is highly conservative, however, the number
1319 of cases where caller-saves will need a spill register but
1320 a block containing a CALL_INSN won't need a spill register
1321 of that class should be quite rare.
1323 If a group is needed, the size and mode of the group will
1324 have been set up at the beginning of this loop. */
1326 if (GET_CODE (insn) == CALL_INSN
1327 && caller_save_spill_class != NO_REGS)
1329 int *caller_save_needs
1330 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1332 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1334 register enum reg_class *p
1335 = reg_class_superclasses[(int) caller_save_spill_class];
1337 caller_save_needs[(int) caller_save_spill_class]++;
1339 while (*p != LIM_REG_CLASSES)
1340 caller_save_needs[(int) *p++] += 1;
1343 if (caller_save_group_size > 1)
1344 insn_total_groups = MAX (insn_total_groups, 1);
1347 /* Show that this basic block will need a register of
1351 && ! (basic_block_needs[(int) caller_save_spill_class]
1354 basic_block_needs[(int) caller_save_spill_class]
1356 new_basic_block_needs = 1;
1360 #ifdef SMALL_REGISTER_CLASSES
1361 /* If this insn stores the value of a function call,
1362 and that value is in a register that has been spilled,
1363 and if the insn needs a reload in a class
1364 that might use that register as the reload register,
1365 then add an extra need in that class.
1366 This makes sure we have a register available that does
1367 not overlap the return value. */
1368 if (avoid_return_reg)
1370 int regno = REGNO (avoid_return_reg);
1372 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1374 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1376 /* First compute the "basic needs", which counts a
1377 need only in the smallest class in which it
1380 bcopy (insn_needs, basic_needs, sizeof basic_needs);
1381 bcopy (insn_groups, basic_groups, sizeof basic_groups);
1383 for (i = 0; i < N_REG_CLASSES; i++)
1387 if (basic_needs[i] >= 0)
1388 for (p = reg_class_superclasses[i];
1389 *p != LIM_REG_CLASSES; p++)
1390 basic_needs[(int) *p] -= basic_needs[i];
1392 if (basic_groups[i] >= 0)
1393 for (p = reg_class_superclasses[i];
1394 *p != LIM_REG_CLASSES; p++)
1395 basic_groups[(int) *p] -= basic_groups[i];
1398 /* Now count extra regs if there might be a conflict with
1399 the return value register.
1401 ??? This is not quite correct because we don't properly
1402 handle the case of groups, but if we end up doing
1403 something wrong, it either will end up not mattering or
1404 we will abort elsewhere. */
1406 for (r = regno; r < regno + nregs; r++)
1407 if (spill_reg_order[r] >= 0)
1408 for (i = 0; i < N_REG_CLASSES; i++)
1409 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1411 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1416 p = reg_class_superclasses[i];
1417 while (*p != LIM_REG_CLASSES)
1418 insn_needs[(int) *p++]++;
1422 #endif /* SMALL_REGISTER_CLASSES */
1424 /* For each class, collect maximum need of any insn. */
1426 for (i = 0; i < N_REG_CLASSES; i++)
1428 if (max_needs[i] < insn_needs[i])
1430 max_needs[i] = insn_needs[i];
1431 max_needs_insn[i] = insn;
1433 if (max_groups[i] < insn_groups[i])
1435 max_groups[i] = insn_groups[i];
1436 max_groups_insn[i] = insn;
1438 if (insn_total_groups > 0)
1439 if (max_nongroups[i] < insn_needs[i])
1441 max_nongroups[i] = insn_needs[i];
1442 max_nongroups_insn[i] = insn;
1446 /* Note that there is a continue statement above. */
1449 /* If we allocated any new memory locations, make another pass
1450 since it might have changed elimination offsets. */
1451 if (starting_frame_size != get_frame_size ())
1452 something_changed = 1;
1455 for (i = 0; i < N_REG_CLASSES; i++)
1457 if (max_needs[i] > 0)
1459 ";; Need %d reg%s of class %s (for insn %d).\n",
1460 max_needs[i], max_needs[i] == 1 ? "" : "s",
1461 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1462 if (max_nongroups[i] > 0)
1464 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1465 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1466 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1467 if (max_groups[i] > 0)
1469 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1470 max_groups[i], max_groups[i] == 1 ? "" : "s",
1471 mode_name[(int) group_mode[i]],
1472 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1475 /* If we have caller-saves, set up the save areas and see if caller-save
1476 will need a spill register. */
1478 if (caller_save_needed
1479 && ! setup_save_areas (&something_changed)
1480 && caller_save_spill_class == NO_REGS)
1482 /* The class we will need depends on whether the machine
1483 supports the sum of two registers for an address; see
1484 find_address_reloads for details. */
1486 caller_save_spill_class
1487 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1488 caller_save_group_size
1489 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1490 something_changed = 1;
1493 /* See if anything that happened changes which eliminations are valid.
1494 For example, on the Sparc, whether or not the frame pointer can
1495 be eliminated can depend on what registers have been used. We need
1496 not check some conditions again (such as flag_omit_frame_pointer)
1497 since they can't have changed. */
1499 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1500 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1501 #ifdef ELIMINABLE_REGS
1502 || ! CAN_ELIMINATE (ep->from, ep->to)
1505 ep->can_eliminate = 0;
1507 /* Look for the case where we have discovered that we can't replace
1508 register A with register B and that means that we will now be
1509 trying to replace register A with register C. This means we can
1510 no longer replace register C with register B and we need to disable
1511 such an elimination, if it exists. This occurs often with A == ap,
1512 B == sp, and C == fp. */
1514 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1516 struct elim_table *op;
1517 register int new_to = -1;
1519 if (! ep->can_eliminate && ep->can_eliminate_previous)
1521 /* Find the current elimination for ep->from, if there is a
1523 for (op = reg_eliminate;
1524 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
1525 if (op->from == ep->from && op->can_eliminate)
1531 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1533 for (op = reg_eliminate;
1534 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
1535 if (op->from == new_to && op->to == ep->to)
1536 op->can_eliminate = 0;
1540 /* See if any registers that we thought we could eliminate the previous
1541 time are no longer eliminable. If so, something has changed and we
1542 must spill the register. Also, recompute the number of eliminable
1543 registers and see if the frame pointer is needed; it is if there is
1544 no elimination of the frame pointer that we can perform. */
1546 frame_pointer_needed = 1;
1547 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1549 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1550 && ep->to != HARD_FRAME_POINTER_REGNUM)
1551 frame_pointer_needed = 0;
1553 if (! ep->can_eliminate && ep->can_eliminate_previous)
1555 ep->can_eliminate_previous = 0;
1556 spill_hard_reg (ep->from, global, dumpfile, 1);
1557 regs_ever_live[ep->from] = 1;
1558 something_changed = 1;
1563 /* If all needs are met, we win. */
1565 for (i = 0; i < N_REG_CLASSES; i++)
1566 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1568 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1571 /* Not all needs are met; must spill some hard regs. */
1573 /* Put all registers spilled so far back in potential_reload_regs, but
1574 put them at the front, since we've already spilled most of the
1575 pseudos in them (we might have left some pseudos unspilled if they
1576 were in a block that didn't need any spill registers of a conflicting
1577 class). We used to try to mark off the need for those registers,
1578 but doing so properly is very complex and reallocating them is the
1579 simpler approach. First, "pack" potential_reload_regs by pushing
1580 any nonnegative entries towards the end. That will leave room
1581 for the registers we already spilled.
1583 Also, undo the marking of the spill registers from the last time
1584 around in FORBIDDEN_REGS since we will probably be allocating
1587 ??? It is theoretically possible that we might end up not using one
1588 of our previously-spilled registers in this allocation, even though
1589 they are at the head of the list. It's not clear what to do about
1590 this, but it was no better before, when we marked off the needs met
1591 by the previously-spilled registers. With the current code, globals
1592 can be allocated into these registers, but locals cannot. */
1596 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1597 if (potential_reload_regs[i] != -1)
1598 potential_reload_regs[j--] = potential_reload_regs[i];
1600 for (i = 0; i < n_spills; i++)
1602 potential_reload_regs[i] = spill_regs[i];
1603 spill_reg_order[spill_regs[i]] = -1;
1604 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1610 /* Now find more reload regs to satisfy the remaining need
1611 Do it by ascending class number, since otherwise a reg
1612 might be spilled for a big class and might fail to count
1613 for a smaller class even though it belongs to that class.
1615 Count spilled regs in `spills', and add entries to
1616 `spill_regs' and `spill_reg_order'.
1618 ??? Note there is a problem here.
1619 When there is a need for a group in a high-numbered class,
1620 and also need for non-group regs that come from a lower class,
1621 the non-group regs are chosen first. If there aren't many regs,
1622 they might leave no room for a group.
1624 This was happening on the 386. To fix it, we added the code
1625 that calls possible_group_p, so that the lower class won't
1626 break up the last possible group.
1628 Really fixing the problem would require changes above
1629 in counting the regs already spilled, and in choose_reload_regs.
1630 It might be hard to avoid introducing bugs there. */
1632 CLEAR_HARD_REG_SET (counted_for_groups);
1633 CLEAR_HARD_REG_SET (counted_for_nongroups);
1635 for (class = 0; class < N_REG_CLASSES; class++)
1637 /* First get the groups of registers.
1638 If we got single registers first, we might fragment
1640 while (max_groups[class] > 0)
1642 /* If any single spilled regs happen to form groups,
1643 count them now. Maybe we don't really need
1644 to spill another group. */
1645 count_possible_groups (group_size, group_mode, max_groups);
1647 if (max_groups[class] <= 0)
1650 /* Groups of size 2 (the only groups used on most machines)
1651 are treated specially. */
1652 if (group_size[class] == 2)
1654 /* First, look for a register that will complete a group. */
1655 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1659 j = potential_reload_regs[i];
1660 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1662 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1664 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1665 && HARD_REGNO_MODE_OK (other, group_mode[class])
1666 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1668 /* We don't want one part of another group.
1669 We could get "two groups" that overlap! */
1670 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1672 (j < FIRST_PSEUDO_REGISTER - 1
1673 && (other = j + 1, spill_reg_order[other] >= 0)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1676 && HARD_REGNO_MODE_OK (j, group_mode[class])
1677 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1679 && ! TEST_HARD_REG_BIT (counted_for_groups,
1682 register enum reg_class *p;
1684 /* We have found one that will complete a group,
1685 so count off one group as provided. */
1686 max_groups[class]--;
1687 p = reg_class_superclasses[class];
1688 while (*p != LIM_REG_CLASSES)
1689 max_groups[(int) *p++]--;
1691 /* Indicate both these regs are part of a group. */
1692 SET_HARD_REG_BIT (counted_for_groups, j);
1693 SET_HARD_REG_BIT (counted_for_groups, other);
1697 /* We can't complete a group, so start one. */
1698 #ifdef SMALL_REGISTER_CLASSES
1699 /* Look for a pair neither of which is explicitly used. */
1700 if (i == FIRST_PSEUDO_REGISTER)
1701 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1704 j = potential_reload_regs[i];
1705 /* Verify that J+1 is a potential reload reg. */
1706 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1707 if (potential_reload_regs[k] == j + 1)
1709 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1710 && k < FIRST_PSEUDO_REGISTER
1711 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1712 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1713 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1714 && HARD_REGNO_MODE_OK (j, group_mode[class])
1715 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1717 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1718 /* Reject J at this stage
1719 if J+1 was explicitly used. */
1720 && ! regs_explicitly_used[j + 1])
1724 /* Now try any group at all
1725 whose registers are not in bad_spill_regs. */
1726 if (i == FIRST_PSEUDO_REGISTER)
1727 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1730 j = potential_reload_regs[i];
1731 /* Verify that J+1 is a potential reload reg. */
1732 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1733 if (potential_reload_regs[k] == j + 1)
1735 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1736 && k < FIRST_PSEUDO_REGISTER
1737 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1738 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1739 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1740 && HARD_REGNO_MODE_OK (j, group_mode[class])
1741 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1743 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1747 /* I should be the index in potential_reload_regs
1748 of the new reload reg we have found. */
1750 if (i >= FIRST_PSEUDO_REGISTER)
1752 /* There are no groups left to spill. */
1753 spill_failure (max_groups_insn[class]);
1759 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1764 /* For groups of more than 2 registers,
1765 look for a sufficient sequence of unspilled registers,
1766 and spill them all at once. */
1767 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1771 j = potential_reload_regs[i];
1773 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1774 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1776 /* Check each reg in the sequence. */
1777 for (k = 0; k < group_size[class]; k++)
1778 if (! (spill_reg_order[j + k] < 0
1779 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1780 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1782 /* We got a full sequence, so spill them all. */
1783 if (k == group_size[class])
1785 register enum reg_class *p;
1786 for (k = 0; k < group_size[class]; k++)
1789 SET_HARD_REG_BIT (counted_for_groups, j + k);
1790 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1791 if (potential_reload_regs[idx] == j + k)
1794 |= new_spill_reg (idx, class,
1795 max_needs, NULL_PTR,
1799 /* We have found one that will complete a group,
1800 so count off one group as provided. */
1801 max_groups[class]--;
1802 p = reg_class_superclasses[class];
1803 while (*p != LIM_REG_CLASSES)
1804 max_groups[(int) *p++]--;
1810 /* We couldn't find any registers for this reload.
1811 Avoid going into an infinite loop. */
1812 if (i >= FIRST_PSEUDO_REGISTER)
1814 /* There are no groups left. */
1815 spill_failure (max_groups_insn[class]);
1822 /* Now similarly satisfy all need for single registers. */
1824 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1826 #ifdef SMALL_REGISTER_CLASSES
1827 /* This should be right for all machines, but only the 386
1828 is known to need it, so this conditional plays safe.
1829 ??? For 2.5, try making this unconditional. */
1830 /* If we spilled enough regs, but they weren't counted
1831 against the non-group need, see if we can count them now.
1832 If so, we can avoid some actual spilling. */
1833 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1834 for (i = 0; i < n_spills; i++)
1835 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1837 && !TEST_HARD_REG_BIT (counted_for_groups,
1839 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1841 && max_nongroups[class] > 0)
1843 register enum reg_class *p;
1845 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1846 max_nongroups[class]--;
1847 p = reg_class_superclasses[class];
1848 while (*p != LIM_REG_CLASSES)
1849 max_nongroups[(int) *p++]--;
1851 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1855 /* Consider the potential reload regs that aren't
1856 yet in use as reload regs, in order of preference.
1857 Find the most preferred one that's in this class. */
1859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1860 if (potential_reload_regs[i] >= 0
1861 && TEST_HARD_REG_BIT (reg_class_contents[class],
1862 potential_reload_regs[i])
1863 /* If this reg will not be available for groups,
1864 pick one that does not foreclose possible groups.
1865 This is a kludge, and not very general,
1866 but it should be sufficient to make the 386 work,
1867 and the problem should not occur on machines with
1869 && (max_nongroups[class] == 0
1870 || possible_group_p (potential_reload_regs[i], max_groups)))
1873 /* If we couldn't get a register, try to get one even if we
1874 might foreclose possible groups. This may cause problems
1875 later, but that's better than aborting now, since it is
1876 possible that we will, in fact, be able to form the needed
1877 group even with this allocation. */
1879 if (i >= FIRST_PSEUDO_REGISTER
1880 && (asm_noperands (max_needs[class] > 0
1881 ? max_needs_insn[class]
1882 : max_nongroups_insn[class])
1884 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1885 if (potential_reload_regs[i] >= 0
1886 && TEST_HARD_REG_BIT (reg_class_contents[class],
1887 potential_reload_regs[i]))
1890 /* I should be the index in potential_reload_regs
1891 of the new reload reg we have found. */
1893 if (i >= FIRST_PSEUDO_REGISTER)
1895 /* There are no possible registers left to spill. */
1896 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1897 : max_nongroups_insn[class]);
1903 |= new_spill_reg (i, class, max_needs, max_nongroups,
1909 /* If global-alloc was run, notify it of any register eliminations we have
1912 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1913 if (ep->can_eliminate)
1914 mark_elimination (ep->from, ep->to);
1916 /* Insert code to save and restore call-clobbered hard regs
1917 around calls. Tell if what mode to use so that we will process
1918 those insns in reload_as_needed if we have to. */
1920 if (caller_save_needed)
1921 save_call_clobbered_regs (num_eliminable ? QImode
1922 : caller_save_spill_class != NO_REGS ? HImode
1925 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1926 If that insn didn't set the register (i.e., it copied the register to
1927 memory), just delete that insn instead of the equivalencing insn plus
1928 anything now dead. If we call delete_dead_insn on that insn, we may
1929 delete the insn that actually sets the register if the register dies
1930 there and that is incorrect. */
1932 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1933 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1934 && GET_CODE (reg_equiv_init[i]) != NOTE)
1936 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1937 delete_dead_insn (reg_equiv_init[i]);
1940 PUT_CODE (reg_equiv_init[i], NOTE);
1941 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1942 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1946 /* Use the reload registers where necessary
1947 by generating move instructions to move the must-be-register
1948 values into or out of the reload registers. */
1950 if (something_needs_reloads || something_needs_elimination
1951 || (caller_save_needed && num_eliminable)
1952 || caller_save_spill_class != NO_REGS)
1953 reload_as_needed (first, global);
1955 /* If we were able to eliminate the frame pointer, show that it is no
1956 longer live at the start of any basic block. If it is live by
1957 virtue of being in a pseudo, that pseudo will be marked live
1958 and hence the frame pointer will be known to be live via that
1961 if (! frame_pointer_needed)
1962 for (i = 0; i < n_basic_blocks; i++)
1963 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1964 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1965 % REGSET_ELT_BITS));
1967 /* Come here (with failure set nonzero) if we can't get enough spill regs
1968 and we decide not to abort about it. */
1971 reload_in_progress = 0;
1973 /* Now eliminate all pseudo regs by modifying them into
1974 their equivalent memory references.
1975 The REG-rtx's for the pseudos are modified in place,
1976 so all insns that used to refer to them now refer to memory.
1978 For a reg that has a reg_equiv_address, all those insns
1979 were changed by reloading so that no insns refer to it any longer;
1980 but the DECL_RTL of a variable decl may refer to it,
1981 and if so this causes the debugging info to mention the variable. */
1983 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1987 if (reg_equiv_mem[i])
1989 addr = XEXP (reg_equiv_mem[i], 0);
1990 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1992 if (reg_equiv_address[i])
1993 addr = reg_equiv_address[i];
1996 if (reg_renumber[i] < 0)
1998 rtx reg = regno_reg_rtx[i];
1999 XEXP (reg, 0) = addr;
2000 REG_USERVAR_P (reg) = 0;
2001 MEM_IN_STRUCT_P (reg) = in_struct;
2002 PUT_CODE (reg, MEM);
2004 else if (reg_equiv_mem[i])
2005 XEXP (reg_equiv_mem[i], 0) = addr;
2009 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2010 /* Make a pass over all the insns and remove death notes for things that
2011 are no longer registers or no longer die in the insn (e.g., an input
2012 and output pseudo being tied). */
2014 for (insn = first; insn; insn = NEXT_INSN (insn))
2015 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2019 for (note = REG_NOTES (insn); note; note = next)
2021 next = XEXP (note, 1);
2022 if (REG_NOTE_KIND (note) == REG_DEAD
2023 && (GET_CODE (XEXP (note, 0)) != REG
2024 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2025 remove_note (insn, note);
2030 /* Indicate that we no longer have known memory locations or constants. */
2031 reg_equiv_constant = 0;
2032 reg_equiv_memory_loc = 0;
2035 free (scratch_list);
2038 free (scratch_block);
2044 /* Nonzero if, after spilling reg REGNO for non-groups,
2045 it will still be possible to find a group if we still need one. */
2048 possible_group_p (regno, max_groups)
2053 int class = (int) NO_REGS;
2055 for (i = 0; i < (int) N_REG_CLASSES; i++)
2056 if (max_groups[i] > 0)
2062 if (class == (int) NO_REGS)
2065 /* Consider each pair of consecutive registers. */
2066 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2068 /* Ignore pairs that include reg REGNO. */
2069 if (i == regno || i + 1 == regno)
2072 /* Ignore pairs that are outside the class that needs the group.
2073 ??? Here we fail to handle the case where two different classes
2074 independently need groups. But this never happens with our
2075 current machine descriptions. */
2076 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2077 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2080 /* A pair of consecutive regs we can still spill does the trick. */
2081 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2082 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2083 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2086 /* A pair of one already spilled and one we can spill does it
2087 provided the one already spilled is not otherwise reserved. */
2088 if (spill_reg_order[i] < 0
2089 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2090 && spill_reg_order[i + 1] >= 0
2091 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2092 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2094 if (spill_reg_order[i + 1] < 0
2095 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2096 && spill_reg_order[i] >= 0
2097 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2098 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2105 /* Count any groups that can be formed from the registers recently spilled.
2106 This is done class by class, in order of ascending class number. */
2109 count_possible_groups (group_size, group_mode, max_groups)
2111 enum machine_mode *group_mode;
2115 /* Now find all consecutive groups of spilled registers
2116 and mark each group off against the need for such groups.
2117 But don't count them against ordinary need, yet. */
2119 for (i = 0; i < N_REG_CLASSES; i++)
2120 if (group_size[i] > 1)
2125 CLEAR_HARD_REG_SET (new);
2127 /* Make a mask of all the regs that are spill regs in class I. */
2128 for (j = 0; j < n_spills; j++)
2129 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2130 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2131 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2133 SET_HARD_REG_BIT (new, spill_regs[j]);
2135 /* Find each consecutive group of them. */
2136 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2137 if (TEST_HARD_REG_BIT (new, j)
2138 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2139 /* Next line in case group-mode for this class
2140 demands an even-odd pair. */
2141 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2144 for (k = 1; k < group_size[i]; k++)
2145 if (! TEST_HARD_REG_BIT (new, j + k))
2147 if (k == group_size[i])
2149 /* We found a group. Mark it off against this class's
2150 need for groups, and against each superclass too. */
2151 register enum reg_class *p;
2153 p = reg_class_superclasses[i];
2154 while (*p != LIM_REG_CLASSES)
2155 max_groups[(int) *p++]--;
2156 /* Don't count these registers again. */
2157 for (k = 0; k < group_size[i]; k++)
2158 SET_HARD_REG_BIT (counted_for_groups, j + k);
2160 /* Skip to the last reg in this group. When j is incremented
2161 above, it will then point to the first reg of the next
2169 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2170 another mode that needs to be reloaded for the same register class CLASS.
2171 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2172 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2174 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2175 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2176 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2177 causes unnecessary failures on machines requiring alignment of register
2178 groups when the two modes are different sizes, because the larger mode has
2179 more strict alignment rules than the smaller mode. */
2182 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2183 enum machine_mode allocate_mode, other_mode;
2184 enum reg_class class;
2187 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2189 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2190 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2191 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2197 /* Handle the failure to find a register to spill.
2198 INSN should be one of the insns which needed this particular spill reg. */
2201 spill_failure (insn)
2204 if (asm_noperands (PATTERN (insn)) >= 0)
2205 error_for_asm (insn, "`asm' needs too many reloads");
2210 /* Add a new register to the tables of available spill-registers
2211 (as well as spilling all pseudos allocated to the register).
2212 I is the index of this register in potential_reload_regs.
2213 CLASS is the regclass whose need is being satisfied.
2214 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2215 so that this register can count off against them.
2216 MAX_NONGROUPS is 0 if this register is part of a group.
2217 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2220 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2228 register enum reg_class *p;
2230 int regno = potential_reload_regs[i];
2232 if (i >= FIRST_PSEUDO_REGISTER)
2233 abort (); /* Caller failed to find any register. */
2235 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2236 fatal ("fixed or forbidden register was spilled.\n\
2237 This may be due to a compiler bug or to impossible asm statements.");
2239 /* Make reg REGNO an additional reload reg. */
2241 potential_reload_regs[i] = -1;
2242 spill_regs[n_spills] = regno;
2243 spill_reg_order[regno] = n_spills;
2245 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2247 /* Clear off the needs we just satisfied. */
2250 p = reg_class_superclasses[class];
2251 while (*p != LIM_REG_CLASSES)
2252 max_needs[(int) *p++]--;
2254 if (max_nongroups && max_nongroups[class] > 0)
2256 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2257 max_nongroups[class]--;
2258 p = reg_class_superclasses[class];
2259 while (*p != LIM_REG_CLASSES)
2260 max_nongroups[(int) *p++]--;
2263 /* Spill every pseudo reg that was allocated to this reg
2264 or to something that overlaps this reg. */
2266 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2268 /* If there are some registers still to eliminate and this register
2269 wasn't ever used before, additional stack space may have to be
2270 allocated to store this register. Thus, we may have changed the offset
2271 between the stack and frame pointers, so mark that something has changed.
2272 (If new pseudos were spilled, thus requiring more space, VAL would have
2273 been set non-zero by the call to spill_hard_reg above since additional
2274 reloads may be needed in that case.
2276 One might think that we need only set VAL to 1 if this is a call-used
2277 register. However, the set of registers that must be saved by the
2278 prologue is not identical to the call-used set. For example, the
2279 register used by the call insn for the return PC is a call-used register,
2280 but must be saved by the prologue. */
2281 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2284 regs_ever_live[spill_regs[n_spills]] = 1;
2290 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2291 data that is dead in INSN. */
2294 delete_dead_insn (insn)
2297 rtx prev = prev_real_insn (insn);
2300 /* If the previous insn sets a register that dies in our insn, delete it
2302 if (prev && GET_CODE (PATTERN (prev)) == SET
2303 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2304 && reg_mentioned_p (prev_dest, PATTERN (insn))
2305 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2306 delete_dead_insn (prev);
2308 PUT_CODE (insn, NOTE);
2309 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2310 NOTE_SOURCE_FILE (insn) = 0;
2313 /* Modify the home of pseudo-reg I.
2314 The new home is present in reg_renumber[I].
2316 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2317 or it may be -1, meaning there is none or it is not relevant.
2318 This is used so that all pseudos spilled from a given hard reg
2319 can share one stack slot. */
2322 alter_reg (i, from_reg)
2326 /* When outputting an inline function, this can happen
2327 for a reg that isn't actually used. */
2328 if (regno_reg_rtx[i] == 0)
2331 /* If the reg got changed to a MEM at rtl-generation time,
2333 if (GET_CODE (regno_reg_rtx[i]) != REG)
2336 /* Modify the reg-rtx to contain the new hard reg
2337 number or else to contain its pseudo reg number. */
2338 REGNO (regno_reg_rtx[i])
2339 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2341 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2342 allocate a stack slot for it. */
2344 if (reg_renumber[i] < 0
2345 && reg_n_refs[i] > 0
2346 && reg_equiv_constant[i] == 0
2347 && reg_equiv_memory_loc[i] == 0)
2350 int inherent_size = PSEUDO_REGNO_BYTES (i);
2351 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2354 /* Each pseudo reg has an inherent size which comes from its own mode,
2355 and a total size which provides room for paradoxical subregs
2356 which refer to the pseudo reg in wider modes.
2358 We can use a slot already allocated if it provides both
2359 enough inherent space and enough total space.
2360 Otherwise, we allocate a new slot, making sure that it has no less
2361 inherent space, and no less total space, then the previous slot. */
2364 /* No known place to spill from => no slot to reuse. */
2365 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2366 #if BYTES_BIG_ENDIAN
2367 /* Cancel the big-endian correction done in assign_stack_local.
2368 Get the address of the beginning of the slot.
2369 This is so we can do a big-endian correction unconditionally
2371 adjust = inherent_size - total_size;
2374 /* Reuse a stack slot if possible. */
2375 else if (spill_stack_slot[from_reg] != 0
2376 && spill_stack_slot_width[from_reg] >= total_size
2377 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2379 x = spill_stack_slot[from_reg];
2380 /* Allocate a bigger slot. */
2383 /* Compute maximum size needed, both for inherent size
2384 and for total size. */
2385 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2386 if (spill_stack_slot[from_reg])
2388 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2390 mode = GET_MODE (spill_stack_slot[from_reg]);
2391 if (spill_stack_slot_width[from_reg] > total_size)
2392 total_size = spill_stack_slot_width[from_reg];
2394 /* Make a slot with that size. */
2395 x = assign_stack_local (mode, total_size, -1);
2396 #if BYTES_BIG_ENDIAN
2397 /* Cancel the big-endian correction done in assign_stack_local.
2398 Get the address of the beginning of the slot.
2399 This is so we can do a big-endian correction unconditionally
2401 adjust = GET_MODE_SIZE (mode) - total_size;
2403 spill_stack_slot[from_reg] = x;
2404 spill_stack_slot_width[from_reg] = total_size;
2407 #if BYTES_BIG_ENDIAN
2408 /* On a big endian machine, the "address" of the slot
2409 is the address of the low part that fits its inherent mode. */
2410 if (inherent_size < total_size)
2411 adjust += (total_size - inherent_size);
2412 #endif /* BYTES_BIG_ENDIAN */
2414 /* If we have any adjustment to make, or if the stack slot is the
2415 wrong mode, make a new stack slot. */
2416 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2418 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2419 plus_constant (XEXP (x, 0), adjust));
2420 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2423 /* Save the stack slot for later. */
2424 reg_equiv_memory_loc[i] = x;
2428 /* Mark the slots in regs_ever_live for the hard regs
2429 used by pseudo-reg number REGNO. */
2432 mark_home_live (regno)
2435 register int i, lim;
2436 i = reg_renumber[regno];
2439 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2441 regs_ever_live[i++] = 1;
2444 /* Mark the registers used in SCRATCH as being live. */
2447 mark_scratch_live (scratch)
2451 int regno = REGNO (scratch);
2452 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2454 for (i = regno; i < lim; i++)
2455 regs_ever_live[i] = 1;
2458 /* This function handles the tracking of elimination offsets around branches.
2460 X is a piece of RTL being scanned.
2462 INSN is the insn that it came from, if any.
2464 INITIAL_P is non-zero if we are to set the offset to be the initial
2465 offset and zero if we are setting the offset of the label to be the
2469 set_label_offsets (x, insn, initial_p)
2474 enum rtx_code code = GET_CODE (x);
2477 struct elim_table *p;
2482 if (LABEL_REF_NONLOCAL_P (x))
2487 /* ... fall through ... */
2490 /* If we know nothing about this label, set the desired offsets. Note
2491 that this sets the offset at a label to be the offset before a label
2492 if we don't know anything about the label. This is not correct for
2493 the label after a BARRIER, but is the best guess we can make. If
2494 we guessed wrong, we will suppress an elimination that might have
2495 been possible had we been able to guess correctly. */
2497 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2499 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2500 offsets_at[CODE_LABEL_NUMBER (x)][i]
2501 = (initial_p ? reg_eliminate[i].initial_offset
2502 : reg_eliminate[i].offset);
2503 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2506 /* Otherwise, if this is the definition of a label and it is
2507 preceded by a BARRIER, set our offsets to the known offset of
2511 && (tem = prev_nonnote_insn (insn)) != 0
2512 && GET_CODE (tem) == BARRIER)
2514 num_not_at_initial_offset = 0;
2515 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2517 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2518 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2519 if (reg_eliminate[i].can_eliminate
2520 && (reg_eliminate[i].offset
2521 != reg_eliminate[i].initial_offset))
2522 num_not_at_initial_offset++;
2527 /* If neither of the above cases is true, compare each offset
2528 with those previously recorded and suppress any eliminations
2529 where the offsets disagree. */
2531 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2532 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2533 != (initial_p ? reg_eliminate[i].initial_offset
2534 : reg_eliminate[i].offset))
2535 reg_eliminate[i].can_eliminate = 0;
2540 set_label_offsets (PATTERN (insn), insn, initial_p);
2542 /* ... fall through ... */
2546 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2547 and hence must have all eliminations at their initial offsets. */
2548 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2549 if (REG_NOTE_KIND (tem) == REG_LABEL)
2550 set_label_offsets (XEXP (tem, 0), insn, 1);
2555 /* Each of the labels in the address vector must be at their initial
2556 offsets. We want the first first for ADDR_VEC and the second
2557 field for ADDR_DIFF_VEC. */
2559 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2560 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2565 /* We only care about setting PC. If the source is not RETURN,
2566 IF_THEN_ELSE, or a label, disable any eliminations not at
2567 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2568 isn't one of those possibilities. For branches to a label,
2569 call ourselves recursively.
2571 Note that this can disable elimination unnecessarily when we have
2572 a non-local goto since it will look like a non-constant jump to
2573 someplace in the current function. This isn't a significant
2574 problem since such jumps will normally be when all elimination
2575 pairs are back to their initial offsets. */
2577 if (SET_DEST (x) != pc_rtx)
2580 switch (GET_CODE (SET_SRC (x)))
2587 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2591 tem = XEXP (SET_SRC (x), 1);
2592 if (GET_CODE (tem) == LABEL_REF)
2593 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2594 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2597 tem = XEXP (SET_SRC (x), 2);
2598 if (GET_CODE (tem) == LABEL_REF)
2599 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2600 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2605 /* If we reach here, all eliminations must be at their initial
2606 offset because we are doing a jump to a variable address. */
2607 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2608 if (p->offset != p->initial_offset)
2609 p->can_eliminate = 0;
2613 /* Used for communication between the next two function to properly share
2614 the vector for an ASM_OPERANDS. */
2616 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2618 /* Scan X and replace any eliminable registers (such as fp) with a
2619 replacement (such as sp), plus an offset.
2621 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2622 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2623 MEM, we are allowed to replace a sum of a register and the constant zero
2624 with the register, which we cannot do outside a MEM. In addition, we need
2625 to record the fact that a register is referenced outside a MEM.
2627 If INSN is an insn, it is the insn containing X. If we replace a REG
2628 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2629 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2630 that the REG is being modified.
2632 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2633 That's used when we eliminate in expressions stored in notes.
2634 This means, do not set ref_outside_mem even if the reference
2637 If we see a modification to a register we know about, take the
2638 appropriate action (see case SET, below).
2640 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2641 replacements done assuming all offsets are at their initial values. If
2642 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2643 encounter, return the actual location so that find_reloads will do
2644 the proper thing. */
2647 eliminate_regs (x, mem_mode, insn)
2649 enum machine_mode mem_mode;
2652 enum rtx_code code = GET_CODE (x);
2653 struct elim_table *ep;
2678 /* First handle the case where we encounter a bare register that
2679 is eliminable. Replace it with a PLUS. */
2680 if (regno < FIRST_PSEUDO_REGISTER)
2682 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2684 if (ep->from_rtx == x && ep->can_eliminate)
2687 /* Refs inside notes don't count for this purpose. */
2688 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2689 || GET_CODE (insn) == INSN_LIST)))
2690 ep->ref_outside_mem = 1;
2691 return plus_constant (ep->to_rtx, ep->previous_offset);
2695 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2696 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2698 /* In this case, find_reloads would attempt to either use an
2699 incorrect address (if something is not at its initial offset)
2700 or substitute a replaced address into an insn (which loses
2701 if the offset is changed by some later action). So we simply
2702 return the replaced stack slot (assuming it is changed by
2703 elimination) and ignore the fact that this is actually a
2704 reference to the pseudo. Ensure we make a copy of the
2705 address in case it is shared. */
2706 new = eliminate_regs (reg_equiv_memory_loc[regno],
2708 if (new != reg_equiv_memory_loc[regno])
2710 cannot_omit_stores[regno] = 1;
2711 return copy_rtx (new);
2717 /* If this is the sum of an eliminable register and a constant, rework
2719 if (GET_CODE (XEXP (x, 0)) == REG
2720 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2721 && CONSTANT_P (XEXP (x, 1)))
2723 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2725 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2728 /* Refs inside notes don't count for this purpose. */
2729 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2730 || GET_CODE (insn) == INSN_LIST)))
2731 ep->ref_outside_mem = 1;
2733 /* The only time we want to replace a PLUS with a REG (this
2734 occurs when the constant operand of the PLUS is the negative
2735 of the offset) is when we are inside a MEM. We won't want
2736 to do so at other times because that would change the
2737 structure of the insn in a way that reload can't handle.
2738 We special-case the commonest situation in
2739 eliminate_regs_in_insn, so just replace a PLUS with a
2740 PLUS here, unless inside a MEM. */
2741 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2742 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2745 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2746 plus_constant (XEXP (x, 1),
2747 ep->previous_offset));
2750 /* If the register is not eliminable, we are done since the other
2751 operand is a constant. */
2755 /* If this is part of an address, we want to bring any constant to the
2756 outermost PLUS. We will do this by doing register replacement in
2757 our operands and seeing if a constant shows up in one of them.
2759 We assume here this is part of an address (or a "load address" insn)
2760 since an eliminable register is not likely to appear in any other
2763 If we have (plus (eliminable) (reg)), we want to produce
2764 (plus (plus (replacement) (reg) (const))). If this was part of a
2765 normal add insn, (plus (replacement) (reg)) will be pushed as a
2766 reload. This is the desired action. */
2769 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2770 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2772 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2774 /* If one side is a PLUS and the other side is a pseudo that
2775 didn't get a hard register but has a reg_equiv_constant,
2776 we must replace the constant here since it may no longer
2777 be in the position of any operand. */
2778 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2779 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2780 && reg_renumber[REGNO (new1)] < 0
2781 && reg_equiv_constant != 0
2782 && reg_equiv_constant[REGNO (new1)] != 0)
2783 new1 = reg_equiv_constant[REGNO (new1)];
2784 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2785 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2786 && reg_renumber[REGNO (new0)] < 0
2787 && reg_equiv_constant[REGNO (new0)] != 0)
2788 new0 = reg_equiv_constant[REGNO (new0)];
2790 new = form_sum (new0, new1);
2792 /* As above, if we are not inside a MEM we do not want to
2793 turn a PLUS into something else. We might try to do so here
2794 for an addition of 0 if we aren't optimizing. */
2795 if (! mem_mode && GET_CODE (new) != PLUS)
2796 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2804 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2807 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2808 if (new != XEXP (x, 0))
2809 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2812 /* ... fall through ... */
2815 /* Now do eliminations in the rest of the chain. If this was
2816 an EXPR_LIST, this might result in allocating more memory than is
2817 strictly needed, but it simplifies the code. */
2820 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2821 if (new != XEXP (x, 1))
2822 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2830 case DIV: case UDIV:
2831 case MOD: case UMOD:
2832 case AND: case IOR: case XOR:
2833 case LSHIFT: case ASHIFT: case ROTATE:
2834 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2836 case GE: case GT: case GEU: case GTU:
2837 case LE: case LT: case LEU: case LTU:
2839 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2841 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2843 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2844 return gen_rtx (code, GET_MODE (x), new0, new1);
2852 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2853 if (ep->to_rtx == XEXP (x, 0))
2855 int size = GET_MODE_SIZE (mem_mode);
2857 /* If more bytes than MEM_MODE are pushed, account for them. */
2858 #ifdef PUSH_ROUNDING
2859 if (ep->to_rtx == stack_pointer_rtx)
2860 size = PUSH_ROUNDING (size);
2862 if (code == PRE_DEC || code == POST_DEC)
2868 /* Fall through to generic unary operation case. */
2870 case STRICT_LOW_PART:
2872 case SIGN_EXTEND: case ZERO_EXTEND:
2873 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2874 case FLOAT: case FIX:
2875 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2879 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2880 if (new != XEXP (x, 0))
2881 return gen_rtx (code, GET_MODE (x), new);
2885 /* Similar to above processing, but preserve SUBREG_WORD.
2886 Convert (subreg (mem)) to (mem) if not paradoxical.
2887 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2888 pseudo didn't get a hard reg, we must replace this with the
2889 eliminated version of the memory location because push_reloads
2890 may do the replacement in certain circumstances. */
2891 if (GET_CODE (SUBREG_REG (x)) == REG
2892 && (GET_MODE_SIZE (GET_MODE (x))
2893 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2894 && reg_equiv_memory_loc != 0
2895 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2897 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2900 /* If we didn't change anything, we must retain the pseudo. */
2901 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2904 /* Otherwise, ensure NEW isn't shared in case we have to reload
2906 new = copy_rtx (new);
2909 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2911 if (new != XEXP (x, 0))
2913 if (GET_CODE (new) == MEM
2914 && (GET_MODE_SIZE (GET_MODE (x))
2915 <= GET_MODE_SIZE (GET_MODE (new)))
2916 #ifdef LOAD_EXTEND_OP
2917 /* On these machines we will be reloading what is
2918 inside the SUBREG if it originally was a pseudo and
2919 the inner and outer modes are both a word or
2920 smaller. So leave the SUBREG then. */
2921 && ! (GET_CODE (SUBREG_REG (x)) == REG
2922 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2923 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2927 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2928 enum machine_mode mode = GET_MODE (x);
2930 #if BYTES_BIG_ENDIAN
2931 offset += (MIN (UNITS_PER_WORD,
2932 GET_MODE_SIZE (GET_MODE (new)))
2933 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2936 PUT_MODE (new, mode);
2937 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2941 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2947 /* If clobbering a register that is the replacement register for an
2948 elimination we still think can be performed, note that it cannot
2949 be performed. Otherwise, we need not be concerned about it. */
2950 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2951 if (ep->to_rtx == XEXP (x, 0))
2952 ep->can_eliminate = 0;
2954 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2955 if (new != XEXP (x, 0))
2956 return gen_rtx (code, GET_MODE (x), new);
2962 /* Properly handle sharing input and constraint vectors. */
2963 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2965 /* When we come to a new vector not seen before,
2966 scan all its elements; keep the old vector if none
2967 of them changes; otherwise, make a copy. */
2968 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2969 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2970 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2971 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2974 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2975 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2978 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2979 new_asm_operands_vec = old_asm_operands_vec;
2981 new_asm_operands_vec
2982 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2985 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2986 if (new_asm_operands_vec == old_asm_operands_vec)
2989 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2990 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2991 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2992 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2993 ASM_OPERANDS_SOURCE_FILE (x),
2994 ASM_OPERANDS_SOURCE_LINE (x));
2995 new->volatil = x->volatil;
3000 /* Check for setting a register that we know about. */
3001 if (GET_CODE (SET_DEST (x)) == REG)
3003 /* See if this is setting the replacement register for an
3006 If DEST is the hard frame pointer, we do nothing because we
3007 assume that all assignments to the frame pointer are for
3008 non-local gotos and are being done at a time when they are valid
3009 and do not disturb anything else. Some machines want to
3010 eliminate a fake argument pointer (or even a fake frame pointer)
3011 with either the real frame or the stack pointer. Assignments to
3012 the hard frame pointer must not prevent this elimination. */
3014 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3016 if (ep->to_rtx == SET_DEST (x)
3017 && SET_DEST (x) != hard_frame_pointer_rtx)
3019 /* If it is being incremented, adjust the offset. Otherwise,
3020 this elimination can't be done. */
3021 rtx src = SET_SRC (x);
3023 if (GET_CODE (src) == PLUS
3024 && XEXP (src, 0) == SET_DEST (x)
3025 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3026 ep->offset -= INTVAL (XEXP (src, 1));
3028 ep->can_eliminate = 0;
3031 /* Now check to see we are assigning to a register that can be
3032 eliminated. If so, it must be as part of a PARALLEL, since we
3033 will not have been called if this is a single SET. So indicate
3034 that we can no longer eliminate this reg. */
3035 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3037 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3038 ep->can_eliminate = 0;
3041 /* Now avoid the loop below in this common case. */
3043 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3044 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3046 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3047 write a CLOBBER insn. */
3048 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3049 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3050 && GET_CODE (insn) != INSN_LIST)
3051 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3053 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3054 return gen_rtx (SET, VOIDmode, new0, new1);
3060 /* Our only special processing is to pass the mode of the MEM to our
3061 recursive call and copy the flags. While we are here, handle this
3062 case more efficiently. */
3063 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3064 if (new != XEXP (x, 0))
3066 new = gen_rtx (MEM, GET_MODE (x), new);
3067 new->volatil = x->volatil;
3068 new->unchanging = x->unchanging;
3069 new->in_struct = x->in_struct;
3076 /* Process each of our operands recursively. If any have changed, make a
3078 fmt = GET_RTX_FORMAT (code);
3079 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3083 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3084 if (new != XEXP (x, i) && ! copied)
3086 rtx new_x = rtx_alloc (code);
3087 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3088 + (sizeof (new_x->fld[0])
3089 * GET_RTX_LENGTH (code))));
3095 else if (*fmt == 'E')
3098 for (j = 0; j < XVECLEN (x, i); j++)
3100 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3101 if (new != XVECEXP (x, i, j) && ! copied_vec)
3103 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3104 &XVECEXP (x, i, 0));
3107 rtx new_x = rtx_alloc (code);
3108 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3109 + (sizeof (new_x->fld[0])
3110 * GET_RTX_LENGTH (code))));
3114 XVEC (x, i) = new_v;
3117 XVECEXP (x, i, j) = new;
3125 /* Scan INSN and eliminate all eliminable registers in it.
3127 If REPLACE is nonzero, do the replacement destructively. Also
3128 delete the insn as dead if it is setting an eliminable register.
3130 If REPLACE is zero, do all our allocations in reload_obstack.
3132 If no eliminations were done and this insn doesn't require any elimination
3133 processing (these are not identical conditions: it might be updating sp,
3134 but not referencing fp; this needs to be seen during reload_as_needed so
3135 that the offset between fp and sp can be taken into consideration), zero
3136 is returned. Otherwise, 1 is returned. */
3139 eliminate_regs_in_insn (insn, replace)
3143 rtx old_body = PATTERN (insn);
3146 struct elim_table *ep;
3149 push_obstacks (&reload_obstack, &reload_obstack);
3151 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3152 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3154 /* Check for setting an eliminable register. */
3155 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3156 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3158 /* In this case this insn isn't serving a useful purpose. We
3159 will delete it in reload_as_needed once we know that this
3160 elimination is, in fact, being done.
3162 If REPLACE isn't set, we can't delete this insn, but needn't
3163 process it since it won't be used unless something changes. */
3165 delete_dead_insn (insn);
3170 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3171 in the insn is the negative of the offset in FROM. Substitute
3172 (set (reg) (reg to)) for the insn and change its code.
3174 We have to do this here, rather than in eliminate_regs, so that we can
3175 change the insn code. */
3177 if (GET_CODE (SET_SRC (old_body)) == PLUS
3178 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3179 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3180 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3182 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
3183 && ep->can_eliminate)
3185 /* We must stop at the first elimination that will be used.
3186 If this one would replace the PLUS with a REG, do it
3187 now. Otherwise, quit the loop and let eliminate_regs
3188 do its normal replacement. */
3189 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3191 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3192 SET_DEST (old_body), ep->to_rtx);
/* Force re-recognition since the pattern changed shape. */
3193 INSN_CODE (insn) = -1;
3202 old_asm_operands_vec = 0;
3204 /* Replace the body of this insn with a substituted form. If we changed
3205 something, return non-zero.
3207 If we are replacing a body that was a (set X (plus Y Z)), try to
3208 re-recognize the insn. We do this in case we had a simple addition
3209 but now can do this as a load-address. This saves an insn in this
3212 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3213 if (new_body != old_body)
3215 /* If we aren't replacing things permanently and we changed something,
3216 make another copy to ensure that all the RTL is new. Otherwise
3217 things can go wrong if find_reload swaps commutative operands
3218 and one is inside RTL that has been copied while the other is not. */
3220 /* Don't copy an asm_operands because (1) there's no need and (2)
3221 copy_rtx can't do it properly when there are multiple outputs. */
3222 if (! replace && asm_noperands (old_body) < 0)
3223 new_body = copy_rtx (new_body);
3225 /* If we had a move insn but now we don't, rerecognize it. */
3226 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3227 && (GET_CODE (new_body) != SET
3228 || GET_CODE (SET_SRC (new_body)) != REG))
3229 /* If this was a load from or store to memory, compare
3230 the MEM in recog_operand to the one in the insn. If they
3231 are not equal, then rerecognize the insn. */
3232 || (GET_CODE (old_body) == SET
3233 && ((GET_CODE (SET_SRC (old_body)) == MEM
3234 && SET_SRC (old_body) != recog_operand[1])
3235 || (GET_CODE (SET_DEST (old_body)) == MEM
3236 && SET_DEST (old_body) != recog_operand[0])))
3237 /* If this was an add insn before, rerecognize. */
3239 (GET_CODE (old_body) == SET
3240 && GET_CODE (SET_SRC (old_body)) == PLUS))
3242 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3243 /* If recognition fails, store the new body anyway.
3244 It's normal to have recognition failures here
3245 due to bizarre memory addresses; reloading will fix them. */
3246 PATTERN (insn) = new_body;
3249 PATTERN (insn) = new_body;
3254 /* Loop through all elimination pairs. See if any have changed and
3255 recalculate the number not at initial offset.
3257 Compute the maximum offset (minimum offset if the stack does not
3258 grow downward) for each elimination pair.
3260 We also detect a case where register elimination cannot be done,
3261 namely, if a register would be both changed and referenced outside a MEM
3262 in the resulting insn since such an insn is often undefined and, even if
3263 not, we cannot know what meaning will be given to it. Note that it is
3264 valid to have a register used in an address in an insn that changes it
3265 (presumably with a pre- or post-increment or decrement).
3267 If anything changes, return nonzero. */
3269 num_not_at_initial_offset = 0;
3270 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3272 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3273 ep->can_eliminate = 0;
3275 ep->ref_outside_mem = 0;
3277 if (ep->previous_offset != ep->offset)
3280 ep->previous_offset = ep->offset;
3281 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3282 num_not_at_initial_offset++;
3284 #ifdef STACK_GROWS_DOWNWARD
3285 ep->max_offset = MAX (ep->max_offset, ep->offset);
3287 ep->max_offset = MIN (ep->max_offset, ep->offset);
3292 /* If we changed something, perform elimination in REG_NOTES. This is
3293 needed even when REPLACE is zero because a REG_DEAD note might refer
3294 to a register that we eliminate and could cause a different number
3295 of spill registers to be needed in the final reload pass than in
3297 if (val && REG_NOTES (insn) != 0)
3298 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3306 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3307 replacement we currently believe is valid, mark it as not eliminable if X
3308 modifies DEST in any way other than by adding a constant integer to it.
3310 If DEST is the frame pointer, we do nothing because we assume that
3311 all assignments to the hard frame pointer are nonlocal gotos and are being
3312 done at a time when they are valid and do not disturb anything else.
3313 Some machines want to eliminate a fake argument pointer with either the
3314 frame or stack pointer. Assignments to the hard frame pointer must not
3315 prevent this elimination.
3317 Called via note_stores from reload before starting its passes to scan
3318 the insns of the function. */
3321 mark_not_eliminable (dest, x)
3327 /* A SUBREG of a hard register here is just changing its mode. We should
3328 not see a SUBREG of an eliminable hard register, but check just in
3330 if (GET_CODE (dest) == SUBREG)
3331 dest = SUBREG_REG (dest);
/* Stores to the hard frame pointer are exempt -- see header comment. */
3333 if (dest == hard_frame_pointer_rtx)
3336 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
/* Kill the elimination unless X is exactly (set dest (plus dest const)),
   the only modification form the elimination machinery can track. */
3337 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3338 && (GET_CODE (x) != SET
3339 || GET_CODE (SET_SRC (x)) != PLUS
3340 || XEXP (SET_SRC (x), 0) != dest
3341 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3343 reg_eliminate[i].can_eliminate_previous
3344 = reg_eliminate[i].can_eliminate = 0;
3349 /* Kick all pseudos out of hard register REGNO.
3350 If GLOBAL is nonzero, try to find someplace else to put them.
3351 If DUMPFILE is nonzero, log actions taken on that file.
3353 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3354 because we found we can't eliminate some register. In that case, no pseudos
3355 are allowed to be in the register, even if they are only in a block that
3356 doesn't require spill registers, unlike the case when we are spilling this
3357 hard reg to produce another spill register.
3359 Return nonzero if any pseudos needed to be kicked out. */
3362 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3368 enum reg_class class = REGNO_REG_CLASS (regno);
3369 int something_changed = 0;
3372 SET_HARD_REG_BIT (forbidden_regs, regno);
3374 /* Spill every pseudo reg that was allocated to this reg
3375 or to something that overlaps this reg. */
3377 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
/* Overlap test: the pseudo's hard-reg span [reg_renumber[i],
   reg_renumber[i] + nregs) must cover REGNO. */
3378 if (reg_renumber[i] >= 0
3379 && reg_renumber[i] <= regno
3381 + HARD_REGNO_NREGS (reg_renumber[i],
3382 PSEUDO_REGNO_MODE (i))
3385 /* If this register belongs solely to a basic block which needed no
3386 spilling of any class that this register is contained in,
3387 leave it be, unless we are spilling this register because
3388 it was a hard register that can't be eliminated. */
3390 if (! cant_eliminate
3391 && basic_block_needs[0]
3392 && reg_basic_block[i] >= 0
3393 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3397 for (p = reg_class_superclasses[(int) class];
3398 *p != LIM_REG_CLASSES; p++)
3399 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3402 if (*p == LIM_REG_CLASSES)
3406 /* Mark it as no longer having a hard register home. */
3407 reg_renumber[i] = -1;
3408 /* We will need to scan everything again. */
3409 something_changed = 1;
3411 retry_global_alloc (i, forbidden_regs);
3413 alter_reg (i, regno);
3416 if (reg_renumber[i] == -1)
3417 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3419 fprintf (dumpfile, " Register %d now in %d.\n\n",
3420 i, reg_renumber[i]);
/* Also discard SCRATCH rtx's that were assigned to this hard reg. */
3423 for (i = 0; i < scratch_list_length; i++)
3425 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3427 if (! cant_eliminate && basic_block_needs[0]
3428 && ! basic_block_needs[(int) class][scratch_block[i]])
3432 for (p = reg_class_superclasses[(int) class];
3433 *p != LIM_REG_CLASSES; p++)
3434 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3437 if (*p == LIM_REG_CLASSES)
3440 PUT_CODE (scratch_list[i], SCRATCH);
3441 scratch_list[i] = 0;
3442 something_changed = 1;
3447 return something_changed;
3450 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
3453 scan_paradoxical_subregs (x)
3458 register enum rtx_code code = GET_CODE (x);
/* A paradoxical SUBREG is one whose outer mode is wider than the mode
   of the inner register; record the widest such reference per pseudo. */
3475 if (GET_CODE (SUBREG_REG (x)) == REG
3476 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3477 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3478 = GET_MODE_SIZE (GET_MODE (x));
/* Recurse into all rtx operands and vectors. */
3482 fmt = GET_RTX_FORMAT (code);
3483 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3486 scan_paradoxical_subregs (XEXP (x, i));
3487 else if (fmt[i] == 'E')
3490 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3491 scan_paradoxical_subregs (XVECEXP (x, i, j));
/* qsort comparison function: order struct hard_reg_n_uses entries by
   increasing use count, breaking ties by register number so the sort
   is deterministic. */
3497 hard_reg_use_compare (p1, p2)
3498 struct hard_reg_n_uses *p1, *p2;
3500 int tem = p1->uses - p2->uses;
3501 if (tem != 0) return tem;
3502 /* If regs are equally good, sort by regno,
3503 so that the results of qsort leave nothing to chance. */
3504 return p1->regno - p2->regno;
3507 /* Choose the order to consider regs for use as reload registers
3508 based on how much trouble would be caused by spilling one.
3509 Store them in order of decreasing preference in potential_reload_regs. */
3512 order_regs_for_reload ()
3518 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3520 CLEAR_HARD_REG_SET (bad_spill_regs);
3522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3523 potential_reload_regs[i] = -1;
3525 /* Count number of uses of each hard reg by pseudo regs allocated to it
3526 and then order them by decreasing use. */
3528 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3530 hard_reg_n_uses[i].uses = 0;
3531 hard_reg_n_uses[i].regno = i;
3534 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3536 int regno = reg_renumber[i];
3539 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3541 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
/* LARGE accumulates total refs; used below as a "worse than any real
   use count" penalty base. */
3543 large += reg_n_refs[i];
3546 /* Now fixed registers (which cannot safely be used for reloading)
3547 get a very high use count so they will be considered least desirable.
3548 Registers used explicitly in the rtl code are almost as bad. */
3550 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3554 hard_reg_n_uses[i].uses += 2 * large + 2;
3555 SET_HARD_REG_BIT (bad_spill_regs, i);
3557 else if (regs_explicitly_used[i])
3559 hard_reg_n_uses[i].uses += large + 1;
3560 #ifndef SMALL_REGISTER_CLASSES
3561 /* ??? We are doing this here because of the potential that
3562 bad code may be generated if a register explicitly used in
3563 an insn was used as a spill register for that insn. But
3564 not using these as spill registers may lose on some machine.
3565 We'll have to see how this works out. */
3566 SET_HARD_REG_BIT (bad_spill_regs, i)
3570 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3571 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3573 #ifdef ELIMINABLE_REGS
3574 /* If registers other than the frame pointer are eliminable, mark them as
3576 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3578 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3579 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3583 /* Prefer registers not so far used, for use in temporary loading.
3584 Among them, if REG_ALLOC_ORDER is defined, use that order.
3585 Otherwise, prefer registers not preserved by calls. */
3587 #ifdef REG_ALLOC_ORDER
3588 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3590 int regno = reg_alloc_order[i];
3592 if (hard_reg_n_uses[regno].uses == 0)
3593 potential_reload_regs[o++] = regno;
3596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3598 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3599 potential_reload_regs[o++] = i;
3601 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3603 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3604 potential_reload_regs[o++] = i;
3608 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3609 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3611 /* Now add the regs that are already used,
3612 preferring those used less often. The fixed and otherwise forbidden
3613 registers will be at the end of this list. */
3615 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3616 if (hard_reg_n_uses[i].uses != 0)
3617 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3620 /* Reload pseudo-registers into hard regs around each insn as needed.
3621 Additional register load insns are output before the insn that needs it
3622 and perhaps store insns after insns that modify the reloaded pseudo reg.
3624 reg_last_reload_reg and reg_reloaded_contents keep track of
3625 which registers are already available in reload registers.
3626 We update these for the reloads that we perform,
3627 as the insns are scanned. */
3630 reload_as_needed (first, live_known)
3640 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3641 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3642 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3643 reg_has_output_reload = (char *) alloca (max_regno);
/* -1 in reg_reloaded_contents means "contents unknown". */
3644 for (i = 0; i < n_spills; i++)
3646 reg_reloaded_contents[i] = -1;
3647 reg_reloaded_insn[i] = 0;
3650 /* Reset all offsets on eliminable registers to their initial values. */
3651 #ifdef ELIMINABLE_REGS
3652 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3654 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3655 reg_eliminate[i].initial_offset);
3656 reg_eliminate[i].previous_offset
3657 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3660 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3661 reg_eliminate[0].previous_offset
3662 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3665 num_not_at_initial_offset = 0;
3667 for (insn = first; insn;)
3669 register rtx next = NEXT_INSN (insn);
3671 /* Notice when we move to a new basic block. */
3672 if (live_known && this_block + 1 < n_basic_blocks
3673 && insn == basic_block_head[this_block+1])
3676 /* If we pass a label, copy the offsets from the label information
3677 into the current offsets of each elimination. */
3678 if (GET_CODE (insn) == CODE_LABEL)
3680 num_not_at_initial_offset = 0;
3681 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3683 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3684 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3685 if (reg_eliminate[i].can_eliminate
3686 && (reg_eliminate[i].offset
3687 != reg_eliminate[i].initial_offset))
3688 num_not_at_initial_offset++;
3692 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3694 rtx avoid_return_reg = 0;
3696 #ifdef SMALL_REGISTER_CLASSES
3697 /* Set avoid_return_reg if this is an insn
3698 that might use the value of a function call. */
3699 if (GET_CODE (insn) == CALL_INSN)
3701 if (GET_CODE (PATTERN (insn)) == SET)
3702 after_call = SET_DEST (PATTERN (insn));
3703 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3704 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3705 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3709 else if (after_call != 0
3710 && !(GET_CODE (PATTERN (insn)) == SET
3711 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3713 if (reg_mentioned_p (after_call, PATTERN (insn)))
3714 avoid_return_reg = after_call;
3717 #endif /* SMALL_REGISTER_CLASSES */
3719 /* If this is a USE and CLOBBER of a MEM, ensure that any
3720 references to eliminable registers have been removed. */
3722 if ((GET_CODE (PATTERN (insn)) == USE
3723 || GET_CODE (PATTERN (insn)) == CLOBBER)
3724 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3725 XEXP (XEXP (PATTERN (insn), 0), 0)
3726 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3727 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3729 /* If we need to do register elimination processing, do so.
3730 This might delete the insn, in which case we are done. */
/* NOTE(review): GET_MODE of an insn is overloaded here as a flag --
   QImode appears to mark insns needing elimination; confirm against
   the setting code earlier in this file. */
3731 if (num_eliminable && GET_MODE (insn) == QImode)
3733 eliminate_regs_in_insn (insn, 1);
3734 if (GET_CODE (insn) == NOTE)
3741 if (GET_MODE (insn) == VOIDmode)
3743 /* First find the pseudo regs that must be reloaded for this insn.
3744 This info is returned in the tables reload_... (see reload.h).
3745 Also modify the body of INSN by substituting RELOAD
3746 rtx's for those pseudo regs. */
3749 bzero (reg_has_output_reload, max_regno);
3750 CLEAR_HARD_REG_SET (reg_is_output_reload);
3752 find_reloads (insn, 1, spill_indirect_levels, live_known,
3758 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3762 /* If this block has not had spilling done for a
3763 particular class and we have any non-optionals that need a
3764 spill reg in that class, abort. */
3766 for (class = 0; class < N_REG_CLASSES; class++)
3767 if (basic_block_needs[class] != 0
3768 && basic_block_needs[class][this_block] == 0)
3769 for (i = 0; i < n_reloads; i++)
3770 if (class == (int) reload_reg_class[i]
3771 && reload_reg_rtx[i] == 0
3772 && ! reload_optional[i]
3773 && (reload_in[i] != 0 || reload_out[i] != 0
3774 || reload_secondary_p[i] != 0))
3777 /* Now compute which reload regs to reload them into. Perhaps
3778 reusing reload regs from previous insns, or else output
3779 load insns to reload them. Maybe output store insns too.
3780 Record the choices of reload reg in reload_reg_rtx. */
3781 choose_reload_regs (insn, avoid_return_reg);
3783 #ifdef SMALL_REGISTER_CLASSES
3784 /* Merge any reloads that we didn't combine for fear of
3785 increasing the number of spill registers needed but now
3786 discover can be safely merged. */
3787 merge_assigned_reloads (insn);
3790 /* Generate the insns to reload operands into or out of
3791 their reload regs. */
3792 emit_reload_insns (insn);
3794 /* Substitute the chosen reload regs from reload_reg_rtx
3795 into the insn's body (or perhaps into the bodies of other
3796 load and store insn that we just made for reloading
3797 and that we moved the structure into). */
3800 /* If this was an ASM, make sure that all the reload insns
3801 we have generated are valid. If not, give an error
3804 if (asm_noperands (PATTERN (insn)) >= 0)
3805 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3806 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3807 && (recog_memoized (p) < 0
3808 || (insn_extract (p),
3809 ! constrain_operands (INSN_CODE (p), 1))))
3811 error_for_asm (insn,
3812 "`asm' operand requires impossible reload");
/* Turn the bad reload insn into a deleted note. */
3814 NOTE_SOURCE_FILE (p) = 0;
3815 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3818 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3819 is no longer validly lying around to save a future reload.
3820 Note that this does not detect pseudos that were reloaded
3821 for this insn in order to be stored in
3822 (obeying register constraints). That is correct; such reload
3823 registers ARE still valid. */
3824 note_stores (PATTERN (insn), forget_old_reloads_1);
3826 /* There may have been CLOBBER insns placed after INSN. So scan
3827 between INSN and NEXT and use them to forget old reloads. */
3828 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3829 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3830 note_stores (PATTERN (x), forget_old_reloads_1);
3833 /* Likewise for regs altered by auto-increment in this insn.
3834 But note that the reg-notes are not changed by reloading:
3835 they still contain the pseudo-regs, not the spill regs. */
3836 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3837 if (REG_NOTE_KIND (x) == REG_INC)
3839 /* See if this pseudo reg was reloaded in this insn.
3840 If so, its last-reload info is still valid
3841 because it is based on this insn's reload. */
3842 for (i = 0; i < n_reloads; i++)
3843 if (reload_out[i] == XEXP (x, 0))
3847 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3851 /* A reload reg's contents are unknown after a label. */
3852 if (GET_CODE (insn) == CODE_LABEL)
3853 for (i = 0; i < n_spills; i++)
3855 reg_reloaded_contents[i] = -1;
3856 reg_reloaded_insn[i] = 0;
3859 /* Don't assume a reload reg is still good after a call insn
3860 if it is a call-used reg. */
3861 else if (GET_CODE (insn) == CALL_INSN)
3862 for (i = 0; i < n_spills; i++)
3863 if (call_used_regs[spill_regs[i]])
3865 reg_reloaded_contents[i] = -1;
3866 reg_reloaded_insn[i] = 0;
3869 /* In case registers overlap, allow certain insns to invalidate
3870 particular hard registers. */
3872 #ifdef INSN_CLOBBERS_REGNO_P
3873 for (i = 0 ; i < n_spills ; i++)
3874 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3876 reg_reloaded_contents[i] = -1;
3877 reg_reloaded_insn[i] = 0;
3889 /* Discard all record of any value reloaded from X,
3890 or reloaded in X from someplace else;
3891 unless X is an output reload reg of the current insn.
3893 X may be a hard reg (the reload reg)
3894 or it may be a pseudo reg that was reloaded from. */
3897 forget_old_reloads_1 (x, ignored)
3905 /* note_stores does give us subregs of hard regs. */
3906 while (GET_CODE (x) == SUBREG)
3908 offset += SUBREG_WORD (x);
3912 if (GET_CODE (x) != REG)
3915 regno = REGNO (x) + offset;
3917 if (regno >= FIRST_PSEUDO_REGISTER)
/* Hard register: invalidate every spill reg it overlaps. */
3922 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3923 /* Storing into a spilled-reg invalidates its contents.
3924 This can happen if a block-local pseudo is allocated to that reg
3925 and it wasn't spilled because this block's total need is 0.
3926 Then some insn might have an optional reload and use this reg. */
3927 for (i = 0; i < nr; i++)
3928 if (spill_reg_order[regno + i] >= 0
3929 /* But don't do this if the reg actually serves as an output
3930 reload reg in the current instruction. */
3932 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3934 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3935 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3939 /* Since value of X has changed,
3940 forget any value previously copied from it. */
3943 /* But don't forget a copy if this is the output reload
3944 that establishes the copy's validity. */
3945 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3946 reg_last_reload_reg[regno + nr] = 0;
3949 /* For each reload, the mode of the reload register.
   Indexed by reload number, parallel to the reload_* tables. */
3950 static enum machine_mode reload_mode[MAX_RELOADS];
3952 /* For each reload, the largest number of registers it will require. */
3953 static int reload_nregs[MAX_RELOADS];
3955 /* Comparison function for qsort to decide which of two reloads
3956 should be handled first. *P1 and *P2 are the reload numbers.
   Returns < 0 if *P1 is more urgent than *P2, so that qsort places
   the most constrained reloads first. */
3959 reload_reg_class_lower (p1, p2)
3962 register int r1 = *p1, r2 = *p2;
3965 /* Consider required reloads before optional ones. */
3966 t = reload_optional[r1] - reload_optional[r2];
3970 /* Count all solitary classes before non-solitary ones. */
3971 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3972 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3976 /* Aside from solitaires, consider all multi-reg groups first. */
3977 t = reload_nregs[r2] - reload_nregs[r1];
3981 /* Consider reloads in order of increasing reg-class number. */
3982 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3986 /* If reloads are equally urgent, sort by reload number,
3987 so that the results of qsort leave nothing to chance. */
3991 /* The following HARD_REG_SETs indicate when each hard register is
3992 used for a reload of various parts of the current insn.
   They are consulted by reload_reg_free_p and friends to decide
   whether a candidate reload register conflicts with another reload. */
3994 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3995 static HARD_REG_SET reload_reg_used;
3996 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3997 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3998 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3999 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4000 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4001 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4002 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4003 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4004 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4005 static HARD_REG_SET reload_reg_used_in_op_addr;
4006 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4007 static HARD_REG_SET reload_reg_used_in_insn;
4008 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4009 static HARD_REG_SET reload_reg_used_in_other_addr;
4011 /* If reg is in use as a reload reg for any sort of reload. */
4012 static HARD_REG_SET reload_reg_used_at_all;
4014 /* If reg is use as an inherited reload. We just mark the first register
4016 static HARD_REG_SET reload_reg_used_for_inherit;
4018 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4019 TYPE. MODE is used to indicate how many consecutive regs are
   actually marked (HARD_REGNO_NREGS of REGNO in MODE). */
4023 mark_reload_reg_in_use (regno, opnum, type, mode)
4026 enum reload_type type;
4027 enum machine_mode mode;
4029 int nregs = HARD_REGNO_NREGS (regno, mode);
4032 for (i = regno; i < nregs + regno; i++)
/* Set the bit in the HARD_REG_SET that corresponds to TYPE. */
4037 SET_HARD_REG_BIT (reload_reg_used, i);
4040 case RELOAD_FOR_INPUT_ADDRESS:
4041 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4044 case RELOAD_FOR_OUTPUT_ADDRESS:
4045 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4048 case RELOAD_FOR_OPERAND_ADDRESS:
4049 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4052 case RELOAD_FOR_OTHER_ADDRESS:
4053 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4056 case RELOAD_FOR_INPUT:
4057 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4060 case RELOAD_FOR_OUTPUT:
4061 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4064 case RELOAD_FOR_INSN:
4065 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
/* Every marked reg is also recorded in the catch-all set. */
4069 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4073 /* Similarly, but show REGNO is no longer in use for a reload. */
4076 clear_reload_reg_in_use (regno, opnum, type, mode)
4079 enum reload_type type;
4080 enum machine_mode mode;
/* Number of consecutive hard regs a value of MODE occupies at REGNO.  */
4082 int nregs = HARD_REGNO_NREGS (regno, mode);
/* Clear each hard reg of the group from the usage set selected by TYPE.
   Mirror image of mark_reload_reg_in_use above; note the catch-all set
   reload_reg_used_at_all is not cleared here.  */
4085 for (i = regno; i < nregs + regno; i++)
4090 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4093 case RELOAD_FOR_INPUT_ADDRESS:
4094 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4097 case RELOAD_FOR_OUTPUT_ADDRESS:
4098 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4101 case RELOAD_FOR_OPERAND_ADDRESS:
4102 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4105 case RELOAD_FOR_OTHER_ADDRESS:
4106 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4109 case RELOAD_FOR_INPUT:
4110 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4113 case RELOAD_FOR_OUTPUT:
4114 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4117 case RELOAD_FOR_INSN:
4118 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4124 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4125 specified by OPNUM and TYPE. */
4128 reload_reg_free_p (regno, opnum, type)
4131 enum reload_type type;
4135 /* In use for a RELOAD_OTHER means it's not available for anything except
4136 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4137 to be used only for inputs. */
4139 if (type != RELOAD_FOR_OTHER_ADDRESS
4140 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4146 /* In use for anything means not available for a RELOAD_OTHER. */
4147 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4149 /* The other kinds of use can sometimes share a register. */
4150 case RELOAD_FOR_INPUT:
/* An input reload conflicts with insn-wide and operand-address uses.  */
4151 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4152 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4155 /* If it is used for some other input, can't use it. */
4156 for (i = 0; i < reload_n_operands; i++)
4157 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4160 /* If it is used in a later operand's address, can't use it. */
4161 for (i = opnum + 1; i < reload_n_operands; i++)
4162 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4167 case RELOAD_FOR_INPUT_ADDRESS:
4168 /* Can't use a register if it is used for an input address for this
4169 operand or used as an input in an earlier one. */
4170 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4173 for (i = 0; i < opnum; i++)
4174 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4179 case RELOAD_FOR_OUTPUT_ADDRESS:
4180 /* Can't use a register if it is used for an output address for this
4181 operand or used as an output in this or a later operand. */
4182 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4185 for (i = opnum; i < reload_n_operands; i++)
4186 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4191 case RELOAD_FOR_OPERAND_ADDRESS:
/* Conflicts with any input reload, any insn-wide reload, and any other
   operand-address reload.  */
4192 for (i = 0; i < reload_n_operands; i++)
4193 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4196 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4197 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4199 case RELOAD_FOR_OUTPUT:
4200 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4201 outputs, or an operand address for this or an earlier output. */
4202 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4205 for (i = 0; i < reload_n_operands; i++)
4206 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4209 for (i = 0; i <= opnum; i++)
4210 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4215 case RELOAD_FOR_INSN:
/* Conflicts with any input or output reload, insn-wide reloads, and
   operand-address reloads.  */
4216 for (i = 0; i < reload_n_operands; i++)
4217 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4218 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4221 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4222 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4224 case RELOAD_FOR_OTHER_ADDRESS:
/* Only other RELOAD_FOR_OTHER_ADDRESS uses conflict.  */
4225 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4230 /* Return 1 if the value in reload reg REGNO, as used by a reload
4231 needed for the part of the insn specified by OPNUM and TYPE,
4232 is not in use for a reload in any prior part of the insn.
4234 We can assume that the reload reg was already tested for availability
4235 at the time it is needed, and we should not check this again,
4236 in case the reg has already been marked in use. */
4239 reload_reg_free_before_p (regno, opnum, type)
4242 enum reload_type type;
4248 case RELOAD_FOR_OTHER_ADDRESS:
4249 /* These always come first. */
4253 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4255 /* If this use is for part of the insn,
4256 check the reg is not in use for any prior part. It is tempting
4257 to try to do this by falling through from objects that occur
4258 later in the insn to ones that occur earlier, but that will not
4259 correctly take into account the fact that here we MUST ignore
4260 things that would prevent the register from being allocated in
4261 the first place, since we know that it was allocated. */
4263 case RELOAD_FOR_OUTPUT_ADDRESS:
4264 /* Earlier reloads are for earlier outputs or their addresses,
4265 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4266 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4268 for (i = 0; i < opnum; i++)
4269 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4270 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4273 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4276 for (i = 0; i < reload_n_operands; i++)
4277 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4278 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4281 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4282 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4283 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4285 case RELOAD_FOR_OUTPUT:
4286 /* This can't be used in the output address for this operand and
4287 anything that can't be used for it, except that we've already
4288 tested for RELOAD_FOR_INSN objects. */
4290 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4293 for (i = 0; i < opnum; i++)
4294 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4295 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4298 for (i = 0; i < reload_n_operands; i++)
4299 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4300 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4301 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4304 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4306 case RELOAD_FOR_OPERAND_ADDRESS:
4307 case RELOAD_FOR_INSN:
4308 /* These can't conflict with inputs, or each other, so all we have to
4309 test is input addresses and the addresses of OTHER items. */
4311 for (i = 0; i < reload_n_operands; i++)
4312 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4315 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4317 case RELOAD_FOR_INPUT:
4318 /* The only things earlier are the address for this and
4319 earlier inputs, other inputs (which we know we don't conflict
4320 with), and addresses of RELOAD_OTHER objects. */
4322 for (i = 0; i <= opnum; i++)
4323 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4326 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4328 case RELOAD_FOR_INPUT_ADDRESS:
4329 /* Similarly, all we have to check is for use in earlier inputs'
4331 for (i = 0; i < opnum; i++)
4332 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4335 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4340 /* Return 1 if the value in reload reg REGNO, as used by a reload
4341 needed for the part of the insn specified by OPNUM and TYPE,
4342 is still available in REGNO at the end of the insn.
4344 We can assume that the reload reg was already tested for availability
4345 at the time it is needed, and we should not check this again,
4346 in case the reg has already been marked in use. */
4349 reload_reg_reaches_end_p (regno, opnum, type)
4352 enum reload_type type;
4359 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4360 its value must reach the end. */
4363 /* If this use is for part of the insn,
4364 its value reaches if no subsequent part uses the same register.
4365 Just like the above function, don't try to do this with lots
4368 case RELOAD_FOR_OTHER_ADDRESS:
4369 /* Here we check for everything else, since these don't conflict
4370 with anything else and everything comes later. */
4372 for (i = 0; i < reload_n_operands; i++)
4373 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4374 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4375 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4376 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4379 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4380 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4381 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4383 case RELOAD_FOR_INPUT_ADDRESS:
4384 /* Similar, except that we check only for this and subsequent inputs
4385 and the address of only subsequent inputs and we do not need
4386 to check for RELOAD_OTHER objects since they are known not to
4389 for (i = opnum; i < reload_n_operands; i++)
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4393 for (i = opnum + 1; i < reload_n_operands; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4397 for (i = 0; i < reload_n_operands; i++)
4398 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4399 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4402 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4403 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4405 case RELOAD_FOR_INPUT:
4406 /* Similar to input address, except we start at the next operand for
4407 both input and input address and we do not check for
4408 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4411 for (i = opnum + 1; i < reload_n_operands; i++)
4412 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4413 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4416 /* ... fall through ... */
4418 case RELOAD_FOR_OPERAND_ADDRESS:
4419 /* Check outputs and their addresses. */
4421 for (i = 0; i < reload_n_operands; i++)
4422 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4423 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4428 case RELOAD_FOR_INSN:
4429 /* These conflict with other outputs with RELOAD_OTHER. So
4430 we need only check for output addresses. */
4434 /* ... fall through ... */
4436 case RELOAD_FOR_OUTPUT:
4437 case RELOAD_FOR_OUTPUT_ADDRESS:
4438 /* We already know these can't conflict with a later output. So the
4439 only thing to check are later output addresses. */
4440 for (i = opnum + 1; i < reload_n_operands; i++)
4441 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4450 /* Vector of reload-numbers showing the order in which the reloads should
4452 short reload_order[MAX_RELOADS];
4454 /* Indexed by reload number, 1 if incoming value
4455 inherited from previous insns. */
4456 char reload_inherited[MAX_RELOADS];
4458 /* For an inherited reload, this is the insn the reload was inherited from,
4459 if we know it. Otherwise, this is 0. */
4460 rtx reload_inheritance_insn[MAX_RELOADS];
4462 /* If non-zero, this is a place to get the value of the reload,
4463 rather than using reload_in. */
4464 rtx reload_override_in[MAX_RELOADS];
4466 /* For each reload, the index in spill_regs of the spill register used,
4467 or -1 if we did not need one of the spill registers for this reload. */
4468 int reload_spill_index[MAX_RELOADS];
4470 /* Index of last register assigned as a spill register. We allocate in
4471 a round-robin fashion. */
4473 static int last_spill_reg = 0;
4475 /* Find a spill register to use as a reload register for reload R.
4476 LAST_RELOAD is non-zero if this is the last reload for the insn being
4479 Set reload_reg_rtx[R] to the register allocated.
4481 If NOERROR is nonzero, we return 1 if successful,
4482 or 0 if we couldn't find a spill reg and we didn't change anything. */
4485 allocate_reload_reg (r, insn, last_reload, noerror)
4497 /* If we put this reload ahead, thinking it is a group,
4498 then insist on finding a group. Otherwise we can grab a
4499 reg that some other reload needs.
4500 (That can happen when we have a 68000 DATA_OR_FP_REG
4501 which is a group of data regs or one fp reg.)
4502 We need not be so restrictive if there are no more reloads
4505 ??? Really it would be nicer to have smarter handling
4506 for that kind of reg class, where a problem like this is normal.
4507 Perhaps those classes should be avoided for reloading
4508 by use of more alternatives. */
4510 int force_group = reload_nregs[r] > 1 && ! last_reload;
4512 /* If we want a single register and haven't yet found one,
4513 take any reg in the right class and not in use.
4514 If we want a consecutive group, here is where we look for it.
4516 We use two passes so we can first look for reload regs to
4517 reuse, which are already in use for other reloads in this insn,
4518 and only then use additional registers.
4519 I think that maximizing reuse is needed to make sure we don't
4520 run out of reload regs. Suppose we have three reloads, and
4521 reloads A and B can share regs. These need two regs.
4522 Suppose A and B are given different regs.
4523 That leaves none for C. */
4524 for (pass = 0; pass < 2; pass++)
4526 /* I is the index in spill_regs.
4527 We advance it round-robin between insns to use all spill regs
4528 equally, so that inherited reloads have a chance
4529 of leapfrogging each other. */
4531 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4533 int class = (int) reload_reg_class[r];
4535 i = (i + 1) % n_spills;
/* Candidate spill_regs[i] must be free for this reload's part of the
   insn, belong to the needed class, and be valid in the reload mode.  */
4537 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4538 reload_when_needed[r])
4539 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4540 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4541 /* Look first for regs to share, then for unshared. But
4542 don't share regs used for inherited reloads; they are
4543 the ones we want to preserve. */
4545 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4547 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4550 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4551 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4552 (on 68000) got us two FP regs. If NR is 1,
4553 we would reject both of them. */
4555 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4556 /* If we need only one reg, we have already won. */
4559 /* But reject a single reg if we demand a group. */
4564 /* Otherwise check that as many consecutive regs as we need
4566 Also, don't use for a group registers that are
4567 needed for nongroups. */
4568 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4571 regno = spill_regs[i] + nr - 1;
4572 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4573 && spill_reg_order[regno] >= 0
4574 && reload_reg_free_p (regno, reload_opnum[r],
4575 reload_when_needed[r])
4576 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4586 /* If we found something on pass 1, omit pass 2. */
4587 if (count < n_spills)
4591 /* We should have found a spill register by now. */
4592 if (count == n_spills)
4599 /* I is the index in SPILL_REG_RTX of the reload register we are to
4600 allocate. Get an rtx for it and find its register number. */
/* Reuse the cached REG rtx if its mode matches; otherwise make one.  */
4602 new = spill_reg_rtx[i];
4604 if (new == 0 || GET_MODE (new) != reload_mode[r])
4605 spill_reg_rtx[i] = new
4606 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4608 regno = true_regnum (new);
4610 /* Detect when the reload reg can't hold the reload mode.
4611 This used to be one `if', but Sequent compiler can't handle that. */
4612 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4614 enum machine_mode test_mode = VOIDmode;
4616 test_mode = GET_MODE (reload_in[r]);
4617 /* If reload_in[r] has VOIDmode, it means we will load it
4618 in whatever mode the reload reg has: to wit, reload_mode[r].
4619 We have already tested that for validity. */
4620 /* Aside from that, we need to test that the expressions
4621 to reload from or into have modes which are valid for this
4622 reload register. Otherwise the reload insns would be invalid. */
4623 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4624 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4625 if (! (reload_out[r] != 0
4626 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4628 /* The reg is OK. */
4631 /* Mark as in use for this insn the reload regs we use
4633 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4634 reload_when_needed[r], reload_mode[r]);
4636 reload_reg_rtx[r] = new;
4637 reload_spill_index[r] = i;
4642 /* The reg is not OK. */
/* Failure path: a negative asm_noperands means the insn is not an asm,
   so an unsatisfiable reload is a compiler bug; otherwise blame the
   user's asm constraints and disable the reload.  */
4647 if (asm_noperands (PATTERN (insn)) < 0)
4648 /* It's the compiler's fault. */
4651 /* It's the user's fault; the operand's mode and constraint
4652 don't match. Disable this reload so we don't crash in final. */
4653 error_for_asm (insn,
4654 "`asm' operand constraint incompatible with operand size");
4657 reload_reg_rtx[r] = 0;
4658 reload_optional[r] = 1;
4659 reload_secondary_p[r] = 1;
4664 /* Assign hard reg targets for the pseudo-registers we must reload
4665 into hard regs for this insn.
4666 Also output the instructions to copy them in and out of the hard regs.
4668 For machines with register classes, we are responsible for
4669 finding a reload reg in the proper class. */
4672 choose_reload_regs (insn, avoid_return_reg)
4674 rtx avoid_return_reg;
4677 int max_group_size = 1;
4678 enum reg_class group_class = NO_REGS;
4681 rtx save_reload_reg_rtx[MAX_RELOADS];
4682 char save_reload_inherited[MAX_RELOADS];
4683 rtx save_reload_inheritance_insn[MAX_RELOADS];
4684 rtx save_reload_override_in[MAX_RELOADS];
4685 int save_reload_spill_index[MAX_RELOADS];
4686 HARD_REG_SET save_reload_reg_used;
4687 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4688 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4689 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4690 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4691 HARD_REG_SET save_reload_reg_used_in_op_addr;
4692 HARD_REG_SET save_reload_reg_used_in_insn;
4693 HARD_REG_SET save_reload_reg_used_in_other_addr;
4694 HARD_REG_SET save_reload_reg_used_at_all;
4696 bzero (reload_inherited, MAX_RELOADS);
4697 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4698 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4700 CLEAR_HARD_REG_SET (reload_reg_used);
4701 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4702 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4703 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4704 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4706 for (i = 0; i < reload_n_operands; i++)
4708 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4709 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4710 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4711 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4714 #ifdef SMALL_REGISTER_CLASSES
4715 /* Don't bother with avoiding the return reg
4716 if we have no mandatory reload that could use it. */
4717 if (avoid_return_reg)
4720 int regno = REGNO (avoid_return_reg);
4722 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4725 for (r = regno; r < regno + nregs; r++)
4726 if (spill_reg_order[r] >= 0)
4727 for (j = 0; j < n_reloads; j++)
4728 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4729 && (reload_in[j] != 0 || reload_out[j] != 0
4730 || reload_secondary_p[j])
4732 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4735 avoid_return_reg = 0;
4737 #endif /* SMALL_REGISTER_CLASSES */
4739 #if 0 /* Not needed, now that we can always retry without inheritance. */
4740 /* See if we have more mandatory reloads than spill regs.
4741 If so, then we cannot risk optimizations that could prevent
4742 reloads from sharing one spill register.
4744 Since we will try finding a better register than reload_reg_rtx
4745 unless it is equal to reload_in or reload_out, count such reloads. */
4749 #ifdef SMALL_REGISTER_CLASSES
4750 int tem = (avoid_return_reg != 0);
4752 for (j = 0; j < n_reloads; j++)
4753 if (! reload_optional[j]
4754 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4755 && (reload_reg_rtx[j] == 0
4756 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4757 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4764 #ifdef SMALL_REGISTER_CLASSES
4765 /* Don't use the subroutine call return reg for a reload
4766 if we are supposed to avoid it. */
4767 if (avoid_return_reg)
4769 int regno = REGNO (avoid_return_reg);
4771 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4774 for (r = regno; r < regno + nregs; r++)
4775 if (spill_reg_order[r] >= 0)
4776 SET_HARD_REG_BIT (reload_reg_used, r);
4778 #endif /* SMALL_REGISTER_CLASSES */
4780 /* In order to be certain of getting the registers we need,
4781 we must sort the reloads into order of increasing register class.
4782 Then our grabbing of reload registers will parallel the process
4783 that provided the reload registers.
4785 Also note whether any of the reloads wants a consecutive group of regs.
4786 If so, record the maximum size of the group desired and what
4787 register class contains all the groups needed by this insn. */
4789 for (j = 0; j < n_reloads; j++)
4791 reload_order[j] = j;
4792 reload_spill_index[j] = -1;
4795 = (reload_inmode[j] == VOIDmode
4796 || (GET_MODE_SIZE (reload_outmode[j])
4797 > GET_MODE_SIZE (reload_inmode[j])))
4798 ? reload_outmode[j] : reload_inmode[j];
4800 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4802 if (reload_nregs[j] > 1)
4804 max_group_size = MAX (reload_nregs[j], max_group_size);
4805 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4808 /* If we have already decided to use a certain register,
4809 don't use it in another way. */
4810 if (reload_reg_rtx[j])
4811 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4812 reload_when_needed[j], reload_mode[j]);
4816 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4818 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4819 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4820 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4821 sizeof reload_inheritance_insn);
4822 bcopy (reload_override_in, save_reload_override_in,
4823 sizeof reload_override_in);
4824 bcopy (reload_spill_index, save_reload_spill_index,
4825 sizeof reload_spill_index);
4826 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4827 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4828 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4829 reload_reg_used_in_op_addr);
4830 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4831 reload_reg_used_in_insn);
4832 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4833 reload_reg_used_in_other_addr);
4835 for (i = 0; i < reload_n_operands; i++)
4837 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4838 reload_reg_used_in_output[i]);
4839 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4840 reload_reg_used_in_input[i]);
4841 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4842 reload_reg_used_in_input_addr[i]);
4843 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4844 reload_reg_used_in_output_addr[i]);
4847 /* If -O, try first with inheritance, then turning it off.
4848 If not -O, don't do inheritance.
4849 Using inheritance when not optimizing leads to paradoxes
4850 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4851 because one side of the comparison might be inherited. */
4853 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
4855 /* Process the reloads in order of preference just found.
4856 Beyond this point, subregs can be found in reload_reg_rtx.
4858 This used to look for an existing reloaded home for all
4859 of the reloads, and only then perform any new reloads.
4860 But that could lose if the reloads were done out of reg-class order
4861 because a later reload with a looser constraint might have an old
4862 home in a register needed by an earlier reload with a tighter constraint.
4864 To solve this, we make two passes over the reloads, in the order
4865 described above. In the first pass we try to inherit a reload
4866 from a previous insn. If there is a later reload that needs a
4867 class that is a proper subset of the class being processed, we must
4868 also allocate a spill register during the first pass.
4870 Then make a second pass over the reloads to allocate any reloads
4871 that haven't been given registers yet. */
4873 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4875 for (j = 0; j < n_reloads; j++)
4877 register int r = reload_order[j];
4879 /* Ignore reloads that got marked inoperative. */
4880 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4883 /* If find_reloads chose to use reload_in or reload_out as a reload
4884 register, we don't need to choose one. Otherwise, try even if it found
4885 one since we might save an insn if we find the value lying around. */
4886 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4887 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4888 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4891 #if 0 /* No longer needed for correct operation.
4892 It might give better code, or might not; worth an experiment? */
4893 /* If this is an optional reload, we can't inherit from earlier insns
4894 until we are sure that any non-optional reloads have been allocated.
4895 The following code takes advantage of the fact that optional reloads
4896 are at the end of reload_order. */
4897 if (reload_optional[r] != 0)
4898 for (i = 0; i < j; i++)
4899 if ((reload_out[reload_order[i]] != 0
4900 || reload_in[reload_order[i]] != 0
4901 || reload_secondary_p[reload_order[i]])
4902 && ! reload_optional[reload_order[i]]
4903 && reload_reg_rtx[reload_order[i]] == 0)
4904 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4907 /* First see if this pseudo is already available as reloaded
4908 for a previous insn. We cannot try to inherit for reloads
4909 that are smaller than the maximum number of registers needed
4910 for groups unless the register we would allocate cannot be used
4913 We could check here to see if this is a secondary reload for
4914 an object that is already in a register of the desired class.
4915 This would avoid the need for the secondary reload register.
4916 But this is complex because we can't easily determine what
4917 objects might want to be loaded via this reload. So let a register
4918 be allocated here. In `emit_reload_insns' we suppress one of the
4919 loads in the case described above. */
4923 register int regno = -1;
4924 enum machine_mode mode;
4926 if (reload_in[r] == 0)
4928 else if (GET_CODE (reload_in[r]) == REG)
4930 regno = REGNO (reload_in[r]);
4931 mode = GET_MODE (reload_in[r]);
4933 else if (GET_CODE (reload_in_reg[r]) == REG)
4935 regno = REGNO (reload_in_reg[r]);
4936 mode = GET_MODE (reload_in_reg[r]);
4939 /* This won't work, since REGNO can be a pseudo reg number.
4940 Also, it takes much more hair to keep track of all the things
4941 that can invalidate an inherited reload of part of a pseudoreg. */
4942 else if (GET_CODE (reload_in[r]) == SUBREG
4943 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4944 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4947 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4949 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4951 if (reg_reloaded_contents[i] == regno
4952 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4953 >= GET_MODE_SIZE (mode))
4954 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4955 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4957 && (reload_nregs[r] == max_group_size
4958 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4960 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4961 reload_when_needed[r])
4962 && reload_reg_free_before_p (spill_regs[i],
4964 reload_when_needed[r]))
4966 /* If a group is needed, verify that all the subsequent
4967 registers still have their values intact. */
4969 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4972 for (k = 1; k < nr; k++)
4973 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4981 /* We found a register that contains the
4982 value we need. If this register is the
4983 same as an `earlyclobber' operand of the
4984 current insn, just mark it as a place to
4985 reload from since we can't use it as the
4986 reload register itself. */
4988 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4989 if (reg_overlap_mentioned_for_reload_p
4990 (reg_last_reload_reg[regno],
4991 reload_earlyclobbers[i1]))
4994 if (i1 != n_earlyclobbers
4995 /* Don't really use the inherited spill reg
4996 if we need it wider than we've got it. */
4997 || (GET_MODE_SIZE (reload_mode[r])
4998 > GET_MODE_SIZE (mode)))
4999 reload_override_in[r] = reg_last_reload_reg[regno];
5003 /* We can use this as a reload reg. */
5004 /* Mark the register as in use for this part of
5006 mark_reload_reg_in_use (spill_regs[i],
5008 reload_when_needed[r],
5010 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5011 reload_inherited[r] = 1;
5012 reload_inheritance_insn[r]
5013 = reg_reloaded_insn[i];
5014 reload_spill_index[r] = i;
5015 for (k = 0; k < nr; k++)
5016 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5024 /* Here's another way to see if the value is already lying around. */
5026 && reload_in[r] != 0
5027 && ! reload_inherited[r]
5028 && reload_out[r] == 0
5029 && (CONSTANT_P (reload_in[r])
5030 || GET_CODE (reload_in[r]) == PLUS
5031 || GET_CODE (reload_in[r]) == REG
5032 || GET_CODE (reload_in[r]) == MEM)
5033 && (reload_nregs[r] == max_group_size
5034 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5037 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5038 -1, NULL_PTR, 0, reload_mode[r]);
5043 if (GET_CODE (equiv) == REG)
5044 regno = REGNO (equiv);
5045 else if (GET_CODE (equiv) == SUBREG)
5047 regno = REGNO (SUBREG_REG (equiv));
5048 if (regno < FIRST_PSEUDO_REGISTER)
5049 regno += SUBREG_WORD (equiv);
5055 /* If we found a spill reg, reject it unless it is free
5056 and of the desired class. */
5058 && ((spill_reg_order[regno] >= 0
5059 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5060 reload_when_needed[r]))
5061 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5065 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5068 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5071 /* We found a register that contains the value we need.
5072 If this register is the same as an `earlyclobber' operand
5073 of the current insn, just mark it as a place to reload from
5074 since we can't use it as the reload register itself. */
5077 for (i = 0; i < n_earlyclobbers; i++)
5078 if (reg_overlap_mentioned_for_reload_p (equiv,
5079 reload_earlyclobbers[i]))
5081 reload_override_in[r] = equiv;
5086 /* JRV: If the equiv register we have found is explicitly
5087 clobbered in the current insn, mark but don't use, as above. */
5089 if (equiv != 0 && regno_clobbered_p (regno, insn))
5091 reload_override_in[r] = equiv;
5095 /* If we found an equivalent reg, say no code need be generated
5096 to load it, and use it as our reload reg. */
5097 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5099 reload_reg_rtx[r] = equiv;
5100 reload_inherited[r] = 1;
5101 /* If it is a spill reg,
5102 mark the spill reg as in use for this insn. */
5103 i = spill_reg_order[regno];
5106 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5108 mark_reload_reg_in_use (regno, reload_opnum[r],
5109 reload_when_needed[r],
5111 for (k = 0; k < nr; k++)
5112 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5117 /* If we found a register to use already, or if this is an optional
5118 reload, we are done. */
5119 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5122 #if 0 /* No longer needed for correct operation. Might or might not
5123 give better code on the average. Want to experiment? */
5125 /* See if there is a later reload that has a class different from our
5126 class that intersects our class or that requires less register
5127 than our reload. If so, we must allocate a register to this
5128 reload now, since that reload might inherit a previous reload
5129 and take the only available register in our class. Don't do this
5130 for optional reloads since they will force all previous reloads
5131 to be allocated. Also don't do this for reloads that have been
5134 for (i = j + 1; i < n_reloads; i++)
5136 int s = reload_order[i];
5138 if ((reload_in[s] == 0 && reload_out[s] == 0
5139 && ! reload_secondary_p[s])
5140 || reload_optional[s])
5143 if ((reload_reg_class[s] != reload_reg_class[r]
5144 && reg_classes_intersect_p (reload_reg_class[r],
5145 reload_reg_class[s]))
5146 || reload_nregs[s] < reload_nregs[r])
5153 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5157 /* Now allocate reload registers for anything non-optional that
5158 didn't get one yet. */
5159 for (j = 0; j < n_reloads; j++)
5161 register int r = reload_order[j];
5163 /* Ignore reloads that got marked inoperative. */
5164 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5167 /* Skip reloads that already have a register allocated or are
5169 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5172 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5176 /* If that loop got all the way, we have won. */
5181 /* Loop around and try without any inheritance. */
5182 /* First undo everything done by the failed attempt
5183 to allocate with inheritance. */
5184 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5185 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5186 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5187 sizeof reload_inheritance_insn);
5188 bcopy (save_reload_override_in, reload_override_in,
5189 sizeof reload_override_in);
5190 bcopy (save_reload_spill_index, reload_spill_index,
5191 sizeof reload_spill_index);
5192 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5193 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5194 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5195 save_reload_reg_used_in_op_addr);
5196 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5197 save_reload_reg_used_in_insn);
5198 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5199 save_reload_reg_used_in_other_addr);
5201 for (i = 0; i < reload_n_operands; i++)
5203 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5204 save_reload_reg_used_in_input[i]);
5205 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5206 save_reload_reg_used_in_output[i]);
5207 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5208 save_reload_reg_used_in_input_addr[i]);
5209 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5210 save_reload_reg_used_in_output_addr[i]);
5214 /* If we thought we could inherit a reload, because it seemed that
5215 nothing else wanted the same reload register earlier in the insn,
5216 verify that assumption, now that all reloads have been assigned. */
5218 for (j = 0; j < n_reloads; j++)
5220 register int r = reload_order[j];
5222 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5223 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5225 reload_when_needed[r]))
5226 reload_inherited[r] = 0;
5228 /* If we found a better place to reload from,
5229 validate it in the same fashion, if it is a reload reg. */
5230 if (reload_override_in[r]
5231 && (GET_CODE (reload_override_in[r]) == REG
5232 || GET_CODE (reload_override_in[r]) == SUBREG))
5234 int regno = true_regnum (reload_override_in[r]);
5235 if (spill_reg_order[regno] >= 0
5236 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5237 reload_when_needed[r]))
5238 reload_override_in[r] = 0;
5242 /* Now that reload_override_in is known valid,
5243 actually override reload_in. */
5244 for (j = 0; j < n_reloads; j++)
5245 if (reload_override_in[j])
5246 reload_in[j] = reload_override_in[j];
5248 /* If this reload won't be done because it has been cancelled or is
5249 optional and not inherited, clear reload_reg_rtx so other
5250 routines (such as subst_reloads) don't get confused. */
5251 for (j = 0; j < n_reloads; j++)
5252 if (reload_reg_rtx[j] != 0
5253 && ((reload_optional[j] && ! reload_inherited[j])
5254 || (reload_in[j] == 0 && reload_out[j] == 0
5255 && ! reload_secondary_p[j])))
5257 int regno = true_regnum (reload_reg_rtx[j]);
5259 if (spill_reg_order[regno] >= 0)
5260 clear_reload_reg_in_use (regno, reload_opnum[j],
5261 reload_when_needed[j], reload_mode[j]);
5262 reload_reg_rtx[j] = 0;
5265 /* Record which pseudos and which spill regs have output reloads. */
5266 for (j = 0; j < n_reloads; j++)
5268 register int r = reload_order[j];
5270 i = reload_spill_index[r];
5272 /* I is nonneg if this reload used one of the spill regs.
5273 If reload_reg_rtx[r] is 0, this is an optional reload
5274 that we opted to ignore. */
5275 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5276 && reload_reg_rtx[r] != 0)
5278 register int nregno = REGNO (reload_out[r]);
5281 if (nregno < FIRST_PSEUDO_REGISTER)
5282 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5285 reg_has_output_reload[nregno + nr] = 1;
5289 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5291 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5294 if (reload_when_needed[r] != RELOAD_OTHER
5295 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5296 && reload_when_needed[r] != RELOAD_FOR_INSN)
5302 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5303 reloads of the same item for fear that we might not have enough reload
5304 registers. However, normally they will get the same reload register
5305 and hence actually need not be loaded twice.
5307 Here we check for the most common case of this phenomenon: when we have
5308 a number of reloads for the same object, each of which were allocated
5309 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5310 reload, and is not modified in the insn itself. If we find such,
5311 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5312 This will not increase the number of spill registers needed and will
5313 prevent redundant code. */
5315 #ifdef SMALL_REGISTER_CLASSES
/* Merge multiple reloads of the same input value that were all assigned
   the same reload register (see the explanatory comment just above this
   function): such reloads are collapsed into a single RELOAD_OTHER reload
   so the value is loaded only once.  Operates on the file-scope reload_*
   arrays describing the reloads of INSN.
   NOTE(review): this listing has gaps (several original lines were lost
   in extraction), so the visible control flow is incomplete.  */
5318 merge_assigned_reloads (insn)
5323 /* Scan all the reloads looking for ones that only load values and
5324 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5325 assigned and not modified by INSN. */
5327 for (i = 0; i < n_reloads; i++)
/* Skip reload I unless it is a pure input reload (reload_in set,
   reload_out clear), has a reload register assigned, is not already
   RELOAD_OTHER, and its reload register is not written by INSN itself.  */
5329 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5330 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5331 || reg_set_p (reload_reg_rtx[i], insn))
5334 /* Look at all other reloads. Ensure that the only use of this
5335 reload_reg_rtx is in a reload that just loads the same value
5336 as we do. Note that any secondary reloads must be of the identical
5337 class since the values, modes, and result registers are the
5338 same, so we need not do anything with any secondary reloads. */
5340 for (j = 0; j < n_reloads; j++)
/* Ignore reload J when it does not touch reload I's register at all.  */
5342 if (i == j || reload_reg_rtx[j] == 0
5343 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5347 /* If the reload regs aren't exactly the same (e.g, different modes)
5348 or if the values are different, we can't merge anything with this
5351 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5352 || reload_out[j] != 0 || reload_in[j] == 0
5353 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5357 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5358 we, in fact, found any matching reloads. */
5362 for (j = 0; j < n_reloads; j++)
5363 if (i != j && reload_reg_rtx[j] != 0
5364 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
/* Reload I absorbs reload J: I becomes RELOAD_OTHER and J's pending
   replacements are transferred to I.  */
5366 reload_when_needed[i] = RELOAD_OTHER;
5368 transfer_replacements (i, j);
5371 /* If this is now RELOAD_OTHER, look for any reloads that load
5372 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5373 if they were for inputs, RELOAD_OTHER for outputs. Note that
5374 this test is equivalent to looking for reloads for this operand
5377 if (reload_when_needed[i] == RELOAD_OTHER)
5378 for (j = 0; j < n_reloads; j++)
5379 if (reload_in[j] != 0
/* NOTE(review): under the guard at 5377, reload_when_needed[i] is known
   to equal RELOAD_OTHER, so the test on the next line is always false and
   this inner loop body can never execute; likewise the comparison at 5384
   is always false, so the result would always be RELOAD_OTHER.  The
   comment above says the decision should depend on the OTHER reload, so
   index [j] was presumably intended in both places — confirm against the
   upstream reload1.c before changing.  */
5380 && reload_when_needed[i] != RELOAD_OTHER
5381 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5383 reload_when_needed[j]
5384 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5385 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5389 #endif /* SMALL_REGISTER_CLASSES */
5391 /* Output insns to reload values in and out of the chosen reload regs. */
5394 emit_reload_insns (insn)
5398 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5399 rtx other_input_address_reload_insns = 0;
5400 rtx other_input_reload_insns = 0;
5401 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5402 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5403 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5404 rtx operand_reload_insns = 0;
5405 rtx following_insn = NEXT_INSN (insn);
5406 rtx before_insn = insn;
5408 /* Values to be put in spill_reg_store are put here first. */
5409 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5411 for (j = 0; j < reload_n_operands; j++)
5412 input_reload_insns[j] = input_address_reload_insns[j]
5413 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5415 /* If this is a CALL_INSN preceded by USE insns, any reload insns
5416 must go in front of the first USE insn, not in front of INSN. */
5418 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5419 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5420 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5421 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
5422 before_insn = PREV_INSN (before_insn);
5424 /* If INSN is followed by any CLOBBER insns made by find_reloads,
5425 put our reloads after them since they may otherwise be
5428 while (GET_CODE (following_insn) == INSN
5429 && GET_MODE (following_insn) == DImode
5430 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5431 && NEXT_INSN (following_insn) != 0)
5432 following_insn = NEXT_INSN (following_insn);
5434 /* Now output the instructions to copy the data into and out of the
5435 reload registers. Do these in the order that the reloads were reported,
5436 since reloads of base and index registers precede reloads of operands
5437 and the operands may need the base and index registers reloaded. */
5439 for (j = 0; j < n_reloads; j++)
5442 rtx oldequiv_reg = 0;
5446 if (old != 0 && ! reload_inherited[j]
5447 && ! rtx_equal_p (reload_reg_rtx[j], old)
5448 && reload_reg_rtx[j] != 0)
5450 register rtx reloadreg = reload_reg_rtx[j];
5452 enum machine_mode mode;
5455 /* Determine the mode to reload in.
5456 This is very tricky because we have three to choose from.
5457 There is the mode the insn operand wants (reload_inmode[J]).
5458 There is the mode of the reload register RELOADREG.
5459 There is the intrinsic mode of the operand, which we could find
5460 by stripping some SUBREGs.
5461 It turns out that RELOADREG's mode is irrelevant:
5462 we can change that arbitrarily.
5464 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5465 then the reload reg may not support QImode moves, so use SImode.
5466 If foo is in memory due to spilling a pseudo reg, this is safe,
5467 because the QImode value is in the least significant part of a
5468 slot big enough for a SImode. If foo is some other sort of
5469 memory reference, then it is impossible to reload this case,
5470 so previous passes had better make sure this never happens.
5472 Then consider a one-word union which has SImode and one of its
5473 members is a float, being fetched as (SUBREG:SF union:SI).
5474 We must fetch that as SFmode because we could be loading into
5475 a float-only register. In this case OLD's mode is correct.
5477 Consider an immediate integer: it has VOIDmode. Here we need
5478 to get a mode from something else.
5480 In some cases, there is a fourth mode, the operand's
5481 containing mode. If the insn specifies a containing mode for
5482 this operand, it overrides all others.
5484 I am not sure whether the algorithm here is always right,
5485 but it does the right things in those cases. */
5487 mode = GET_MODE (old);
5488 if (mode == VOIDmode)
5489 mode = reload_inmode[j];
5491 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5492 /* If we need a secondary register for this operation, see if
5493 the value is already in a register in that class. Don't
5494 do this if the secondary register will be used as a scratch
5497 if (reload_secondary_reload[j] >= 0
5498 && reload_secondary_icode[j] == CODE_FOR_nothing
5501 = find_equiv_reg (old, insn,
5502 reload_reg_class[reload_secondary_reload[j]],
5503 -1, NULL_PTR, 0, mode);
5506 /* If reloading from memory, see if there is a register
5507 that already holds the same value. If so, reload from there.
5508 We can pass 0 as the reload_reg_p argument because
5509 any other reload has either already been emitted,
5510 in which case find_equiv_reg will see the reload-insn,
5511 or has yet to be emitted, in which case it doesn't matter
5512 because we will use this equiv reg right away. */
5514 if (oldequiv == 0 && optimize
5515 && (GET_CODE (old) == MEM
5516 || (GET_CODE (old) == REG
5517 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5518 && reg_renumber[REGNO (old)] < 0)))
5519 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5520 -1, NULL_PTR, 0, mode);
5524 int regno = true_regnum (oldequiv);
5526 /* If OLDEQUIV is a spill register, don't use it for this
5527 if any other reload needs it at an earlier stage of this insn
5528 or at this stage. */
5529 if (spill_reg_order[regno] >= 0
5530 && (! reload_reg_free_p (regno, reload_opnum[j],
5531 reload_when_needed[j])
5532 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5533 reload_when_needed[j])))
5536 /* If OLDEQUIV is not a spill register,
5537 don't use it if any other reload wants it. */
5538 if (spill_reg_order[regno] < 0)
5541 for (k = 0; k < n_reloads; k++)
5542 if (reload_reg_rtx[k] != 0 && k != j
5543 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5551 /* If it is no cheaper to copy from OLDEQUIV into the
5552 reload register than it would be to move from memory,
5553 don't use it. Likewise, if we need a secondary register
5557 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5558 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5559 reload_reg_class[j])
5560 >= MEMORY_MOVE_COST (mode)))
5561 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5562 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5566 #ifdef SECONDARY_MEMORY_NEEDED
5567 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5568 REGNO_REG_CLASS (regno),
5577 else if (GET_CODE (oldequiv) == REG)
5578 oldequiv_reg = oldequiv;
5579 else if (GET_CODE (oldequiv) == SUBREG)
5580 oldequiv_reg = SUBREG_REG (oldequiv);
5582 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5583 then load RELOADREG from OLDEQUIV. Note that we cannot use
5584 gen_lowpart_common since it can do the wrong thing when
5585 RELOADREG has a multi-word mode. Note that RELOADREG
5586 must always be a REG here. */
5588 if (GET_MODE (reloadreg) != mode)
5589 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5590 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5591 oldequiv = SUBREG_REG (oldequiv);
5592 if (GET_MODE (oldequiv) != VOIDmode
5593 && mode != GET_MODE (oldequiv))
5594 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5596 /* Switch to the right place to emit the reload insns. */
5597 switch (reload_when_needed[j])
5600 where = &other_input_reload_insns;
5602 case RELOAD_FOR_INPUT:
5603 where = &input_reload_insns[reload_opnum[j]];
5605 case RELOAD_FOR_INPUT_ADDRESS:
5606 where = &input_address_reload_insns[reload_opnum[j]];
5608 case RELOAD_FOR_OUTPUT_ADDRESS:
5609 where = &output_address_reload_insns[reload_opnum[j]];
5611 case RELOAD_FOR_OPERAND_ADDRESS:
5612 where = &operand_reload_insns;
5614 case RELOAD_FOR_OTHER_ADDRESS:
5615 where = &other_input_address_reload_insns;
5621 push_to_sequence (*where);
5624 /* Auto-increment addresses must be reloaded in a special way. */
5625 if (GET_CODE (oldequiv) == POST_INC
5626 || GET_CODE (oldequiv) == POST_DEC
5627 || GET_CODE (oldequiv) == PRE_INC
5628 || GET_CODE (oldequiv) == PRE_DEC)
5630 /* We are not going to bother supporting the case where a
5631 incremented register can't be copied directly from
5632 OLDEQUIV since this seems highly unlikely. */
5633 if (reload_secondary_reload[j] >= 0)
5635 /* Prevent normal processing of this reload. */
5637 /* Output a special code sequence for this case. */
5638 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5641 /* If we are reloading a pseudo-register that was set by the previous
5642 insn, see if we can get rid of that pseudo-register entirely
5643 by redirecting the previous insn into our reload register. */
5645 else if (optimize && GET_CODE (old) == REG
5646 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5647 && dead_or_set_p (insn, old)
5648 /* This is unsafe if some other reload
5649 uses the same reg first. */
5650 && reload_reg_free_before_p (REGNO (reloadreg),
5652 reload_when_needed[j]))
5654 rtx temp = PREV_INSN (insn);
5655 while (temp && GET_CODE (temp) == NOTE)
5656 temp = PREV_INSN (temp);
5658 && GET_CODE (temp) == INSN
5659 && GET_CODE (PATTERN (temp)) == SET
5660 && SET_DEST (PATTERN (temp)) == old
5661 /* Make sure we can access insn_operand_constraint. */
5662 && asm_noperands (PATTERN (temp)) < 0
5663 /* This is unsafe if prev insn rejects our reload reg. */
5664 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5666 /* This is unsafe if operand occurs more than once in current
5667 insn. Perhaps some occurrences aren't reloaded. */
5668 && count_occurrences (PATTERN (insn), old) == 1
5669 /* Don't risk splitting a matching pair of operands. */
5670 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5672 /* Store into the reload register instead of the pseudo. */
5673 SET_DEST (PATTERN (temp)) = reloadreg;
5674 /* If these are the only uses of the pseudo reg,
5675 pretend for GDB it lives in the reload reg we used. */
5676 if (reg_n_deaths[REGNO (old)] == 1
5677 && reg_n_sets[REGNO (old)] == 1)
5679 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5680 alter_reg (REGNO (old), -1);
5686 /* We can't do that, so output an insn to load RELOADREG. */
5690 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5691 rtx second_reload_reg = 0;
5692 enum insn_code icode;
5694 /* If we have a secondary reload, pick up the secondary register
5695 and icode, if any. If OLDEQUIV and OLD are different or
5696 if this is an in-out reload, recompute whether or not we
5697 still need a secondary register and what the icode should
5698 be. If we still need a secondary register and the class or
5699 icode is different, go back to reloading from OLD if using
5700 OLDEQUIV means that we got the wrong type of register. We
5701 cannot have different class or icode due to an in-out reload
5702 because we don't make such reloads when both the input and
5703 output need secondary reload registers. */
5705 if (reload_secondary_reload[j] >= 0)
5707 int secondary_reload = reload_secondary_reload[j];
5708 rtx real_oldequiv = oldequiv;
5711 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5712 and similarly for OLD.
5713 See comments in find_secondary_reload in reload.c. */
5714 if (GET_CODE (oldequiv) == REG
5715 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5716 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5717 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5719 if (GET_CODE (old) == REG
5720 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5721 && reg_equiv_mem[REGNO (old)] != 0)
5722 real_old = reg_equiv_mem[REGNO (old)];
5724 second_reload_reg = reload_reg_rtx[secondary_reload];
5725 icode = reload_secondary_icode[j];
5727 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5728 || (reload_in[j] != 0 && reload_out[j] != 0))
5730 enum reg_class new_class
5731 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5732 mode, real_oldequiv);
5734 if (new_class == NO_REGS)
5735 second_reload_reg = 0;
5738 enum insn_code new_icode;
5739 enum machine_mode new_mode;
5741 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5742 REGNO (second_reload_reg)))
5743 oldequiv = old, real_oldequiv = real_old;
5746 new_icode = reload_in_optab[(int) mode];
5747 if (new_icode != CODE_FOR_nothing
5748 && ((insn_operand_predicate[(int) new_icode][0]
5749 && ! ((*insn_operand_predicate[(int) new_icode][0])
5751 || (insn_operand_predicate[(int) new_icode][1]
5752 && ! ((*insn_operand_predicate[(int) new_icode][1])
5753 (real_oldequiv, mode)))))
5754 new_icode = CODE_FOR_nothing;
5756 if (new_icode == CODE_FOR_nothing)
5759 new_mode = insn_operand_mode[(int) new_icode][2];
5761 if (GET_MODE (second_reload_reg) != new_mode)
5763 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5765 oldequiv = old, real_oldequiv = real_old;
5768 = gen_rtx (REG, new_mode,
5769 REGNO (second_reload_reg));
5775 /* If we still need a secondary reload register, check
5776 to see if it is being used as a scratch or intermediate
5777 register and generate code appropriately. If we need
5778 a scratch register, use REAL_OLDEQUIV since the form of
5779 the insn may depend on the actual address if it is
5782 if (second_reload_reg)
5784 if (icode != CODE_FOR_nothing)
5786 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5787 second_reload_reg));
5792 /* See if we need a scratch register to load the
5793 intermediate register (a tertiary reload). */
5794 enum insn_code tertiary_icode
5795 = reload_secondary_icode[secondary_reload];
5797 if (tertiary_icode != CODE_FOR_nothing)
5799 rtx third_reload_reg
5800 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5802 emit_insn ((GEN_FCN (tertiary_icode)
5803 (second_reload_reg, real_oldequiv,
5804 third_reload_reg)));
5807 gen_input_reload (second_reload_reg, oldequiv,
5809 reload_when_needed[j]);
5811 oldequiv = second_reload_reg;
5818 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5819 reload_when_needed[j]);
5821 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5822 /* We may have to make a REG_DEAD note for the secondary reload
5823 register in the insns we just made. Find the last insn that
5824 mentioned the register. */
5825 if (! special && second_reload_reg
5826 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5830 for (prev = get_last_insn (); prev;
5831 prev = PREV_INSN (prev))
5832 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
5833 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5836 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5845 /* End this sequence. */
5846 *where = get_insns ();
5850 /* Add a note saying the input reload reg
5851 dies in this insn, if anyone cares. */
5852 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5854 && reload_reg_rtx[j] != old
5855 && reload_reg_rtx[j] != 0
5856 && reload_out[j] == 0
5857 && ! reload_inherited[j]
5858 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5860 register rtx reloadreg = reload_reg_rtx[j];
5863 /* We can't abort here because we need to support this for sched.c.
5864 It's not terrible to miss a REG_DEAD note, but we should try
5865 to figure out how to do this correctly. */
5866 /* The code below is incorrect for address-only reloads. */
5867 if (reload_when_needed[j] != RELOAD_OTHER
5868 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5872 /* Add a death note to this insn, for an input reload. */
5874 if ((reload_when_needed[j] == RELOAD_OTHER
5875 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5876 && ! dead_or_set_p (insn, reloadreg))
5878 = gen_rtx (EXPR_LIST, REG_DEAD,
5879 reloadreg, REG_NOTES (insn));
5882 /* When we inherit a reload, the last marked death of the reload reg
5883 may no longer really be a death. */
5884 if (reload_reg_rtx[j] != 0
5885 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5886 && reload_inherited[j])
5888 /* Handle inheriting an output reload.
5889 Remove the death note from the output reload insn. */
5890 if (reload_spill_index[j] >= 0
5891 && GET_CODE (reload_in[j]) == REG
5892 && spill_reg_store[reload_spill_index[j]] != 0
5893 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5894 REG_DEAD, REGNO (reload_reg_rtx[j])))
5895 remove_death (REGNO (reload_reg_rtx[j]),
5896 spill_reg_store[reload_spill_index[j]]);
5897 /* Likewise for input reloads that were inherited. */
5898 else if (reload_spill_index[j] >= 0
5899 && GET_CODE (reload_in[j]) == REG
5900 && spill_reg_store[reload_spill_index[j]] == 0
5901 && reload_inheritance_insn[j] != 0
5902 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5903 REGNO (reload_reg_rtx[j])))
5904 remove_death (REGNO (reload_reg_rtx[j]),
5905 reload_inheritance_insn[j]);
5910 /* We got this register from find_equiv_reg.
5911 Search back for its last death note and get rid of it.
5912 But don't search back too far.
5913 Don't go past a place where this reg is set,
5914 since a death note before that remains valid. */
5915 for (prev = PREV_INSN (insn);
5916 prev && GET_CODE (prev) != CODE_LABEL;
5917 prev = PREV_INSN (prev))
5918 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5919 && dead_or_set_p (prev, reload_reg_rtx[j]))
5921 if (find_regno_note (prev, REG_DEAD,
5922 REGNO (reload_reg_rtx[j])))
5923 remove_death (REGNO (reload_reg_rtx[j]), prev);
5929 /* We might have used find_equiv_reg above to choose an alternate
5930 place from which to reload. If so, and it died, we need to remove
5931 that death and move it to one of the insns we just made. */
5933 if (oldequiv_reg != 0
5934 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5938 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5939 prev = PREV_INSN (prev))
5940 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5941 && dead_or_set_p (prev, oldequiv_reg))
5943 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5945 for (prev1 = this_reload_insn;
5946 prev1; prev1 = PREV_INSN (prev1))
5947 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
5948 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5951 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5956 remove_death (REGNO (oldequiv_reg), prev);
5963 /* If we are reloading a register that was recently stored in with an
5964 output-reload, see if we can prove there was
5965 actually no need to store the old value in it. */
5967 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5968 && reload_in[j] != 0
5969 && GET_CODE (reload_in[j]) == REG
5971 /* There doesn't seem to be any reason to restrict this to pseudos
5972 and doing so loses in the case where we are copying from a
5973 register of the wrong class. */
5974 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5976 && spill_reg_store[reload_spill_index[j]] != 0
5977 /* This is unsafe if some other reload uses the same reg first. */
5978 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5979 reload_opnum[j], reload_when_needed[j])
5980 && dead_or_set_p (insn, reload_in[j])
5981 /* This is unsafe if operand occurs more than once in current
5982 insn. Perhaps some occurrences weren't reloaded. */
5983 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5984 delete_output_reload (insn, j,
5985 spill_reg_store[reload_spill_index[j]]);
5987 /* Input-reloading is done. Now do output-reloading,
5988 storing the value from the reload-register after the main insn
5989 if reload_out[j] is nonzero.
5991 ??? At some point we need to support handling output reloads of
5992 JUMP_INSNs or insns that set cc0. */
5993 old = reload_out[j];
5995 && reload_reg_rtx[j] != old
5996 && reload_reg_rtx[j] != 0)
5998 register rtx reloadreg = reload_reg_rtx[j];
5999 register rtx second_reloadreg = 0;
6001 enum machine_mode mode;
6004 /* An output operand that dies right away does need a reload,
6005 but need not be copied from it. Show the new location in the
6007 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6008 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6010 XEXP (note, 0) = reload_reg_rtx[j];
6013 else if (GET_CODE (old) == SCRATCH)
6014 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6015 but we don't want to make an output reload. */
6019 /* Strip off of OLD any size-increasing SUBREGs such as
6020 (SUBREG:SI foo:QI 0). */
6022 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6023 && (GET_MODE_SIZE (GET_MODE (old))
6024 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6025 old = SUBREG_REG (old);
6028 /* If is a JUMP_INSN, we can't support output reloads yet. */
6029 if (GET_CODE (insn) == JUMP_INSN)
6032 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6034 /* Determine the mode to reload in.
6035 See comments above (for input reloading). */
6037 mode = GET_MODE (old);
6038 if (mode == VOIDmode)
6040 /* VOIDmode should never happen for an output. */
6041 if (asm_noperands (PATTERN (insn)) < 0)
6042 /* It's the compiler's fault. */
6044 error_for_asm (insn, "output operand is constant in `asm'");
6045 /* Prevent crash--use something we know is valid. */
6047 old = gen_rtx (REG, mode, REGNO (reloadreg));
6050 if (GET_MODE (reloadreg) != mode)
6051 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6053 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6055 /* If we need two reload regs, set RELOADREG to the intermediate
6056 one, since it will be stored into OUT. We might need a secondary
6057 register only for an input reload, so check again here. */
6059 if (reload_secondary_reload[j] >= 0)
6063 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6064 && reg_equiv_mem[REGNO (old)] != 0)
6065 real_old = reg_equiv_mem[REGNO (old)];
6067 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6071 second_reloadreg = reloadreg;
6072 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
6074 /* See if RELOADREG is to be used as a scratch register
6075 or as an intermediate register. */
6076 if (reload_secondary_icode[j] != CODE_FOR_nothing)
6078 emit_insn ((GEN_FCN (reload_secondary_icode[j])
6079 (real_old, second_reloadreg, reloadreg)));
6084 /* See if we need both a scratch and intermediate reload
6086 int secondary_reload = reload_secondary_reload[j];
6087 enum insn_code tertiary_icode
6088 = reload_secondary_icode[secondary_reload];
6091 if (GET_MODE (reloadreg) != mode)
6092 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6094 if (tertiary_icode != CODE_FOR_nothing)
6097 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
6098 pat = (GEN_FCN (tertiary_icode)
6099 (reloadreg, second_reloadreg, third_reloadreg));
6101 #ifdef SECONDARY_MEMORY_NEEDED
6102 /* If we need a memory location to do the move, do it that way. */
6103 else if (GET_CODE (reloadreg) == REG
6104 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6105 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6106 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6107 GET_MODE (second_reloadreg)))
6109 /* Get the memory to use and rewrite both registers
6112 = get_secondary_mem (reloadreg,
6113 GET_MODE (second_reloadreg),
6115 reload_when_needed[j]);
6118 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6119 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6120 REGNO (second_reloadreg));
6122 if (GET_MODE (loc) != GET_MODE (reloadreg))
6123 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6126 tmp_reloadreg = reloadreg;
6128 emit_move_insn (loc, second_reloadreg);
6129 pat = gen_move_insn (tmp_reloadreg, loc);
6133 pat = gen_move_insn (reloadreg, second_reloadreg);
6141 /* Output the last reload insn. */
6144 #ifdef SECONDARY_MEMORY_NEEDED
6145 /* If we need a memory location to do the move, do it that way. */
6146 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6147 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6148 REGNO_REG_CLASS (REGNO (reloadreg)),
6149 GET_MODE (reloadreg)))
6151 /* Get the memory to use and rewrite both registers to
6153 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6155 reload_when_needed[j]);
6157 if (GET_MODE (loc) != GET_MODE (reloadreg))
6158 reloadreg = gen_rtx (REG, GET_MODE (loc),
6161 if (GET_MODE (loc) != GET_MODE (old))
6162 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6164 emit_insn (gen_move_insn (loc, reloadreg));
6165 emit_insn (gen_move_insn (old, loc));
6169 emit_insn (gen_move_insn (old, reloadreg));
6172 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6173 /* If final will look at death notes for this reg,
6174 put one on the last output-reload insn to use it. Similarly
6175 for any secondary register. */
6176 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6177 for (p = get_last_insn (); p; p = PREV_INSN (p))
6178 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6179 && reg_overlap_mentioned_for_reload_p (reloadreg,
6181 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6182 reloadreg, REG_NOTES (p));
6184 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6186 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6187 for (p = get_last_insn (); p; p = PREV_INSN (p))
6188 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6189 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6191 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6192 second_reloadreg, REG_NOTES (p));
6195 /* Look at all insns we emitted, just to be safe. */
6196 for (p = get_insns (); p; p = NEXT_INSN (p))
6197 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6199 /* If this output reload doesn't come from a spill reg,
6200 clear any memory of reloaded copies of the pseudo reg.
6201 If this output reload comes from a spill reg,
6202 reg_has_output_reload will make this do nothing. */
6203 note_stores (PATTERN (p), forget_old_reloads_1);
6205 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6209 output_reload_insns[reload_opnum[j]] = get_insns ();
6214 if (reload_spill_index[j] >= 0)
6215 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6218 /* Now write all the insns we made for reloads in the order expected by
6219 the allocation functions. Prior to the insn being reloaded, we write
6220 the following reloads:
6222 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6224 RELOAD_OTHER reloads.
6226 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6227 the RELOAD_FOR_INPUT reload for the operand.
6229 RELOAD_FOR_OPERAND_ADDRESS reloads.
6231 After the insn being reloaded, we write the following:
6233 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6234 the RELOAD_FOR_OUTPUT reload for that operand. */
6236 emit_insns_before (other_input_address_reload_insns, before_insn);
6237 emit_insns_before (other_input_reload_insns, before_insn);
6239 for (j = 0; j < reload_n_operands; j++)
6241 emit_insns_before (input_address_reload_insns[j], before_insn);
6242 emit_insns_before (input_reload_insns[j], before_insn);
6245 emit_insns_before (operand_reload_insns, before_insn);
6247 for (j = 0; j < reload_n_operands; j++)
6249 emit_insns_before (output_address_reload_insns[j], following_insn);
6250 emit_insns_before (output_reload_insns[j], following_insn);
6253 /* Move death notes from INSN
6254 to output-operand-address and output reload insns. */
6255 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6258 /* Loop over those insns, last ones first. */
6259 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6260 insn1 = PREV_INSN (insn1))
6261 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6263 rtx source = SET_SRC (PATTERN (insn1));
6264 rtx dest = SET_DEST (PATTERN (insn1));
6266 /* The note we will examine next. */
6267 rtx reg_notes = REG_NOTES (insn);
6268 /* The place that pointed to this note. */
6269 rtx *prev_reg_note = &REG_NOTES (insn);
6271 /* If the note is for something used in the source of this
6272 reload insn, or in the output address, move the note. */
6275 rtx next_reg_notes = XEXP (reg_notes, 1);
6276 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6277 && GET_CODE (XEXP (reg_notes, 0)) == REG
6278 && ((GET_CODE (dest) != REG
6279 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6281 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6284 *prev_reg_note = next_reg_notes;
6285 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6286 REG_NOTES (insn1) = reg_notes;
6289 prev_reg_note = &XEXP (reg_notes, 1);
6291 reg_notes = next_reg_notes;
6297 /* For all the spill regs newly reloaded in this instruction,
6298 record what they were reloaded from, so subsequent instructions
6299 can inherit the reloads.
6301 Update spill_reg_store for the reloads of this insn.
6302 Copy the elements that were updated in the loop above. */
6304 for (j = 0; j < n_reloads; j++)
6306 register int r = reload_order[j];
6307 register int i = reload_spill_index[r];
6309 /* I is nonneg if this reload used one of the spill regs.
6310 If reload_reg_rtx[r] is 0, this is an optional reload
6311 that we opted to ignore.
6313 Also ignore reloads that don't reach the end of the insn,
6314 since we will eventually see the one that does. */
6316 if (i >= 0 && reload_reg_rtx[r] != 0
6317 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6318 reload_when_needed[r]))
6320 /* First, clear out memory of what used to be in this spill reg.
6321 If consecutive registers are used, clear them all. */
6323 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6326 for (k = 0; k < nr; k++)
6328 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6329 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6332 /* Maybe the spill reg contains a copy of reload_out. */
6333 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6335 register int nregno = REGNO (reload_out[r]);
6336 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6337 : HARD_REGNO_NREGS (nregno,
6338 GET_MODE (reload_reg_rtx[r])));
6340 spill_reg_store[i] = new_spill_reg_store[i];
6341 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6343 /* If NREGNO is a hard register, it may occupy more than
6344 one register. If it does, say what is in the
6345 rest of the registers assuming that both registers
6346 agree on how many words the object takes. If not,
6347 invalidate the subsequent registers. */
6349 if (nregno < FIRST_PSEUDO_REGISTER)
6350 for (k = 1; k < nnr; k++)
6351 reg_last_reload_reg[nregno + k]
6352 = (nr == nnr ? gen_rtx (REG, word_mode,
6353 REGNO (reload_reg_rtx[r]) + k)
6356 /* Now do the inverse operation. */
6357 for (k = 0; k < nr; k++)
6359 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6360 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6362 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6366 /* Maybe the spill reg contains a copy of reload_in. Only do
6367 something if there will not be an output reload for
6368 the register being reloaded. */
6369 else if (reload_out[r] == 0
6370 && reload_in[r] != 0
6371 && ((GET_CODE (reload_in[r]) == REG
6372 && ! reg_has_output_reload[REGNO (reload_in[r])]
6373 || (GET_CODE (reload_in_reg[r]) == REG
6374 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6376 register int nregno;
6379 if (GET_CODE (reload_in[r]) == REG)
6380 nregno = REGNO (reload_in[r]);
6382 nregno = REGNO (reload_in_reg[r]);
6384 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6385 : HARD_REGNO_NREGS (nregno,
6386 GET_MODE (reload_reg_rtx[r])));
6388 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6390 if (nregno < FIRST_PSEUDO_REGISTER)
6391 for (k = 1; k < nnr; k++)
6392 reg_last_reload_reg[nregno + k]
6393 = (nr == nnr ? gen_rtx (REG, word_mode,
6394 REGNO (reload_reg_rtx[r]) + k)
6397 /* Unless we inherited this reload, show we haven't
6398 recently done a store. */
6399 if (! reload_inherited[r])
6400 spill_reg_store[i] = 0;
6402 for (k = 0; k < nr; k++)
6404 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6405 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6407 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6413 /* The following if-statement was #if 0'd in 1.34 (or before...).
6414 It's reenabled in 1.35 because supposedly nothing else
6415 deals with this problem. */
6417 /* If a register gets output-reloaded from a non-spill register,
6418 that invalidates any previous reloaded copy of it.
6419 But forget_old_reloads_1 won't get to see it, because
6420 it thinks only about the original insn. So invalidate it here. */
6421 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6423 register int nregno = REGNO (reload_out[r]);
6424 reg_last_reload_reg[nregno] = 0;
6429 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6430 operand OPNUM with reload type TYPE.
6432 Returns first insn emitted. */
/* NOTE(review): this extract is non-contiguous (gaps in the embedded line
   numbers); the return type, the parameter declarations for RELOADREG, IN
   and OPNUM, and several braces are not visible here.  The comments below
   annotate only the visible code.  */
6435 gen_input_reload (reloadreg, in, opnum, type)
6439 enum reload_type type;
6441 rtx last = get_last_insn ();
6443 /* How to do this reload can get quite tricky. Normally, we are being
6444 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6445 register that didn't get a hard register. In that case we can just
6446 call emit_move_insn.
6448 We can also be asked to reload a PLUS that adds a register or a MEM to
6449 another register, constant or MEM. This can occur during frame pointer
6450 elimination and while reloading addresses. This case is handled by
6451 trying to emit a single insn to perform the add. If it is not valid,
6452 we use a two insn sequence.
6454 Finally, we could be called to handle an 'o' constraint by putting
6455 an address into a register. In that case, we first try to do this
6456 with a named pattern of "reload_load_address". If no such pattern
6457 exists, we just emit a SET insn and hope for the best (it will normally
6458 be valid on machines that use 'o').
6460 This entire process is made complex because reload will never
6461 process the insns we generate here and so we must ensure that
6462 they will fit their constraints and also by the fact that parts of
6463 IN might be being reloaded separately and replaced with spill registers.
6464 Because of this, we are, in some sense, just guessing the right approach
6465 here. The one listed above seems to work.
6467 ??? At some point, this whole thing needs to be rethought. */
/* Case 1: IN is (plus X Y) where X is a REG or MEM and Y is a REG,
   constant, or MEM.  Try a single three-operand add first.  */
6469 if (GET_CODE (in) == PLUS
6470 && (GET_CODE (XEXP (in, 0)) == REG
6471 || GET_CODE (XEXP (in, 0)) == MEM)
6472 && (GET_CODE (XEXP (in, 1)) == REG
6473 || CONSTANT_P (XEXP (in, 1))
6474 || GET_CODE (XEXP (in, 1)) == MEM))
6476 /* We need to compute the sum of a register or a MEM and another
6477 register, constant, or MEM, and put it into the reload
6478 register. The best possible way of doing this is if the machine
6479 has a three-operand ADD insn that accepts the required operands.
6481 The simplest approach is to try to generate such an insn and see if it
6482 is recognized and matches its constraints. If so, it can be used.
6484 It might be better not to actually emit the insn unless it is valid,
6485 but we need to pass the insn as an operand to `recog' and
6486 `insn_extract' and it is simpler to emit and then delete the insn if
6487 not valid than to dummy things up. */
6489 rtx op0, op1, tem, insn;
/* Substitute any pending reload replacements into the two addends
   before building the tentative add insn.  */
6492 op0 = find_replacement (&XEXP (in, 0));
6493 op1 = find_replacement (&XEXP (in, 1));
6495 /* Since constraint checking is strict, commutativity won't be
6496 checked, so we need to do that here to avoid spurious failure
6497 if the add instruction is two-address and the second operand
6498 of the add is the same as the reload reg, which is frequently
6499 the case. If the insn would be A = B + A, rearrange it so
6500 it will be A = A + B as constrain_operands expects. */
6502 if (GET_CODE (XEXP (in, 1)) == REG
6503 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6504 tem = op0, op0 = op1, op1 = tem;
6506 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6507 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
/* Tentatively emit (set RELOADREG IN) and ask recog whether any insn
   pattern matches it.  */
6509 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6510 code = recog_memoized (insn);
6514 insn_extract (insn);
6515 /* We want constrain operands to treat this insn strictly in
6516 its validity determination, i.e., the way it would after reload
6518 if (constrain_operands (code, 1))
/* NOTE(review): the statement executed when the single add insn is
   accepted (presumably returning it) is not visible in this extract.  */
6522 delete_insns_since (last);
6524 /* If that failed, we must use a conservative two-insn sequence.
6525 use move to copy constant, MEM, or pseudo register to the reload
6526 register since "move" will be able to handle an arbitrary operand,
6527 unlike add which can't, in general. Then add the registers.
6529 If there is another way to do this for a specific machine, a
6530 DEFINE_PEEPHOLE should be specified that recognizes the sequence
/* Put the operand that a plain move can certainly handle (constant,
   MEM, or pseudo register) into OP0 so it is the one copied first.  */
6533 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6534 || (GET_CODE (op1) == REG
6535 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6536 tem = op0, op0 = op1, op1 = tem;
6538 emit_insn (gen_move_insn (reloadreg, op0));
6540 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6541 This fixes a problem on the 32K where the stack pointer cannot
6542 be used as an operand of an add insn. */
6544 if (rtx_equal_p (op0, op1))
/* NOTE(review): the assignment that replaces OP1 with RELOADREG in
   that case is not visible here -- extraction gap before the add.  */
6547 emit_insn (gen_add2_insn (reloadreg, op1));
6550 #ifdef SECONDARY_MEMORY_NEEDED
6551 /* If we need a memory location to do the move, do it that way. */
6552 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6553 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6554 REGNO_REG_CLASS (REGNO (reloadreg)),
6555 GET_MODE (reloadreg)))
6557 /* Get the memory to use and rewrite both registers to its mode. */
6558 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6560 if (GET_MODE (loc) != GET_MODE (reloadreg))
6561 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg))
6563 if (GET_MODE (loc) != GET_MODE (in))
6564 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
/* Store IN through the secondary memory, then load the reload reg
   back from it.  */
6566 emit_insn (gen_move_insn (loc, in));
6567 emit_insn (gen_move_insn (reloadreg, loc));
6571 /* If IN is a simple operand, use gen_move_insn. */
6572 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6573 emit_insn (gen_move_insn (reloadreg, in));
6575 #ifdef HAVE_reload_load_address
6576 else if (HAVE_reload_load_address)
6577 emit_insn (gen_reload_load_address (reloadreg, in));
6580 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6582 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6584 /* Return the first insn emitted.
6585 We can not just return get_last_insn, because there may have
6586 been multiple instructions emitted. Also note that gen_move_insn may
6587 emit more than one insn itself, so we can not assume that there is one
6588 insn emitted per emit_insn_before call. */
6590 return last ? NEXT_INSN (last) : get_insns ();
6593 /* Delete a previously made output-reload
6594 whose result we now believe is not needed.
6595 First we double-check.
6597 INSN is the insn now being processed.
6598 OUTPUT_RELOAD_INSN is the insn of the output reload.
6599 J is the reload-number for this insn. */
/* NOTE(review): this extract is non-contiguous; the return type, the
   remaining parameter declarations and several braces are not visible.  */
6602 delete_output_reload (insn, j, output_reload_insn)
6605 rtx output_reload_insn;
6609 /* Get the raw pseudo-register referred to. */
6611 rtx reg = reload_in[j];
6612 while (GET_CODE (reg) == SUBREG)
6613 reg = SUBREG_REG (reg);
6615 /* If the pseudo-reg we are reloading is no longer referenced
6616 anywhere between the store into it and here,
6617 and no jumps or labels intervene, then the value can get
6618 here through the reload reg alone.
6619 Otherwise, give up--return. */
6620 for (i1 = NEXT_INSN (output_reload_insn);
6621 i1 != insn; i1 = NEXT_INSN (i1))
6623 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6625 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6626 && reg_mentioned_p (reg, PATTERN (i1)))
/* This pseudo's stores were flagged as not removable -- presumably by
   some earlier analysis; give up.  TODO(review): confirm against where
   cannot_omit_stores is computed.  */
6630 if (cannot_omit_stores[REGNO (reg)])
6633 /* If this insn will store in the pseudo again,
6634 the previous store can be removed. */
6635 if (reload_out[j] == reload_in[j])
6636 delete_insn (output_reload_insn);
6638 /* See if the pseudo reg has been completely replaced
6639 with reload regs. If so, delete the store insn
6640 and forget we had a stack slot for the pseudo. */
6641 else if (reg_n_deaths[REGNO (reg)] == 1
6642 && reg_basic_block[REGNO (reg)] >= 0
6643 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6647 /* We know that it was used only between here
6648 and the beginning of the current basic block.
6649 (We also know that the last use before INSN was
6650 the output reload we are thinking of deleting, but never mind that.)
6651 Search that range; see if any ref remains. */
6652 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6654 rtx set = single_set (i2);
6656 /* Uses which just store in the pseudo don't count,
6657 since if they are the only uses, they are dead. */
6658 if (set != 0 && SET_DEST (set) == reg)
/* A label or jump marks the head of the basic block -- stop the
   backward scan there.  */
6660 if (GET_CODE (i2) == CODE_LABEL
6661 || GET_CODE (i2) == JUMP_INSN)
6663 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6664 && reg_mentioned_p (reg, PATTERN (i2)))
6665 /* Some other ref remains;
6666 we can't do anything. */
6670 /* Delete the now-dead stores into this pseudo. */
6671 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6673 rtx set = single_set (i2);
6675 if (set != 0 && SET_DEST (set) == reg)
6677 if (GET_CODE (i2) == CODE_LABEL
6678 || GET_CODE (i2) == JUMP_INSN)
6682 /* For the debugging info,
6683 say the pseudo lives in this reload reg. */
6684 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6685 alter_reg (REGNO (reg), -1);
6689 /* Output reload-insns to reload VALUE into RELOADREG.
6690 VALUE is an autoincrement or autodecrement RTX whose operand
6691 is a register or memory location;
6692 so reloading involves incrementing that location.
6694 INC_AMOUNT is the number to increment or decrement by (always positive).
6695 This cannot be deduced from VALUE. */
/* NOTE(review): this extract is non-contiguous; the return type, some
   local declarations (LAST, INC, ADD_INSN, CODE) and braces are not
   visible here.  */
6698 inc_for_reload (reloadreg, value, inc_amount)
6703 /* REG or MEM to be copied and incremented. */
6704 rtx incloc = XEXP (value, 0);
6705 /* Nonzero if increment after copying. */
6706 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6712 /* No hard register is equivalent to this register after
6713 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6714 we could inc/dec that register as well (maybe even using it for
6715 the source), but I'm not sure it's worth worrying about. */
6716 if (GET_CODE (incloc) == REG)
6717 reg_last_reload_reg[REGNO (incloc)] = 0;
/* A decrement is just an increment by the negated amount.  */
6719 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6720 inc_amount = - inc_amount;
6722 inc = GEN_INT (inc_amount);
6724 /* If this is post-increment, first copy the location to the reload reg. */
6726 emit_insn (gen_move_insn (reloadreg, incloc));
6728 /* See if we can directly increment INCLOC. Use a method similar to that
6729 in gen_input_reload. */
6731 last = get_last_insn ();
/* Tentatively emit INCLOC = INCLOC + INC and ask recog whether any
   insn pattern matches it.  */
6732 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6733 gen_rtx (PLUS, GET_MODE (incloc),
6736 code = recog_memoized (add_insn);
6739 insn_extract (add_insn);
6740 if (constrain_operands (code, 1))
6742 /* If this is a pre-increment and we have incremented the value
6743 where it lives, copy the incremented value to RELOADREG to
6744 be used as an address. */
6747 emit_insn (gen_move_insn (reloadreg, incloc));
/* The direct increment was not recognized; discard the tentative add
   insn and fall back to incrementing in RELOADREG instead.  */
6753 delete_insns_since (last);
6755 /* If couldn't do the increment directly, must increment in RELOADREG.
6756 The way we do this depends on whether this is pre- or post-increment.
6757 For pre-increment, copy INCLOC to the reload register, increment it
6758 there, then save back. */
6762 emit_insn (gen_move_insn (reloadreg, incloc));
6763 emit_insn (gen_add2_insn (reloadreg, inc));
6764 emit_insn (gen_move_insn (incloc, reloadreg));
6769 Because this might be a jump insn or a compare, and because RELOADREG
6770 may not be available after the insn in an input reload, we must do
6771 the incrementation before the insn being reloaded for.
6773 We have already copied INCLOC to RELOADREG. Increment the copy in
6774 RELOADREG, save that back, then decrement RELOADREG so it has
6775 the original value. */
6777 emit_insn (gen_add2_insn (reloadreg, inc));
6778 emit_insn (gen_move_insn (incloc, reloadreg));
6779 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6785 /* Return 1 if we are certain that the constraint-string STRING allows
6786 the hard register REG. Return 0 if we can't be sure of this. */
/* NOTE(review): this extract is non-contiguous; the return type, the
   parameter declarations, the scanning loop header, the switch case
   labels and the result variable are not visible here.  Each visible
   case comment documents one constraint-letter class.  */
6789 constraint_accepts_reg_p (string, reg)
/* Resolve REG to its hard register number.  */
6794 int regno = true_regnum (reg);
6797 /* Initialize for first alternative. */
6799 /* Check that each alternative contains `g' or `r'. */
/* Scan the constraint string one letter at a time; alternatives are
   comma-separated.  */
6801 switch (c = *string++)
6804 /* If an alternative lacks `g' or `r', we lose. */
6807 /* If an alternative lacks `g' or `r', we lose. */
6810 /* Initialize for next alternative. */
6815 /* Any general reg wins for this alternative. */
6816 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6820 /* Any reg in specified class wins for this alternative. */
6822 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6824 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6830 /* Return the number of places FIND appears within X, but don't count
6831 an occurrence if some SET_DEST is FIND. */
6834 count_occurrences (x, find)
6835 register rtx x, find;
6838 register enum rtx_code code;
6839 register char *format_ptr;
6847 code = GET_CODE (x);
6862 if (SET_DEST (x) == find)
6863 return count_occurrences (SET_SRC (x), find);
6867 format_ptr = GET_RTX_FORMAT (code);
6870 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6872 switch (*format_ptr++)
6875 count += count_occurrences (XEXP (x, i), find);
6879 if (XVEC (x, i) != NULL)
6881 for (j = 0; j < XVECLEN (x, i); j++)
6882 count += count_occurrences (XVECEXP (x, i, j), find);