the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */
-
/* This file contains subroutines used only from the file reload1.c.
It knows how to scan one insn for operands and values
that need to be copied into registers to make valid code.
2 happens only when REPLACE is 1, which is only when
actually doing the reloads, not when just counting them.
-
Using a reload register for several reloads in one insn:
When an insn has reloads, it is considered as having three parts:
|| GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
{
val.base = XEXP (addr, 0);
- val.start = - GET_MODE_SIZE (GET_MODE (x));
+ val.start = -GET_MODE_SIZE (GET_MODE (x));
val.end = GET_MODE_SIZE (GET_MODE (x));
val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
return val;
}
if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
- {
- if (GET_CODE (XEXP (addr, 1)) == PLUS
+ {
+ if (GET_CODE (XEXP (addr, 1)) == PLUS
&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
- {
- val.base = XEXP (addr, 0);
- val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
- val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
- val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
- return val;
- }
- }
-
+ {
+ val.base = XEXP (addr, 0);
+ val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
+ val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
+ val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
+ return val;
+ }
+ }
+
if (GET_CODE (addr) == CONST)
{
addr = XEXP (addr, 0);
if (GET_CODE (x) != MEM)
return 1;
- xdata = decompose (x);
+ xdata = decompose (x);
if (! rtx_equal_p (xdata.base, ydata.base))
{
return 0;
}
-
return (xdata.start >= ydata.end || ydata.start >= xdata.end);
}
case '#':
/* Ignore rest of this alternative as far as
reloading is concerned. */
- while (*p && *p != ',') p++;
+ while (*p && *p != ',')
+ p++;
break;
case '0': case '1': case '2': case '3': case '4':
only on one side of its diagonal. */
? (operands_match
[(c == commutative || c == commutative + 1)
- ? 2*commutative + 1 - c : c]
+ ? 2 * commutative + 1 - c : c]
[(i == commutative || i == commutative + 1)
- ? 2*commutative + 1 - i : i])
+ ? 2 * commutative + 1 - i : i])
: operands_match[c][i])
{
/* If we are matching a non-offsettable address where an
&& ! const_to_mem)
bad = 1;
-
/* We prefer to reload pseudos over reloading other things,
since such reloads may be able to be eliminated later.
If we are reloading a SCRATCH, we won't be generating any
Don't do this if the preferred class has only one register
because we might otherwise exhaust the class. */
-
if (! win && this_alternative[i] != (int) NO_REGS
&& GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
&& reg_class_size[(int) preferred_class[i]] > 1)
tem = *recog_data.operand_loc[commutative];
*recog_data.operand_loc[commutative]
= *recog_data.operand_loc[commutative + 1];
- *recog_data.operand_loc[commutative+1] = tem;
+ *recog_data.operand_loc[commutative + 1] = tem;
for (i = 0; i < n_reloads; i++)
{
{
int secondary_in_reload = rld[i].secondary_in_reload;
- rld[secondary_in_reload].when_needed
- = RELOAD_FOR_OPADDR_ADDR;
+ rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
/* If there's a tertiary reload we have to change it also. */
if (secondary_in_reload > 0
{
int secondary_out_reload = rld[i].secondary_out_reload;
- rld[secondary_out_reload].when_needed
- = RELOAD_FOR_OPADDR_ADDR;
+ rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
/* If there's a tertiary reload we have to change it also. */
if (secondary_out_reload
int first_op_addr_num = -2;
int first_inpaddr_num[MAX_RECOG_OPERANDS];
int first_outpaddr_num[MAX_RECOG_OPERANDS];
- int need_change= 0;
+ int need_change = 0;
/* We use last_op_addr_reload and the contents of the above arrays
first as flags - -2 means no instance encountered, -1 means exactly
one instance encountered.
of another register or by a constant value. Thus, these
operands must match. */
if (op0 != XEXP (op1, 0))
- abort();
+ abort ();
/* Require index register (or constant). Let's just handle the
register case in the meantime... If the target allows
auto-modify by a constant then we could try replacing a pseudo
- register with its equivalent constant where applicable. */
+ register with its equivalent constant where applicable. */
if (REG_P (XEXP (op1, 1)))
if (!REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
find_reloads_address_1 (mode, XEXP (op1, 1), 1, &XEXP (op1, 1),
if (!REGNO_MODE_OK_FOR_BASE_P (regno, GET_MODE (x)))
{
push_reload (XEXP (op1, 0), XEXP (x, 0),
- &XEXP (op1, 0), &XEXP (x, 0),
+ &XEXP (op1, 0), &XEXP (x, 0),
BASE_REG_CLASS,
GET_MODE (x), GET_MODE (x), 0, 0,
opnum, RELOAD_OTHER);
}
}
else
- abort();
+ abort ();
}
return 0;
else if (fmt[i] == 'E')
{
register int j;
- for (j = XVECLEN (x, i) - 1; j >=0; j--)
+ for (j = XVECLEN (x, i) - 1; j >= 0; j--)
if (loc != &XVECEXP (x, i, j)
&& refers_to_regno_for_reload_p (regno, endregno,
XVECEXP (x, i, j), loc))
if (GET_CODE (p) == CALL_INSN)
{
int i;
-
+
if (goal_mem || need_stable_sp)
return 0;
-
+
if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
for (i = 0; i < nregs; ++i)
if (call_used_regs[regno + i])
&& xregno + xnregs > regno)
return 0;
else if (xregno < valueno + valuenregs
- && xregno + xnregs > valueno)
+ && xregno + xnregs > valueno)
return 0;
else if (goal_mem_addr_varies
&& reg_overlap_mentioned_for_reload_p (dest,
&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
&& XEXP (addr, 0) == inced
&& GET_CODE (XEXP (XEXP (addr, 1), 1)) == CONST_INT)
- {
- i = INTVAL (XEXP (XEXP (addr, 1), 1));
- return i < 0 ? -i : i;
- }
- }
+ {
+ i = INTVAL (XEXP (XEXP (addr, 1), 1));
+ return i < 0 ? -i : i;
+ }
+ }
fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */
-
/* This file handles the generation of rtl code from tree structure
above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
It also creates the rtl expressions for parameters and auto variables
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
-
\f
/* Functions and data structures for expanding case statements. */
/* Character strings, each containing a single decimal digit. */
static char *digit_strings[10];
-
static int n_occurrences PARAMS ((int, const char *));
static void expand_goto_internal PARAMS ((tree, rtx, rtx));
static int expand_fixup PARAMS ((tree, rtx, rtx));
emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
emit_indirect_jump (handler_slot);
}
- }
+ }
else
expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
insert_block (block);
else
{
- BLOCK_CHAIN (block)
+ BLOCK_CHAIN (block)
= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
= block;
return block != 0;
}
-
-
\f
/* Expand any needed fixups in the outputmost binding level of the
function. FIRST_INSN is the first insn in the function. */
&& ! (f->target_rtl == return_label
&& ((TREE_CODE (TREE_TYPE (current_function_decl))
== FUNCTION_TYPE)
- && (TYPE_RETURNS_STACK_DEPRESSED
+ && (TYPE_RETURNS_STACK_DEPRESSED
(TREE_TYPE (current_function_decl))))))
emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
end_sequence ();
emit_insns_after (cleanup_insns, f->before_jump);
-
f->before_jump = 0;
}
}
error ("too many alternatives in `asm'");
return;
}
-
+
tmp = outputs;
while (tmp)
{
argvec = rtvec_alloc (ninputs);
constraints = rtvec_alloc (ninputs);
- body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
- empty_string, 0, argvec, constraints,
+ body = gen_rtx_ASM_OPERANDS (VOIDmode, TREE_STRING_POINTER (string),
+ empty_string, 0, argvec, constraints,
filename, line);
MEM_VOLATILE_P (body) = vol;
for (j = constraint[j] - '0'; j > 0; --j)
o = TREE_CHAIN (o);
-
+
c_len = strlen (TREE_STRING_POINTER (TREE_PURPOSE (o)));
constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
j = 0;
break;
}
- /* ... fall through ... */
+ /* Fall through. */
case 'p': case 'r':
allows_reg = 1;
for (i = 0; i < noutputs; i++)
output_rtx[i] = protect_from_queue (output_rtx[i], 1);
- /* For in-out operands, copy output rtx to input rtx. */
+ /* For in-out operands, copy output rtx to input rtx. */
for (i = 0; i < ninout; i++)
{
int j = inout_opnum[i];
else
{
rtx lab = gen_label_rtx ();
-
+
/* Compare the value with itself to reference it. */
emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
expand_expr (TYPE_SIZE (last_expr_type),
the user cannot control it. */
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
return warn_if_unused_value (TREE_OPERAND (exp, 0));
- /* ... fall through ... */
-
+ /* Fall through. */
+
default:
/* Referencing a volatile value is a side effect, so don't warn. */
if ((DECL_P (exp)
POPSTACK (cond_stack);
last_expr_type = 0;
}
-
-
\f
/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
loop should be exited by `exit_something'. This is a loop for which
insn = PREV_INSN (label);
reorder_insns (label, label, start_label);
- for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
- {
+ for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
+ {
/* We ignore line number notes, but if we see any other note,
in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
NOTE_INSN_LOOP_*, we disable this optimization. */
body;
goto start_label;
end_label:
-
+
transform it to look like:
goto start_label;
for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE)
+ if (GET_CODE (insn) == NOTE)
{
if (optimize < 2
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
{
--eh_regions;
- if (eh_regions < 0)
+ if (eh_regions < 0)
/* We've come to the end of an EH region, but
never saw the beginning of that region. That
means that an EH region begins before the top
if (last_test_insn && num_insns > 30)
break;
- if (eh_regions > 0)
+ if (eh_regions > 0)
/* We don't want to move a partial EH region. Consider:
while ( ( { try {
- if (cond ()) 0;
+ if (cond ()) 0;
else {
bar();
1;
}
- } catch (...) {
+ } catch (...) {
1;
} )) {
body;
- }
+ }
This isn't legal C++, but here's what it's supposed to
mean: if cond() is true, stop looping. Otherwise,
call bar, and keep looping. In addition, if cond
throws an exception, catch it and keep looping. Such
- constructs are certainy legal in LISP.
+ constructs are certainly legal in LISP.
We should not move the `if (cond()) 0' test since then
the EH-region for the try-block would be broken up.
(In this case we would the EH_BEG note for the `try'
and `if cond()' but not the call to bar() or the
- EH_END note.)
+ EH_END note.)
So we don't look for tests within an EH region. */
continue;
- if (GET_CODE (insn) == JUMP_INSN
+ if (GET_CODE (insn) == JUMP_INSN
&& GET_CODE (PATTERN (insn)) == SET
&& SET_DEST (PATTERN (insn)) == pc_rtx)
{
do {
if (dest1 && GET_CODE (dest1) == LABEL_REF
- && ((XEXP (dest1, 0)
+ && ((XEXP (dest1, 0)
== loop_stack->data.loop.alt_end_label)
- || (XEXP (dest1, 0)
+ || (XEXP (dest1, 0)
== loop_stack->data.loop.end_label)))
{
last_test_insn = potential_last_test;
around a unconditional branch to exit the loop. If fixups are
necessary, they go before the unconditional branch. */
-
do_jump (cond, NULL_RTX, label);
last_insn = get_last_insn ();
if (GET_CODE (last_insn) == CODE_LABEL)
struct nesting *block = block_stack;
rtx last_insn = get_last_insn ();
- /* If this function was declared to return a value, but we
+ /* If this function was declared to return a value, but we
didn't, clobber the return registers so that they are not
propogated live to the rest of the function. */
clobber_return_register ();
big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
* BITS_PER_UNIT));
- /* Copy the structure BITSIZE bits at a time. */
+ /* Copy the structure BITSIZE bits at a time. */
for (bitpos = 0, xbitpos = big_endian_correction;
bitpos < bytes * BITS_PER_UNIT;
bitpos += bitsize, xbitpos += bitsize)
\f
/* Attempt to optimize a potential tail recursion call into a goto.
ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
- where to place the jump to the tail recursion label.
-
+ where to place the jump to the tail recursion label.
+
Return TRUE if the call was optimized into a goto. */
int
register int j;
for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
- { copy = 1; break; }
+ {
+ copy = 1;
+ break;
+ }
if (copy)
argvec[i] = copy_to_reg (argvec[i]);
}
will not create corresponding BLOCK nodes. (There should be
a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
and BLOCKs.) If this flag is set, MARK_ENDS should be zero
- when expand_end_bindings is called.
+ when expand_end_bindings is called.
If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
optionally be supplied. If so, it becomes the NOTE_BLOCK for the
rtx note;
int exit_flag = ((flags & 1) != 0);
int block_flag = ((flags & 2) == 0);
-
+
/* If a BLOCK is supplied, then the caller should be requesting a
NOTE_INSN_BLOCK_BEG note. */
if (!block_flag && block)
}
else
note = emit_note (NULL_PTR, NOTE_INSN_DELETED);
-
+
/* Make an entry on block_stack for the block we are entering. */
thisblock->next = block_stack;
expand_end_target_temps ()
{
expand_end_bindings (NULL_TREE, 0, 0);
-
+
/* This is so that even if the result is preserved, the space
allocated will be freed, as we know that it is no longer in use. */
pop_temp_slots ();
BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
represents the outermost (function) scope for the function or method (i.e.
the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
- *that* node in turn will point to the relevant FUNCTION_DECL node. */
+ *that* node in turn will point to the relevant FUNCTION_DECL node. */
int
is_body_block (stmt)
if (warn_unused_variable)
for (decl = vars; decl; decl = TREE_CHAIN (decl))
- if (TREE_CODE (decl) == VAR_DECL
+ if (TREE_CODE (decl) == VAR_DECL
&& ! TREE_USED (decl)
&& ! DECL_IN_SYSTEM_HEADER (decl)
- && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
+ && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
warning_with_decl (decl, "unused variable `%s'");
}
/* Since expand_eh_region_start does an expand_start_bindings, we
have to first end all the bindings that were created by
expand_eh_region_start. */
-
+
thisblock = block_stack;
/* If any of the variables in this scope were not used, warn the
if (GET_CODE (insn) == NOTE)
insn = prev_nonnote_insn (insn);
reachable = (! insn || GET_CODE (insn) != BARRIER);
-
+
/* Do the cleanups. */
expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
if (reachable)
free_temp_slots ();
/* Allocate space on the stack for the variable. Note that
- DECL_ALIGN says how the variable is to be aligned and we
+ DECL_ALIGN says how the variable is to be aligned and we
cannot use it to conclude anything about the alignment of
the size. */
address = allocate_dynamic_stack_space (size, NULL_RTX,
TREE_ADDRESSABLE (decl) = 1;
break;
}
-
+
expand_decl (decl);
expand_decl_cleanup (decl, cleanup);
x = DECL_RTL (decl);
start_cleanup_deferral ();
}
-
/* Start a "dummy case statement" within which case labels are invalid
and are not connected to any larger real case statement.
This can be used if you don't want to let a case statement jump
/* If insn is zero, then there must have been a syntax error. */
if (insn)
- warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
- NOTE_LINE_NUMBER(insn),
+ warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
+ NOTE_LINE_NUMBER (insn),
"unreachable code at beginning of %s",
case_stack->data.case_stmt.printname);
break;
if (value2 != 0 && tree_int_cst_lt (value2, value1))
return 4;
- /* If the max was unbounded, use the max of the nominal_type we are
+ /* If the max was unbounded, use the max of the nominal_type we are
converting to. Do this after the < check above to suppress false
positives. */
if (value2 == 0)
return 0;
}
-
\f
/* Returns the number of possible values of TYPE.
Returns -1 if the number is unknown, variable, or if the number does not
TREE_TYPE (val) = type;
if (! root)
- ; /* Do nothing */
+ /* Do nothing. */
+ ;
else if (sparseness == 2)
{
tree t;
duplicate case values (multiple enum constants
with the same value). */
TREE_TYPE (val) = TREE_TYPE (root->low);
- for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
+ for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
t = TREE_CHAIN (t), xlo++)
{
TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
/* Keep going past elements distinctly greater than VAL. */
if (tree_int_cst_lt (val, n->low))
n = n->left;
-
+
/* or distinctly less than VAL. */
else if (tree_int_cst_lt (n->high, val))
n = n->right;
-
+
else
{
/* We have found a matching range. */
{
TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
- while ( ! tree_int_cst_lt (n->high, val))
+ while (! tree_int_cst_lt (n->high, val))
{
/* Calculate (into xlo) the "offset" of the integer (val).
The element with lowest value has offset 0, the next smallest
TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
&xlo, &xhi);
}
-
+
if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
BITARRAY_SET (cases_seen, xlo);
{
register struct case_node *n;
register tree chain;
-#if 0 /* variable used by 'if 0'ed code below. */
+#if 0 /* variable used by 'if 0'ed code below. */
register struct case_node **l;
int all_values = 1;
#endif
if (size > 0 && size < 600000
/* We deliberately use calloc here, not cmalloc, so that we can suppress
- this optimization if we don't have enough memory rather than
+ this optimization if we don't have enough memory rather than
aborting, as xmalloc would do. */
&& (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
{
mark_seen_cases (type, cases_seen, size, sparseness);
for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
- if (BITARRAY_TEST(cases_seen, i) == 0)
+ if (BITARRAY_TEST (cases_seen, i) == 0)
warning ("enumeration value `%s' not handled in switch",
IDENTIFIER_POINTER (TREE_PURPOSE (v)));
if (thiscase->data.case_stmt.case_list
&& thiscase->data.case_stmt.case_list->left)
thiscase->data.case_stmt.case_list
- = case_tree2list(thiscase->data.case_stmt.case_list, 0);
+ = case_tree2list (thiscase->data.case_stmt.case_list, 0);
/* Simplify the case-list before we count it. */
group_case_nodes (thiscase->data.case_stmt.case_list);
generate the conversion. */
if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
- && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
+ && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
== CODE_FOR_nothing))
{
enum machine_mode wider_mode;
use_cost_table
= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
&& estimate_case_costs (thiscase->data.case_stmt.case_list));
- balance_case_nodes (&thiscase->data.case_stmt.case_list,
+ balance_case_nodes (&thiscase->data.case_stmt.case_list,
NULL_PTR);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
default_label, index_type);
index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[0].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[0].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
(index, op_mode))
index = copy_to_mode_reg (op_mode, index);
op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[1].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[1].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
(op1, op_mode))
op1 = copy_to_mode_reg (op_mode, op1);
op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
- op_mode = insn_data[(int)CODE_FOR_casesi].operand[2].mode;
- if (! (*insn_data[(int)CODE_FOR_casesi].operand[2].predicate)
+ op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
+ if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
(op2, op_mode))
op2 = copy_to_mode_reg (op_mode, op2);
{
index_type = thiscase->data.case_stmt.nominal_type;
index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
+ convert (index_type, index_expr),
+ convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_queue ();
index = protect_from_queue (index, 0);
emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
gen_rtx_LABEL_REF (Pmode, table_label),
gen_rtvec_v (ncases, labelvec),
- const0_rtx, const0_rtx));
+ const0_rtx, const0_rtx));
else
emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
gen_rtvec_v (ncases, labelvec)));
if (node_is_bounded (node->right, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0,
- label_rtx (node->right->code_label));
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->right->code_label));
emit_case_nodes (index, node->left, default_label, index_type);
}
else if (node_is_bounded (node->left, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->left->code_label));
emit_case_nodes (index, node->right, default_label, index_type);
/* Neither node is bounded. First distinguish the two sides;
then emit the code for one side at a time. */
- tree test_label
- = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
+ tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
/* See if the value is on the right. */
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
{
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high,
- NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
a branch-greater-than will get us to the default
label correctly. */
if (use_cost_table
- && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
+ && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
;
#endif /* 0 */
- if (node->left->left || node->left->right
+ if (node->left->left || node->left->right
|| !tree_int_cst_equal (node->left->low, node->left->high))
{
if (!node_has_high_bound (node, index_type))
then handle the two subtrees. */
tree test_label = 0;
-
if (node_is_bounded (node->right, index_type))
/* Right hand node is fully bounded so we can eliminate any
testing and branch directly to the target code. */
Branch to a label where we will handle it later. */
test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
label_rtx (test_label));
}
if they are possible. */
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
if they are possible. */
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
GE, NULL_RTX, mode, unsignedp, 0,
label_rtx (node->code_label));
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
GT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
+ emit_cmp_and_jump_insns (index,
+ expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
LT, NULL_RTX, mode, unsignedp, 0,
default_label);
}
}
}
}
-\f