retry:
- /* Look thru the whole pending tree.
+ /* Look through the whole pending tree.
If we find an element that should be output now,
output it. Otherwise, set NEXT to the element
that comes first among those still pending. */
/* Get possible static chain value for nested function in C. */
static_chain_value = lookup_static_chain (fndecl);
- /* Make a valid memory address and copy constants thru pseudo-regs,
+ /* Make a valid memory address and copy constants through pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
/* If we are using registers for parameters, force the
end = BB_END (jump_block);
/* Deleting a block may produce unreachable code warning even when we are
- not deleting anything live. Supress it by moving all the line number
+ not deleting anything live. Suppress it by moving all the line number
notes out of the block. */
for (insn = BB_HEAD (jump_block); insn != NEXT_INSN (BB_END (jump_block));
insn = next)
};
-/* The cgraph data strutcture.
+/* The cgraph data structure.
Each function decl has assigned cgraph_node listing callees and callers. */
struct cgraph_node GTY((chain_next ("%h.next"), chain_prev ("%h.previous")))
bool inline_call;
};
-/* The cgraph_varpool data strutcture.
+/* The cgraph_varpool data structure.
Each static variable decl has assigned cgraph_varpool_node. */
struct cgraph_varpool_node GTY(())
which contains the given subreg. */
rtl = SUBREG_REG (rtl);
- /* Drop thru. */
+ /* Drop through. */
case REG:
which contains the given subreg. */
rtl = SUBREG_REG (rtl);
- /* Drop thru. */
+ /* Drop through. */
case REG:
ASM_OUTPUT_DWARF_STACK_OP (asm_out_file, OP_REG);
return;
}
-/* Go through all the RTL insn bodies and chec that there is no inexpected
+/* Go through all the RTL insn bodies and check that there is no unexpected
sharing in between the subexpressions. */
void
x = convert_memory_address (Pmode, x);
- /* By passing constant addresses thru registers
+ /* By passing constant addresses through registers
we get a chance to cse them. */
if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
x = force_reg (Pmode, x);
if (mode != Pmode)
index = convert_to_mode (Pmode, index, 1);
- /* Don't let a MEM slip thru, because then INDEX that comes
+ /* Don't let a MEM slip through, because then INDEX that comes
out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
and break_out_memory_refs will go to work on it and mess it up. */
#ifdef PIC_CASE_VECTOR_ADDRESS
case SET:
if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
flags |= PROP_ASM_SCAN;
- /* Fall thru */
+ /* Fall through */
case CLOBBER:
mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
return;
mark_set:
if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
flags |= PROP_ASM_SCAN;
- /* Fall thru */
+ /* Fall through */
case CLOBBER:
mark_clob:
mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);
tem = decl_function_context (tem);
if (tem == 0)
break;
- /* Chain thru stack frames, assuming pointer to next lexical frame
+ /* Chain through stack frames, assuming pointer to next lexical frame
is found at the place we always store it. */
#ifdef FRAME_GROWS_DOWNWARD
last_ptr = plus_constant (last_ptr,
{
/* verbose_only_flag should act as if the spec was
executed, so increment execution_count before
- returning. Theis prevent spurious warnings about
+ returning. This prevents spurious warnings about
unused linker input files, etc. */
execution_count++;
return 0;
If we track inter-zone pointers, we can mark single zones at a
time.
If we have a zone where we guarantee no inter-zone pointers, we
- could mark that zone seperately.
+ could mark that zone separately.
The garbage zone should not be marked, and we should return 1 in
ggc_set_mark for any object in the garbage zone, which cuts off
marking quickly. */
if (HAVE_conditional_execution)
{
/* In the conditional execution case, we have things easy. We know
- the condition is reversible. We don't have to check life info,
- becase we're going to conditionally execute the code anyway.
+ the condition is reversible. We don't have to check life info
+ because we're going to conditionally execute the code anyway.
All that's left is making sure the insns involved can actually
be predicated. */
/* Number of words needed to hold the data in given quantity.
This depends on its machine mode. It is used for these purposes:
- 1. It is used in computing the relative importances of qtys,
+ 1. It is used in computing the relative importance of qtys,
which determines the order in which we look for regs for them.
2. It is used in rules that prevent tying several registers of
different sizes in a way that is geometrically impossible
/* To keep queue of basic blocks to process. */
basic_block next;
- /* True if block needs to be visited in prop_freqency. */
+ /* True if block needs to be visited in propagate_freq. */
int tovisit:1;
/* Number of predecessors we need to visit first. */
}
/* Return the amount of characters PRETTY-PRINTER can accept to
- make a full line. Meaningfull only in line-wrapping mode. */
+ make a full line. Meaningful only in line-wrapping mode. */
int
pp_base_remaining_character_count_for_line (pretty_printer *pp)
{
}
for (e = bb->succ; e; e = e->succ_next)
{
- /* Function may return twice in the cased the called fucntion is
+ /* Function may return twice in the case the called function is
setjmp or calls fork, but we can't represent this by extra
edge from the entry, since extra edge from the exit is
already present. We get negative frequency from the entry
{
/* The main webs do _not_ conflict, only some parts of both. This
means, that 4 is possibly true, so we need to check this too.
- For this we go thru all sub conflicts between T and C, and see if
+ For this we go through all sub conflicts between T and C, and see if
the target part of C already conflicts with S. When this is not
the case we disallow coalescing. */
struct sub_conflict *sl;
}
}
/* The next if() only gets true, if there was no wl->sub at all, in
- which case we are only making one go thru this loop with W being
+ which case we are only making one go through this loop with W being
a whole web. */
if (!sl)
break;
even if we spill this one here, the situation won't become better
in the next iteration. It probably will have the same conflicts,
those will have the same colors, and we would come here again, for
- all parts, in which this one gets splitted by the spill. This
+ all parts, in which this one gets split by the spill. This
can result in endless iteration spilling the same register again and
again. That's why we try to find a neighbor, which spans more
instructions that ourself, and got a color, and try to spill _that_.
else if (web2->type == SELECT)
/* This means, that WEB2 once was a part of a coalesced
web, which got spilled in the above colorize_one_web()
- call, and whose parts then got splitted and put back
+ call, and whose parts then got split and put back
onto the SELECT stack. As the cause for that splitting
(the coloring of WEB) was worthless, we should again
coalesce the parts, as they were before. For now we
}
/* MAX_VISUAL_LINES is the maximum number of lines in visualization table
- of a basic block. If more lines are needed, table is splitted to two.
+ of a basic block. If more lines are needed, table is split to two.
n_visual_lines is the number of lines printed so far for a block.
visual_tbl contains the block visualization info.
vis_no_unit holds insns in a cycle that are not mapped to any unit. */
else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
/* Don't touch DECL_ALIGN. For other packed fields, go ahead and
round up; we'll reduce it again below. We want packing to
- supercede USER_ALIGN inherited from the type, but defer to
+ supersede USER_ALIGN inherited from the type, but defer to
alignment explicitly specified on the field decl. */;
else
do_type_align (type, decl);