/* A pass for lowering trees to RTL.
- Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2004-2013 Free Software Foundation, Inc.
This file is part of GCC.
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
-#include "timevar.h"
-#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
-#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
+#include "cfgloop.h"
+#include "regs.h" /* For reg_renumber. */
#include "insn-attr.h" /* For INSN_SCHEDULING. */
+#include "asan.h"
/* This variable holds information helping the rewriting of SSA trees
into RTL. */
&& gimple_location (stmt) != EXPR_LOCATION (t))
|| (gimple_block (stmt)
&& currently_expanding_to_rtl
- && EXPR_P (t)
- && gimple_block (stmt) != TREE_BLOCK (t)))
+ && EXPR_P (t)))
t = copy_node (t);
}
else
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
SET_EXPR_LOCATION (t, gimple_location (stmt));
- if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
- TREE_BLOCK (t) = gimple_block (stmt);
return t;
}
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;
+/* Conflict bitmaps go on this obstack. This allows us to destroy
+ all of them in one big sweep. */
+static bitmap_obstack stack_var_bitmap_obstack;
+
/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
is non-decreasing. */
static size_t *stack_vars_sorted;
struct stack_var *a = &stack_vars[x];
struct stack_var *b = &stack_vars[y];
if (!a->conflicts)
- a->conflicts = BITMAP_ALLOC (NULL);
+ a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
if (!b->conflicts)
- b->conflicts = BITMAP_ALLOC (NULL);
+ b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
bitmap_set_bit (a->conflicts, y);
bitmap_set_bit (b->conflicts, x);
}
return bitmap_bit_p (a->conflicts, y);
}
-/* Returns true if TYPE is or contains a union type. */
-
-static bool
-aggregate_contains_union_type (tree type)
-{
- tree field;
-
- if (TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
- return true;
- if (TREE_CODE (type) == ARRAY_TYPE)
- return aggregate_contains_union_type (TREE_TYPE (type));
- if (TREE_CODE (type) != RECORD_TYPE)
- return false;
-
- for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
- if (TREE_CODE (field) == FIELD_DECL)
- if (aggregate_contains_union_type (TREE_TYPE (field)))
- return true;
-
- return false;
-}
-
-/* A subroutine of expand_used_vars. If two variables X and Y have alias
- sets that do not conflict, then do add a conflict for these variables
- in the interference graph. We also need to make sure to add conflicts
- for union containing structures. Else RTL alias analysis comes along
- and due to type based aliasing rules decides that for two overlapping
- union temporaries { short s; int i; } accesses to the same mem through
- different types may not alias and happily reorders stores across
- life-time boundaries of the temporaries (See PR25654). */
-
-static void
-add_alias_set_conflicts (void)
-{
- size_t i, j, n = stack_vars_num;
-
- for (i = 0; i < n; ++i)
- {
- tree type_i = TREE_TYPE (stack_vars[i].decl);
- bool aggr_i = AGGREGATE_TYPE_P (type_i);
- bool contains_union;
-
- contains_union = aggregate_contains_union_type (type_i);
- for (j = 0; j < i; ++j)
- {
- tree type_j = TREE_TYPE (stack_vars[j].decl);
- bool aggr_j = AGGREGATE_TYPE_P (type_j);
- if (aggr_i != aggr_j
- /* Either the objects conflict by means of type based
- aliasing rules, or we need to add a conflict. */
- || !objects_must_conflict_p (type_i, type_j)
- /* In case the types do not conflict ensure that access
- to elements will conflict. In case of unions we have
- to be careful as type based aliasing rules may say
- access to the same memory does not conflict. So play
- safe and add a conflict in this case when
- -fstrict-aliasing is used. */
- || (contains_union && flag_strict_aliasing))
- add_stack_var_conflict (i, j);
- }
- }
-}
-
/* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
enter its partition number into bitmap DATA. */
unsigned i;
EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
{
- unsigned j;
- bitmap_iterator bj;
- EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
- add_stack_var_conflict (i, j);
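+ /* Instead of adding each conflicting pair separately, OR the
+ whole set of currently live variables into I's conflict
+ bitmap at once. */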
+ struct stack_var *a = &stack_vars[i];
+ if (!a->conflicts)
+ a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
+ bitmap_ior_into (a->conflicts, work);
}
visit = visit_conflict;
}
basic_block bb;
bool changed;
bitmap work = BITMAP_ALLOC (NULL);
+ int *rpo;
+ int n_bbs;
/* We approximate the live range of a stack variable by taking the first
mention of its name as starting point(s), and by the end-of-scope
We then do a mostly classical bitmap liveness algorithm. */
FOR_ALL_BB (bb)
- bb->aux = BITMAP_ALLOC (NULL);
+ bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
+
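+ /* Visiting blocks in reverse post-order lets this forward
+ dataflow problem converge in fewer passes over the CFG. */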
+ rpo = XNEWVEC (int, last_basic_block);
+ n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
changed = true;
while (changed)
{
+ int i;
changed = false;
- FOR_EACH_BB (bb)
+ for (i = 0; i < n_bbs; i++)
{
- bitmap active = (bitmap)bb->aux;
+ bitmap active;
+ bb = BASIC_BLOCK (rpo[i]);
+ active = (bitmap)bb->aux;
add_scope_conflicts_1 (bb, work, false);
if (bitmap_ior_into (active, work))
changed = true;
FOR_EACH_BB (bb)
add_scope_conflicts_1 (bb, work, true);
+ free (rpo);
BITMAP_FREE (work);
FOR_ALL_BB (bb)
BITMAP_FREE (bb->aux);
{
tree decl = stack_vars[j].decl;
unsigned int uid = DECL_PT_UID (decl);
- /* We should never end up partitioning SSA names (though they
- may end up on the stack). Neither should we allocate stack
- space to something that is unused and thus unreferenced, except
- for -O0 where we are preserving even unreferenced variables. */
- gcc_assert (DECL_P (decl)
- && (!optimize
- || referenced_var_lookup (cfun, DECL_UID (decl))));
bitmap_set_bit (part, uid);
*((bitmap *) pointer_map_insert (decls_to_partitions,
(void *)(size_t) uid)) = part;
{
unsigned i;
struct pointer_set_t *visited = pointer_set_create ();
- bitmap temp = BITMAP_ALLOC (NULL);
+ bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
for (i = 1; i < num_ssa_names; i++)
{
{
size_t i = stack_vars_sorted[si];
unsigned int ialign = stack_vars[i].alignb;
+ HOST_WIDE_INT isize = stack_vars[i].size;
/* Ignore objects that aren't partition representatives. If we
see a var that is not a partition representative, it must
{
size_t j = stack_vars_sorted[sj];
unsigned int jalign = stack_vars[j].alignb;
+ HOST_WIDE_INT jsize = stack_vars[j].size;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
continue;
- /* Ignore conflicting objects. */
- if (stack_var_conflict_p (i, j))
- continue;
-
/* Do not mix objects of "small" (supported) alignment
and "large" (unsupported) alignment. */
if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
!= (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
+ break;
+
+ /* For Address Sanitizer, do not mix objects with different
+ sizes, as the shorter vars wouldn't be adequately protected.
+ Don't do that for "large" (unsupported) alignment objects;
+ those aren't protected anyway. */
+ if (flag_asan && isize != jsize
+ && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
+ break;
+
+ /* Ignore conflicting objects. */
+ if (stack_var_conflict_p (i, j))
continue;
/* UNION the objects, placing J at OFFSET. */
/* If this fails, we've overflowed the stack frame. Error nicely? */
gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
- x = plus_constant (base, offset);
+ x = plus_constant (Pmode, base, offset);
x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
if (TREE_CODE (decl) != SSA_NAME)
set_rtl (decl, x);
}
+struct stack_vars_data
+{
+ /* Vector of offset pairs, each being the end of some padding followed
+ by the start of the padding that needs Address Sanitizer protection.
+ The vector is reversed; highest-offset pairs come first. */
+ vec<HOST_WIDE_INT> asan_vec;
+
+ /* Vector of partition representative decls in between the paddings. */
+ vec<tree> asan_decl_vec;
+};
+
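+/* Each protected partition contributes two offsets to asan_vec (the
+ end of the previous padding and the start of the next one) and one
+ representative decl to asan_decl_vec; expand_used_vars pushes a
+ final offset pair before calling asan_emit_stack_protection. */
+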
/* A subroutine of expand_used_vars. Give each partition representative
a unique location within the stack frame. Update each partition member
with that location. */
static void
-expand_stack_vars (bool (*pred) (tree))
+expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
size_t si, i, j, n = stack_vars_num;
HOST_WIDE_INT large_size = 0, large_alloc = 0;
/* Check the predicate to see whether this variable should be
allocated in this pass. */
- if (pred && !pred (decl))
+ if (pred && !pred (i))
continue;
alignb = stack_vars[i].alignb;
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
- offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
+ if (flag_asan && pred)
+ {
+ HOST_WIDE_INT prev_offset = frame_offset;
+ tree repr_decl = NULL_TREE;
+
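+ /* Allocate the variable together with a trailing red zone, so
+ accesses running past the variable hit poisoned shadow memory. */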
+ offset
+ = alloc_stack_frame_space (stack_vars[i].size
+ + ASAN_RED_ZONE_SIZE,
+ MAX (alignb, ASAN_RED_ZONE_SIZE));
+ data->asan_vec.safe_push (prev_offset);
+ data->asan_vec.safe_push (offset + stack_vars[i].size);
+ /* Find the best representative of the partition.
+ Prefer those with DECL_NAME, and better still those that
+ satisfy the asan_protect_stack_decl predicate. */
+ for (j = i; j != EOC; j = stack_vars[j].next)
+ if (asan_protect_stack_decl (stack_vars[j].decl)
+ && DECL_NAME (stack_vars[j].decl))
+ {
+ repr_decl = stack_vars[j].decl;
+ break;
+ }
+ else if (repr_decl == NULL_TREE
+ && DECL_P (stack_vars[j].decl)
+ && DECL_NAME (stack_vars[j].decl))
+ repr_decl = stack_vars[j].decl;
+ if (repr_decl == NULL_TREE)
+ repr_decl = stack_vars[i].decl;
+ data->asan_decl_vec.safe_push (repr_decl);
+ }
+ else
+ offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
base = virtual_stack_vars_rtx;
base_align = crtl->max_used_stack_slot_alignment;
}
defer_stack_allocation (tree var, bool toplevel)
{
/* If stack protection is enabled, *all* stack variables must be deferred,
- so that we can re-order the strings to the top of the frame. */
- if (flag_stack_protect)
+ so that we can re-order the strings to the top of the frame.
+ Similarly for Address Sanitizer. */
+ if (flag_stack_protect || flag_asan)
return true;
/* We handle "large" alignment via dynamic allocation. We want to handle
if (really_expand)
expand_one_register_var (origvar);
}
- else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
+ else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
{
+ /* Reject variables that cover more than half of the address space. */
if (really_expand)
{
error ("size of variable %q+D is too large", var);
as callbacks for expand_stack_vars. */
static bool
-stack_protect_decl_phase_1 (tree decl)
+stack_protect_decl_phase_1 (size_t i)
{
- return stack_protect_decl_phase (decl) == 1;
+ return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}
static bool
-stack_protect_decl_phase_2 (tree decl)
+stack_protect_decl_phase_2 (size_t i)
{
- return stack_protect_decl_phase (decl) == 2;
+ return stack_protect_decl_phase (stack_vars[i].decl) == 2;
+}
+
+/* A helper function that checks for the asan phase (with the stack
+ protector it is phase 3). This is used as a callback for
+ expand_stack_vars. Returns true if any of the vars in the partition
+ need to be protected. */
+
+static bool
+asan_decl_phase_3 (size_t i)
+{
+ while (i != EOC)
+ {
+ if (asan_protect_stack_decl (stack_vars[i].decl))
+ return true;
+ i = stack_vars[i].next;
+ }
+ return false;
}
/* Ensure that variables in different stack protection phases conflict
for (i = 0; i < n; ++i)
{
unsigned char ph_i = phase[i];
- for (j = 0; j < i; ++j)
+ for (j = i + 1; j < n; ++j)
if (ph_i != phase[j])
add_stack_var_conflict (i, j);
}
static void
init_vars_expansion (void)
{
- tree t;
- unsigned ix;
- /* Set TREE_USED on all variables in the local_decls. */
- FOR_EACH_LOCAL_DECL (cfun, ix, t)
- TREE_USED (t) = 1;
+ /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
+ bitmap_obstack_initialize (&stack_var_bitmap_obstack);
- /* Clear TREE_USED on all variables associated with a block scope. */
- clear_tree_used (DECL_INITIAL (current_function_decl));
+ /* A map from decl to stack partition. */
+ decl_to_stack_part = pointer_map_create ();
/* Initialize local stack smashing state. */
has_protected_decls = false;
static void
fini_vars_expansion (void)
{
- size_t i, n = stack_vars_num;
- for (i = 0; i < n; i++)
- BITMAP_FREE (stack_vars[i].conflicts);
- XDELETEVEC (stack_vars);
- XDELETEVEC (stack_vars_sorted);
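+ /* Releasing the obstack frees all conflict bitmaps in one sweep;
+ no per-variable BITMAP_FREE is needed anymore. */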
+ bitmap_obstack_release (&stack_var_bitmap_obstack);
+ if (stack_vars)
+ XDELETEVEC (stack_vars);
+ if (stack_vars_sorted)
+ XDELETEVEC (stack_vars_sorted);
stack_vars = NULL;
+ stack_vars_sorted = NULL;
stack_vars_alloc = stack_vars_num = 0;
pointer_map_destroy (decl_to_stack_part);
decl_to_stack_part = NULL;
HOST_WIDE_INT size = 0;
size_t i;
tree var;
- tree old_cur_fun_decl = current_function_decl;
- referenced_var_iterator rvi;
- struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
- current_function_decl = node->decl;
push_cfun (fn);
- gcc_checking_assert (gimple_referenced_vars (fn));
- FOR_EACH_REFERENCED_VAR (fn, var, rvi)
- size += expand_one_var (var, true, false);
+ init_vars_expansion ();
+
+ FOR_EACH_LOCAL_DECL (fn, i, var)
+ if (auto_var_in_fn_p (var, fn->decl))
+ size += expand_one_var (var, true, false);
if (stack_vars_num > 0)
{
for (i = 0; i < stack_vars_num; ++i)
stack_vars_sorted[i] = i;
size += account_stack_vars ();
- fini_vars_expansion ();
}
+
+ fini_vars_expansion ();
pop_cfun ();
- current_function_decl = old_cur_fun_decl;
return size;
}
/* Expand all variables used in the function. */
-static void
+static rtx
expand_used_vars (void)
{
tree var, outer_block = DECL_INITIAL (current_function_decl);
- VEC(tree,heap) *maybe_local_decls = NULL;
+ vec<tree> maybe_local_decls = vNULL;
+ rtx var_end_seq = NULL_RTX;
+ struct pointer_map_t *ssa_name_decls;
unsigned i;
unsigned len;
frame_phase = off ? align - off : 0;
}
+ /* Set TREE_USED on all variables in the local_decls. */
+ FOR_EACH_LOCAL_DECL (cfun, i, var)
+ TREE_USED (var) = 1;
+ /* Clear TREE_USED on all variables associated with a block scope. */
+ clear_tree_used (DECL_INITIAL (current_function_decl));
+
init_vars_expansion ();
+ ssa_name_decls = pointer_map_create ();
for (i = 0; i < SA.map->num_partitions; i++)
{
tree var = partition_to_var (SA.map, i);
- gcc_assert (is_gimple_reg (var));
+ gcc_assert (!virtual_operand_p (var));
+
+ /* Assign decls to each SSA name partition, sharing decls for
+ partitions we could have coalesced (those with the same type). */
+ if (SSA_NAME_VAR (var) == NULL_TREE)
+ {
+ void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
+ if (!*slot)
+ *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
+ replace_ssa_name_symbol (var, (tree) *slot);
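+ /* Non-representative members of a partition are given the
+ leader's decl later; see the replace_ssa_name_symbol fixup
+ in gimple_expand_cfg. */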
+ }
+
if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
expand_one_var (var, true, true);
else
}
}
}
+ pointer_map_destroy (ssa_name_decls);
/* At this point all variables on the local_decls with TREE_USED
set are not associated with any block scope. Lay them out. */
- len = VEC_length (tree, cfun->local_decls);
+ len = vec_safe_length (cfun->local_decls);
FOR_EACH_LOCAL_DECL (cfun, i, var)
{
bool expand_now = false;
/* If rtl isn't set yet, which can happen e.g. with
-fstack-protector, retry before returning from this
function. */
- VEC_safe_push (tree, heap, maybe_local_decls, var);
+ maybe_local_decls.safe_push (var);
}
}
We just want the duplicates, as those are the artificial
non-ignored vars that we want to keep until instantiate_decls.
Move them down and truncate the array. */
- if (!VEC_empty (tree, cfun->local_decls))
- VEC_block_remove (tree, cfun->local_decls, 0, len);
+ if (!vec_safe_is_empty (cfun->local_decls))
+ cfun->local_decls->block_remove (0, len);
/* At this point, all variables within the block tree with TREE_USED
set are actually used by the optimized function. Lay them out. */
if (stack_vars_num > 0)
{
add_scope_conflicts ();
- /* Due to the way alias sets work, no variables with non-conflicting
- alias sets may be assigned the same address. Add conflicts to
- reflect this. */
- add_alias_set_conflicts ();
/* If stack protection is enabled, we don't share space between
vulnerable data and non-vulnerable data. */
/* Assign rtl to each variable based on these partitions. */
if (stack_vars_num > 0)
{
+ struct stack_vars_data data;
+
+ data.asan_vec = vNULL;
+ data.asan_decl_vec = vNULL;
+
/* Reorder decls to be protected by iterating over the variables
array multiple times, and allocating out of each phase in turn. */
/* ??? We could probably integrate this into the qsort we did
if (has_protected_decls)
{
/* Phase 1 contains only character arrays. */
- expand_stack_vars (stack_protect_decl_phase_1);
+ expand_stack_vars (stack_protect_decl_phase_1, &data);
/* Phase 2 contains other kinds of arrays. */
if (flag_stack_protect == 2)
- expand_stack_vars (stack_protect_decl_phase_2);
+ expand_stack_vars (stack_protect_decl_phase_2, &data);
+ }
+
+ if (flag_asan)
+ /* Phase 3: any partitions that need asan protection in addition
+ to phases 1 and 2. */
+ expand_stack_vars (asan_decl_phase_3, &data);
+
+ if (!data.asan_vec.is_empty ())
+ {
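+ /* Allocate one final red zone past the protected variables and
+ record its offset pair. asan_emit_stack_protection emits the
+ code that poisons the red zones here and returns the cleanup
+ sequence, which is emitted after the return label. */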
+ HOST_WIDE_INT prev_offset = frame_offset;
+ HOST_WIDE_INT offset
+ = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
+ ASAN_RED_ZONE_SIZE);
+ data.asan_vec.safe_push (prev_offset);
+ data.asan_vec.safe_push (offset);
+
+ var_end_seq
+ = asan_emit_stack_protection (virtual_stack_vars_rtx,
+ data.asan_vec.address (),
+ data.asan_decl_vec.address (),
+ data.asan_vec.length ());
}
- expand_stack_vars (NULL);
+ expand_stack_vars (NULL, &data);
- fini_vars_expansion ();
+ data.asan_vec.release ();
+ data.asan_decl_vec.release ();
}
+ fini_vars_expansion ();
+
/* If there were any artificial non-ignored vars without rtl
found earlier, see if deferred stack allocation hasn't assigned
rtl to them. */
- FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
+ FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
{
rtx rtl = DECL_RTL_IF_SET (var);
if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
add_local_decl (cfun, var);
}
- VEC_free (tree, heap, maybe_local_decls);
+ maybe_local_decls.release ();
/* If the target requires that FRAME_OFFSET be aligned, do it. */
if (STACK_ALIGNMENT_NEEDED)
frame_offset += align - 1;
frame_offset &= -align;
}
+
+ return var_end_seq;
}
last2 = last = get_last_insn ();
extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
- set_curr_insn_source_location (gimple_location (stmt));
- set_curr_insn_block (gimple_block (stmt));
+ set_curr_insn_location (gimple_location (stmt));
/* These flags have no purpose in RTL land. */
true_edge->flags &= ~EDGE_TRUE_VALUE;
jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
true_edge->probability);
maybe_dump_rtl_for_gimple_stmt (stmt, last);
- if (true_edge->goto_locus)
- {
- set_curr_insn_source_location (true_edge->goto_locus);
- set_curr_insn_block (true_edge->goto_block);
- true_edge->goto_locus = curr_insn_locator ();
- }
- true_edge->goto_block = NULL;
+ if (true_edge->goto_locus != UNKNOWN_LOCATION)
+ set_curr_insn_location (true_edge->goto_locus);
false_edge->flags |= EDGE_FALLTHRU;
maybe_cleanup_end_of_block (false_edge, last);
return NULL;
jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
false_edge->probability);
maybe_dump_rtl_for_gimple_stmt (stmt, last);
- if (false_edge->goto_locus)
- {
- set_curr_insn_source_location (false_edge->goto_locus);
- set_curr_insn_block (false_edge->goto_block);
- false_edge->goto_locus = curr_insn_locator ();
- }
- false_edge->goto_block = NULL;
+ if (false_edge->goto_locus != UNKNOWN_LOCATION)
+ set_curr_insn_location (false_edge->goto_locus);
true_edge->flags |= EDGE_FALLTHRU;
maybe_cleanup_end_of_block (true_edge, last);
return NULL;
jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
true_edge->probability);
last = get_last_insn ();
- if (false_edge->goto_locus)
- {
- set_curr_insn_source_location (false_edge->goto_locus);
- set_curr_insn_block (false_edge->goto_block);
- false_edge->goto_locus = curr_insn_locator ();
- }
- false_edge->goto_block = NULL;
+ if (false_edge->goto_locus != UNKNOWN_LOCATION)
+ set_curr_insn_location (false_edge->goto_locus);
emit_jump (label_rtx_for_bb (false_edge->dest));
BB_END (bb) = last;
false_edge->flags |= EDGE_FALLTHRU;
new_bb->count = false_edge->count;
new_bb->frequency = EDGE_FREQUENCY (false_edge);
+ if (current_loops && bb->loop_father)
+ add_bb_to_loop (new_bb, bb->loop_father);
new_edge = make_edge (new_bb, dest, 0);
new_edge->probability = REG_BR_PROB_BASE;
new_edge->count = new_bb->count;
maybe_dump_rtl_for_gimple_stmt (stmt, last2);
- if (true_edge->goto_locus)
+ if (true_edge->goto_locus != UNKNOWN_LOCATION)
{
- set_curr_insn_source_location (true_edge->goto_locus);
- set_curr_insn_block (true_edge->goto_block);
- true_edge->goto_locus = curr_insn_locator ();
+ set_curr_insn_location (true_edge->goto_locus);
+ true_edge->goto_locus = curr_insn_location ();
}
- true_edge->goto_block = NULL;
return new_bb;
}
CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
SET_EXPR_LOCATION (exp, gimple_location (stmt));
- TREE_BLOCK (exp) = gimple_block (stmt);
/* Ensure RTL is created for debug args. */
if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
{
- VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
+ vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
unsigned int ix;
tree dtemp;
if (debug_args)
- for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
+ for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
{
gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
expand_debug_expr (dtemp);
{
tree op0;
- set_curr_insn_source_location (gimple_location (stmt));
- set_curr_insn_block (gimple_block (stmt));
+ set_curr_insn_location (gimple_location (stmt));
switch (gimple_code (stmt))
{
}
/* FALLTHROUGH */
case INDIRECT_REF:
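+ /* plus_constant now takes an explicit mode, and op0 may have
+ VOIDmode if it is a constant, so remember the mode of the
+ operand's type up front. */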
+ inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
if (!op0)
return NULL;
if (!op1 || !CONST_INT_P (op1))
return NULL;
- op0 = plus_constant (op0, INTVAL (op1));
+ op0 = plus_constant (inner_mode, op0, INTVAL (op1));
}
if (POINTER_TYPE_P (TREE_TYPE (exp)))
&& (bitoffset % BITS_PER_UNIT) == 0
&& bitsize > 0
&& bitsize == maxsize)
- return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
- bitoffset / BITS_PER_UNIT);
+ {
+ rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
+ return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
+ }
+ }
+
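+ /* An ADDR_EXPR of a MEM_REF whose base is the address of a decl
+ can likewise be represented as a DEBUG_IMPLICIT_PTR for the decl
+ plus the constant byte offset. */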
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ == ADDR_EXPR)
+ {
+ op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
+ 0));
+ if (op0 != NULL
+ && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
+ || (GET_CODE (op0) == PLUS
+ && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
+ && CONST_INT_P (XEXP (op0, 1)))))
+ {
+ op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
+ 1));
+ if (!op1 || !CONST_INT_P (op1))
+ return NULL;
+
+ return plus_constant (mode, op0, INTVAL (op1));
+ }
}
return NULL;
return op0;
case VECTOR_CST:
- exp = build_constructor_from_list (TREE_TYPE (exp),
- TREE_VECTOR_CST_ELTS (exp));
- /* Fall through. */
+ {
+ unsigned i;
+
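+ /* Expand the vector constant element by element into a CONCATN;
+ if any element fails to expand, the whole value is unavailable. */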
+ op0 = gen_rtx_CONCATN
+ (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
+
+ for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
+ {
+ op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
+ if (!op1)
+ return NULL;
+ XVECEXP (op0, 0, i) = op1;
+ }
+
+ return op0;
+ }
case CONSTRUCTOR:
if (TREE_CLOBBER_P (exp))
case VEC_UNPACK_LO_EXPR:
case VEC_WIDEN_MULT_HI_EXPR:
case VEC_WIDEN_MULT_LO_EXPR:
+ case VEC_WIDEN_MULT_EVEN_EXPR:
+ case VEC_WIDEN_MULT_ODD_EXPR:
case VEC_WIDEN_LSHIFT_HI_EXPR:
case VEC_WIDEN_LSHIFT_LO_EXPR:
case VEC_PERM_EXPR:
return NULL;
- /* Misc codes. */
+ /* Misc codes.  */
case ADDR_SPACE_CONVERT_EXPR:
case FIXED_CONVERT_EXPR:
case OBJ_TYPE_REF:
}
return NULL;
+ case MULT_HIGHPART_EXPR:
+ /* ??? Similar to the above. */
+ return NULL;
+
case WIDEN_SUM_EXPR:
case WIDEN_LSHIFT_EXPR:
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& !DECL_INCOMING_RTL (exp)
&& DECL_ABSTRACT_ORIGIN (current_function_decl))
{
- tree aexp = exp;
- if (DECL_ABSTRACT_ORIGIN (exp))
- aexp = DECL_ABSTRACT_ORIGIN (exp);
+ tree aexp = DECL_ORIGIN (exp);
if (DECL_CONTEXT (aexp)
== DECL_ABSTRACT_ORIGIN (current_function_decl))
{
- VEC(tree, gc) **debug_args;
+ vec<tree, va_gc> **debug_args;
unsigned int ix;
tree ddecl;
-#ifdef ENABLE_CHECKING
- tree parm;
- for (parm = DECL_ARGUMENTS (current_function_decl);
- parm; parm = DECL_CHAIN (parm))
- gcc_assert (parm != exp
- && DECL_ABSTRACT_ORIGIN (parm) != aexp);
-#endif
debug_args = decl_debug_args_lookup (current_function_decl);
if (debug_args != NULL)
{
- for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
+ for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ix += 2)
if (ddecl == aexp)
return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
gcc_assert (mode == GET_MODE (val)
|| (GET_MODE (val) == VOIDmode
- && (CONST_INT_P (val)
+ && (CONST_SCALAR_INT_P (val)
|| GET_CODE (val) == CONST_FIXED
- || GET_CODE (val) == CONST_DOUBLE
|| GET_CODE (val) == LABEL_REF)));
}
/* Expand basic block BB from GIMPLE trees to RTL. */
static basic_block
-expand_gimple_basic_block (basic_block bb)
+expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
gimple_stmt_iterator gsi;
gimple_seq stmts;
block to be in GIMPLE, instead of RTL. Therefore, we need to
access the BB sequence directly. */
stmts = bb_seq (bb);
- bb->il.gimple = NULL;
+ bb->il.gimple.seq = NULL;
+ bb->il.gimple.phi_nodes = NULL;
rtl_profile_for_bb (bb);
init_rtl_bb_info (bb);
bb->flags |= BB_RTL;
tree op;
gimple def;
- location_t sloc = get_curr_insn_source_location ();
- tree sblock = get_curr_insn_block ();
+ location_t sloc = curr_insn_location ();
/* Look for SSA names that have their last use here (TERed
names always have only one real use). */
rtx val;
enum machine_mode mode;
- set_curr_insn_source_location (gimple_location (def));
- set_curr_insn_block (gimple_block (def));
+ set_curr_insn_location (gimple_location (def));
DECL_ARTIFICIAL (vexpr) = 1;
TREE_TYPE (vexpr) = TREE_TYPE (value);
}
}
}
- set_curr_insn_source_location (sloc);
- set_curr_insn_block (sblock);
+ set_curr_insn_location (sloc);
}
currently_expanding_gimple_stmt = stmt;
}
else if (gimple_debug_bind_p (stmt))
{
- location_t sloc = get_curr_insn_source_location ();
- tree sblock = get_curr_insn_block ();
+ location_t sloc = curr_insn_location ();
gimple_stmt_iterator nsi = gsi;
for (;;)
last = get_last_insn ();
- set_curr_insn_source_location (gimple_location (stmt));
- set_curr_insn_block (gimple_block (stmt));
+ set_curr_insn_location (gimple_location (stmt));
if (DECL_P (var))
mode = DECL_MODE (var);
break;
}
- set_curr_insn_source_location (sloc);
- set_curr_insn_block (sblock);
+ set_curr_insn_location (sloc);
}
else if (gimple_debug_source_bind_p (stmt))
{
- location_t sloc = get_curr_insn_source_location ();
- tree sblock = get_curr_insn_block ();
+ location_t sloc = curr_insn_location ();
tree var = gimple_debug_source_bind_get_var (stmt);
tree value = gimple_debug_source_bind_get_value (stmt);
rtx val;
last = get_last_insn ();
- set_curr_insn_source_location (gimple_location (stmt));
- set_curr_insn_block (gimple_block (stmt));
+ set_curr_insn_location (gimple_location (stmt));
mode = DECL_MODE (var);
PAT_VAR_LOCATION_LOC (val) = (rtx)value;
}
- set_curr_insn_source_location (sloc);
- set_curr_insn_block (sblock);
+ set_curr_insn_location (sloc);
}
else
{
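+ /* A pending var_end_seq (the asan stack-cleanup code emitted
+ after the return label) must not be bypassed, so tail calls
+ are disabled while such a sequence exists. */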
+ if (is_gimple_call (stmt)
+ && gimple_call_tail_p (stmt)
+ && disable_tail_calls)
+ gimple_call_set_tail (stmt, false);
+
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
{
bool can_fallthru;
/* Expand implicit goto and convert goto_locus. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
- if (e->goto_locus && e->goto_block)
- {
- set_curr_insn_source_location (e->goto_locus);
- set_curr_insn_block (e->goto_block);
- e->goto_locus = curr_insn_locator ();
- }
- e->goto_block = NULL;
+ if (e->goto_locus != UNKNOWN_LOCATION)
+ set_curr_insn_location (e->goto_locus);
if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
{
emit_jump (label_rtx_for_bb (e->dest));
ENTRY_BLOCK_PTR);
init_block->frequency = ENTRY_BLOCK_PTR->frequency;
init_block->count = ENTRY_BLOCK_PTR->count;
+ if (current_loops && ENTRY_BLOCK_PTR->loop_father)
+ add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
if (e)
{
first_block = e->dest;
/* Make sure the locus is set to the end of the function, so that
epilogue line numbers and warnings are set properly. */
- if (cfun->function_end_locus != UNKNOWN_LOCATION)
+ if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
input_location = cfun->function_end_locus;
- /* The following insns belong to the top scope. */
- set_curr_insn_block (DECL_INITIAL (current_function_decl));
-
/* Generate rtl for function exit. */
expand_function_end ();
EXIT_BLOCK_PTR->prev_bb);
exit_block->frequency = EXIT_BLOCK_PTR->frequency;
exit_block->count = EXIT_BLOCK_PTR->count;
+ if (current_loops && EXIT_BLOCK_PTR->loop_father)
+ add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
ix = 0;
while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
sbitmap blocks;
edge_iterator ei;
edge e;
- rtx var_seq;
+ rtx var_seq, var_ret_seq;
unsigned i;
timevar_push (TV_OUT_OF_SSA);
rewrite_out_of_ssa (&SA);
timevar_pop (TV_OUT_OF_SSA);
- SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
- sizeof (rtx));
+ SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
+
+ /* Make sure all values used by the optimization passes have sane
+ defaults. */
+ reg_renumber = 0;
/* Some backends want to know that we are expanding to RTL. */
currently_expanding_to_rtl = 1;
+ /* Dominators are not kept up-to-date as we may create new basic-blocks. */
+ free_dominance_info (CDI_DOMINATORS);
rtl_profile_for_bb (ENTRY_BLOCK_PTR);
- insn_locators_alloc ();
+ insn_locations_init ();
if (!DECL_IS_BUILTIN (current_function_decl))
{
/* Eventually, all FEs should explicitly set function_start_locus. */
- if (cfun->function_start_locus == UNKNOWN_LOCATION)
- set_curr_insn_source_location
+ if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
+ set_curr_insn_location
(DECL_SOURCE_LOCATION (current_function_decl));
else
- set_curr_insn_source_location (cfun->function_start_locus);
+ set_curr_insn_location (cfun->function_start_locus);
}
else
- set_curr_insn_source_location (UNKNOWN_LOCATION);
- set_curr_insn_block (DECL_INITIAL (current_function_decl));
- prologue_locator = curr_insn_locator ();
+ set_curr_insn_location (UNKNOWN_LOCATION);
+ prologue_location = curr_insn_location ();
#ifdef INSN_SCHEDULING
init_sched_attrs ();
timevar_push (TV_VAR_EXPAND);
start_sequence ();
- expand_used_vars ();
+ var_ret_seq = expand_used_vars ();
var_seq = get_insns ();
end_sequence ();
rtx r;
if (!name
- || !POINTER_TYPE_P (TREE_TYPE (name))
/* We might have generated new SSA names in
update_alias_info_with_stack_vars. They will have a NULL
defining statements, and won't be part of the partitioning,
part = var_to_partition (SA.map, name);
if (part == NO_PARTITION)
continue;
+
+ /* Adjust all partition members to get the underlying decl of
+ the representative, which we might have created in expand_one_var. */
+ if (SSA_NAME_VAR (name) == NULL_TREE)
+ {
+ tree leader = partition_to_var (SA.map, part);
+ gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
+ replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
+ }
+ if (!POINTER_TYPE_P (TREE_TYPE (name)))
+ continue;
+
r = SA.partition_to_pseudo[part];
if (REG_P (r))
mark_reg_pointer (r, get_pointer_alignment (name));
lab_rtx_for_bb = pointer_map_create ();
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
- bb = expand_gimple_basic_block (bb);
+ bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
if (MAY_HAVE_DEBUG_INSNS)
expand_debug_locations ();
- execute_free_datastructures ();
+ /* Free stuff we no longer need after GIMPLE optimizations. */
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ delete_tree_cfg_annotations ();
+
timevar_push (TV_OUT_OF_SSA);
finish_out_of_ssa (&SA);
timevar_pop (TV_OUT_OF_SSA);
timevar_push (TV_POST_EXPAND);
/* We are no longer in SSA form. */
cfun->gimple_df->in_ssa_p = false;
+ if (current_loops)
+ loops_state_clear (LOOP_CLOSED_SSA);
/* Expansion is used by optimization passes too, set maybe_hot_insn_p
conservatively to true until they are all profile aware. */
free_histograms ();
construct_exit_block ();
- set_curr_insn_block (DECL_INITIAL (current_function_decl));
- insn_locators_finalize ();
+ insn_locations_finalize ();
+
+ if (var_ret_seq)
+ {
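+ /* Emit the asan cleanup sequence right after the return label,
+ skipping over a NOTE_INSN_BASIC_BLOCK if one directly follows
+ the label. */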
+ rtx after = return_label;
+ rtx next = NEXT_INSN (after);
+ if (next && NOTE_INSN_BASIC_BLOCK_P (next))
+ after = next;
+ emit_insn_after (var_ret_seq, after);
+ }
/* Zap the tree EH table. */
set_eh_throw_stmt_table (cfun, NULL);
}
blocks = sbitmap_alloc (last_basic_block);
- sbitmap_ones (blocks);
+ bitmap_ones (blocks);
find_many_sub_basic_blocks (blocks);
sbitmap_free (blocks);
purge_all_dead_edges ();
- compact_blocks ();
-
expand_stack_alignment ();
+ /* Fix up REG_EQUIV notes in the prologue if there are tailcalls in
+ this function. */
+ if (crtl->tail_call_emit)
+ fixup_tail_calls ();
+
+ /* After initial rtl generation, call back to finish generating
+ exception support code. We need to do this before cleaning up
+ the CFG as the code does not expect dead landing pads. */
+ if (cfun->eh->region_tree != NULL)
+ finish_eh_generation ();
+
+ /* Remove unreachable blocks; otherwise we cannot compute dominators,
+ which are needed for loop state verification. As a side effect,
+ this also compacts blocks.
+ ??? We cannot remove trivially dead insns here as, for example, the
+ DRAP reg on i?86 is not magically live at this point.
+ gcc.c-torture/execute/ipa-sra-2.c fails at execution with -Os -m32 otherwise. */
+ cleanup_cfg (CLEANUP_NO_INSN_DEL);
+
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
+ /* Initialize pseudos allocated for hard registers. */
+ emit_initial_value_sets ();
+
+ /* And finally unshare all RTL. */
+ unshare_all_rtl ();
+
/* There's no need to defer outputting this function any more; we
know we want to output it. */
DECL_DEFER_OUTPUT (current_function_decl) = 0;
the common parent easily. */
set_block_levels (DECL_INITIAL (cfun->decl), 0);
default_rtl_profile ();
+
timevar_pop (TV_POST_EXPAND);
+
return 0;
}
{
RTL_PASS,
"expand", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
NULL, /* gate */
gimple_expand_cfg, /* execute */
NULL, /* sub */