* builtins.c (expand_builtin_trap): Export.
* expr.h (expand_builtin_trap): Declare.
* expr.c (expand_assignment): Emit a trap for integral offsets
from registers that weren't reduced to bitpos.
* tree-cfg.c (mark_array_ref_addressable_1): Remove.
(mark_array_ref_addressable): Remove.
* tree-flow.h (mark_array_ref_addressable): Remove.
* tree-optimize.c (execute_cleanup_cfg_post_optimizing): Don't call it.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@105449 138bc75d-0d04-0410-961f-82ee72b054a4
+2005-10-15 Richard Henderson <rth@redhat.com>
+
+ PR 23714
+ * builtins.c (expand_builtin_trap): Export.
+ * expr.h (expand_builtin_trap): Declare.
+ * expr.c (expand_assignment): Emit a trap for integral offsets
+ from registers that weren't reduced to bitpos.
+
+ * tree-cfg.c (mark_array_ref_addressable_1): Remove.
+ (mark_array_ref_addressable): Remove.
+ * tree-flow.h (mark_array_ref_addressable): Remove.
+ * tree-optimize.c (execute_cleanup_cfg_post_optimizing): Don't call it.
+
2005-10-15 James E Wilson <wilson@specifix.com>
PR target/24232
return ret;
}
-static void
+void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
if (offset != 0)
{
- rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
+ rtx offset_rtx;
- gcc_assert (MEM_P (to_rtx));
+ if (!MEM_P (to_rtx))
+ {
+ /* We can get constant negative offsets into arrays with broken
+ user code. Translate this to a trap instead of ICEing. */
+ gcc_assert (TREE_CODE (offset) == INTEGER_CST);
+ expand_builtin_trap ();
+ to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
+ }
+ offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode)
offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
extern void expand_builtin_setjmp_setup (rtx, rtx);
extern void expand_builtin_setjmp_receiver (rtx);
extern rtx expand_builtin_saveregs (void);
+extern void expand_builtin_trap (void);
\f
/* Functions from expr.c: */
}
}
-/* Mark the array of any remaining ARRAY_REFs as addressable. */
-
-static tree
-mark_array_ref_addressable_1 (tree *tp, int *walk_subtrees,
- void *data ATTRIBUTE_UNUSED)
-{
- tree t = *tp;
-
- if (DECL_P (t) || TYPE_P (t))
- *walk_subtrees = 0;
- else if (TREE_CODE (t) == ARRAY_REF)
- {
- tree base = get_base_address (TREE_OPERAND (t, 0));
- if (base && DECL_P (base))
- TREE_ADDRESSABLE (base) = 1;
- }
-
- return NULL_TREE;
-}
-
-void
-mark_array_ref_addressable (void)
-{
- basic_block bb;
- block_stmt_iterator i;
-
- FOR_EACH_BB (bb)
- {
- for (i = bsi_start (bb); !bsi_end_p(i); bsi_next(&i))
- walk_tree (bsi_stmt_ptr (i), mark_array_ref_addressable_1, NULL, NULL);
- }
-}
-
/* Join all the blocks in the flowgraph. */
static void
tree, tree, tree, tree);
extern void init_empty_tree_cfg (void);
extern void fold_cond_expr_cond (void);
-extern void mark_array_ref_addressable (void);
extern void replace_uses_by (tree, tree);
extern void start_recording_case_labels (void);
extern void end_recording_case_labels (void);
execute_cleanup_cfg_post_optimizing (void)
{
fold_cond_expr_cond ();
- mark_array_ref_addressable ();
cleanup_tree_cfg ();
cleanup_dead_labels ();
group_case_labels ();