/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
- Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright (C) 2002-2016 Free Software Foundation, Inc.
Contributed by Jason Merrill <jason@redhat.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
-#include "tree.h"
+#include "target.h"
+#include "basic-block.h"
#include "cp-tree.h"
-#include "c-family/c-common.h"
-#include "tree-iterator.h"
#include "gimple.h"
-#include "hashtab.h"
-#include "pointer-set.h"
-#include "flags.h"
-#include "splay-tree.h"
+#include "predict.h"
+#include "stor-layout.h"
+#include "tree-iterator.h"
+#include "gimplify.h"
+#include "c-family/c-ubsan.h"
+#include "cilk.h"
+#include "cp-cilkplus.h"
+
+/* Forward declarations. */
+
+static tree cp_genericize_r (tree *, int *, void *);
+static tree cp_fold_r (tree *, int *, void *);
+static void cp_genericize_tree (tree*);
+static tree cp_fold (tree);
/* Local declarations. */
/* Begin a scope which can be exited by a break or continue statement. BC
indicates which.
- Just creates a label and pushes it into the current context. */
+ Just creates a label with location LOCATION and pushes it into the current
+ context. */
static tree
-begin_bc_block (enum bc_t bc)
+begin_bc_block (enum bc_t bc, location_t location)
{
- tree label = create_artificial_label (input_location);
+ tree label = create_artificial_label (location);
DECL_CHAIN (label) = bc_label[bc];
bc_label[bc] = label;
+ if (bc == bc_break)
+ LABEL_DECL_BREAK (label) = true;
+ else
+ LABEL_DECL_CONTINUE (label) = true;
return label;
}
/* Finish a scope which can be exited by a break or continue statement.
- LABEL was returned from the most recent call to begin_bc_block. BODY is
+ LABEL was returned from the most recent call to begin_bc_block. BLOCK is
an expression for the contents of the scope.
If we saw a break (or continue) in the scope, append a LABEL_EXPR to
- body. Otherwise, just forget the label. */
+ BLOCK. Otherwise, just forget the label. */
-static gimple_seq
-finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
+static void
+finish_bc_block (tree *block, enum bc_t bc, tree label)
{
gcc_assert (label == bc_label[bc]);
if (TREE_USED (label))
- {
- gimple_seq_add_stmt (&body, gimple_build_label (label));
- }
+ append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
+ block);
bc_label[bc] = DECL_CHAIN (label);
DECL_CHAIN (label) = NULL_TREE;
- return body;
}
+/* This function is a wrapper for cilk_gimplify_call_params_in_spawned_fn.
+ *EXPR_P can be a CALL_EXPR, INIT_EXPR, MODIFY_EXPR, AGGR_INIT_EXPR or
+ TARGET_EXPR. *PRE_P and *POST_P are gimple sequences from the caller
+ of gimplify_cilk_spawn. */
+
+static void
+cilk_cp_gimplify_call_params_in_spawned_fn (tree *expr_p, gimple_seq *pre_p,
+ gimple_seq *post_p)
+{
+ int ii = 0;
+
+ cilk_gimplify_call_params_in_spawned_fn (expr_p, pre_p);
+ if (TREE_CODE (*expr_p) == AGGR_INIT_EXPR)
+ for (ii = 0; ii < aggr_init_expr_nargs (*expr_p); ii++)
+ gimplify_expr (&AGGR_INIT_EXPR_ARG (*expr_p, ii), pre_p, post_p,
+ is_gimple_reg, fb_rvalue);
+}
+
+
/* Get the LABEL_EXPR to represent a break or continue statement
in the current block scope. BC indicates which. */
stmt = else_;
else
stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
- if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
- SET_EXPR_LOCATION (stmt, locus);
+ if (!EXPR_HAS_LOCATION (stmt))
+ protected_set_expr_location (stmt, locus);
*stmt_p = stmt;
}
evaluated before the loop body as in while and for loops, or after the
loop body as in do-while loops. */
-static gimple_seq
-gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
+static void
+genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
+ tree incr, bool cond_is_first, int *walk_subtrees,
+ void *data)
{
- gimple top, entry, stmt;
- gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
- tree cont_block, break_block;
- location_t stmt_locus;
+ tree blab, clab;
+ tree exit = NULL;
+ tree stmt_list = NULL;
- stmt_locus = input_location;
- stmt_list = NULL;
- body_seq = NULL;
- incr_seq = NULL;
- exit_seq = NULL;
- entry = NULL;
+ blab = begin_bc_block (bc_break, start_locus);
+ clab = begin_bc_block (bc_continue, start_locus);
- break_block = begin_bc_block (bc_break);
- cont_block = begin_bc_block (bc_continue);
+ protected_set_expr_location (incr, start_locus);
- /* If condition is zero don't generate a loop construct. */
+ cp_walk_tree (&cond, cp_genericize_r, data, NULL);
+ cp_walk_tree (&body, cp_genericize_r, data, NULL);
+ cp_walk_tree (&incr, cp_genericize_r, data, NULL);
+ *walk_subtrees = 0;
+
+ if (cond && TREE_CODE (cond) != INTEGER_CST)
+ {
+ /* If COND is constant, don't bother building an exit. If it's false,
+ we won't build a loop. If it's true, any exits are in the body. */
+ location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
+ exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
+ get_bc_label (bc_break));
+ exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
+ build_empty_stmt (cloc), exit);
+ }
+
+ if (exit && cond_is_first)
+ append_to_statement_list (exit, &stmt_list);
+ append_to_statement_list (body, &stmt_list);
+ finish_bc_block (&stmt_list, bc_continue, clab);
+ append_to_statement_list (incr, &stmt_list);
+ if (exit && !cond_is_first)
+ append_to_statement_list (exit, &stmt_list);
+
+ if (!stmt_list)
+ stmt_list = build_empty_stmt (start_locus);
+
+ tree loop;
if (cond && integer_zerop (cond))
{
- top = NULL;
if (cond_is_first)
- {
- stmt = gimple_build_goto (get_bc_label (bc_break));
- gimple_set_location (stmt, stmt_locus);
- gimple_seq_add_stmt (&stmt_list, stmt);
- }
+ loop = fold_build3_loc (start_locus, COND_EXPR,
+ void_type_node, cond, stmt_list,
+ build_empty_stmt (start_locus));
+ else
+ loop = stmt_list;
}
else
{
- /* If we use a LOOP_EXPR here, we have to feed the whole thing
- back through the main gimplifier to lower it. Given that we
- have to gimplify the loop body NOW so that we can resolve
- break/continue stmts, seems easier to just expand to gotos. */
- top = gimple_build_label (create_artificial_label (stmt_locus));
-
- /* If we have an exit condition, then we build an IF with gotos either
- out of the loop, or to the top of it. If there's no exit condition,
- then we just build a jump back to the top. */
- if (cond && !integer_nonzerop (cond))
- {
- if (cond != error_mark_node)
- {
- gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
- stmt = gimple_build_cond (NE_EXPR, cond,
- build_int_cst (TREE_TYPE (cond), 0),
- gimple_label_label (top),
- get_bc_label (bc_break));
- gimple_seq_add_stmt (&exit_seq, stmt);
- }
-
- if (cond_is_first)
- {
- if (incr)
- {
- entry = gimple_build_label
- (create_artificial_label (stmt_locus));
- stmt = gimple_build_goto (gimple_label_label (entry));
- }
- else
- stmt = gimple_build_goto (get_bc_label (bc_continue));
- gimple_set_location (stmt, stmt_locus);
- gimple_seq_add_stmt (&stmt_list, stmt);
- }
- }
- else
- {
- stmt = gimple_build_goto (gimple_label_label (top));
- gimple_seq_add_stmt (&exit_seq, stmt);
- }
+ location_t loc = start_locus;
+ if (!cond || integer_nonzerop (cond))
+ loc = EXPR_LOCATION (expr_first (body));
+ if (loc == UNKNOWN_LOCATION)
+ loc = start_locus;
+ loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
}
- gimplify_stmt (&body, &body_seq);
- gimplify_stmt (&incr, &incr_seq);
-
- body_seq = finish_bc_block (bc_continue, cont_block, body_seq);
-
- gimple_seq_add_stmt (&stmt_list, top);
- gimple_seq_add_seq (&stmt_list, body_seq);
- gimple_seq_add_seq (&stmt_list, incr_seq);
- gimple_seq_add_stmt (&stmt_list, entry);
- gimple_seq_add_seq (&stmt_list, exit_seq);
-
- annotate_all_with_location (stmt_list, stmt_locus);
+ stmt_list = NULL;
+ append_to_statement_list (loop, &stmt_list);
+ finish_bc_block (&stmt_list, bc_break, blab);
+ if (!stmt_list)
+ stmt_list = build_empty_stmt (start_locus);
- return finish_bc_block (bc_break, break_block, stmt_list);
+ *stmt_p = stmt_list;
}
-/* Gimplify a FOR_STMT node. Move the stuff in the for-init-stmt into the
- prequeue and hand off to gimplify_cp_loop. */
+/* Genericize a FOR_STMT node *STMT_P. */
static void
-gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
+genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
tree stmt = *stmt_p;
+ tree expr = NULL;
+ tree loop;
+ tree init = FOR_INIT_STMT (stmt);
- if (FOR_INIT_STMT (stmt))
- gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);
+ if (init)
+ {
+ cp_walk_tree (&init, cp_genericize_r, data, NULL);
+ append_to_statement_list (init, &expr);
+ }
- gimple_seq_add_seq (pre_p,
- gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
- FOR_EXPR (stmt), 1));
- *stmt_p = NULL_TREE;
+ genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
+ FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
+ append_to_statement_list (loop, &expr);
+ if (expr == NULL_TREE)
+ expr = loop;
+ *stmt_p = expr;
}
-/* Gimplify a WHILE_STMT node. */
+/* Genericize a WHILE_STMT node *STMT_P. */
static void
-gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
+genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
tree stmt = *stmt_p;
- gimple_seq_add_seq (pre_p,
- gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
- NULL_TREE, 1));
- *stmt_p = NULL_TREE;
+ genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
+ WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}
-/* Gimplify a DO_STMT node. */
+/* Genericize a DO_STMT node *STMT_P. */
static void
-gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
+genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
tree stmt = *stmt_p;
- gimple_seq_add_seq (pre_p,
- gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
- NULL_TREE, 0));
- *stmt_p = NULL_TREE;
+ genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
+ DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}
-/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR. */
+/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. */
static void
-gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
+genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
tree stmt = *stmt_p;
- tree break_block, body, t;
- location_t stmt_locus = input_location;
- gimple_seq seq = NULL;
+ tree break_block, body, cond, type;
+ location_t stmt_locus = EXPR_LOCATION (stmt);
- break_block = begin_bc_block (bc_break);
+ break_block = begin_bc_block (bc_break, stmt_locus);
body = SWITCH_STMT_BODY (stmt);
if (!body)
body = build_empty_stmt (stmt_locus);
+ cond = SWITCH_STMT_COND (stmt);
+ type = SWITCH_STMT_TYPE (stmt);
- t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
- SWITCH_STMT_COND (stmt), body, NULL_TREE);
- SET_EXPR_LOCATION (t, stmt_locus);
- gimplify_and_add (t, &seq);
+ cp_walk_tree (&body, cp_genericize_r, data, NULL);
+ cp_walk_tree (&cond, cp_genericize_r, data, NULL);
+ cp_walk_tree (&type, cp_genericize_r, data, NULL);
+ *walk_subtrees = 0;
- seq = finish_bc_block (bc_break, break_block, seq);
- gimple_seq_add_seq (pre_p, seq);
- *stmt_p = NULL_TREE;
+ *stmt_p = build3_loc (stmt_locus, SWITCH_EXPR, type, cond, body, NULL_TREE);
+ finish_bc_block (stmt_p, bc_break, break_block);
}
-/* Hook into the middle of gimplifying an OMP_FOR node. This is required
- in order to properly gimplify CONTINUE statements. Here we merely
- manage the continue stack; the rest of the job is performed by the
- regular gimplifier. */
+/* Genericize a CONTINUE_STMT node *STMT_P. */
+
+static void
+genericize_continue_stmt (tree *stmt_p)
+{
+ tree stmt_list = NULL;
+ tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
+ tree label = get_bc_label (bc_continue);
+ location_t location = EXPR_LOCATION (*stmt_p);
+ tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
+ append_to_statement_list_force (pred, &stmt_list);
+ append_to_statement_list (jump, &stmt_list);
+ *stmt_p = stmt_list;
+}
+
+/* Genericize a BREAK_STMT node *STMT_P. */
+
+static void
+genericize_break_stmt (tree *stmt_p)
+{
+ tree label = get_bc_label (bc_break);
+ location_t location = EXPR_LOCATION (*stmt_p);
+ *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
+}
+
+/* Genericize an OMP_FOR node *STMT_P. */
+
+static void
+genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
+{
+ tree stmt = *stmt_p;
+ location_t locus = EXPR_LOCATION (stmt);
+ tree clab = begin_bc_block (bc_continue, locus);
+
+ cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
+ if (TREE_CODE (stmt) != OMP_TASKLOOP)
+ cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
+ cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
+ cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
+ cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
+ cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
+ *walk_subtrees = 0;
+
+ finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
+}
+
+/* Hook into the middle of gimplifying an OMP_FOR node. */
static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
tree for_stmt = *expr_p;
- tree cont_block;
- gimple stmt;
gimple_seq seq = NULL;
/* Protect ourselves from recursion. */
return GS_UNHANDLED;
OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
- /* Note that while technically the continue label is enabled too soon
- here, we should have already diagnosed invalid continues nested within
- statement expressions within the INIT, COND, or INCR expressions. */
- cont_block = begin_bc_block (bc_continue);
-
gimplify_and_add (for_stmt, &seq);
- stmt = gimple_seq_last_stmt (seq);
- if (gimple_code (stmt) == GIMPLE_OMP_FOR)
- gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
- gimple_omp_body (stmt)));
- else
- seq = finish_bc_block (bc_continue, cont_block, seq);
gimple_seq_add_seq (pre_p, seq);
OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
TREE_TYPE (from) = void_type_node;
}
+ if (cxx_dialect >= cxx14 && TREE_CODE (sub) == CONSTRUCTOR)
+ /* Handle aggregate NSDMI. */
+ replace_placeholders (sub, to);
+
if (t == sub)
break;
else
tree body = TREE_OPERAND (stmt, 0);
gimple_seq try_ = NULL;
gimple_seq catch_ = NULL;
- gimple mnt;
+ gimple *mnt;
gimplify_and_add (body, &try_);
mnt = gimple_build_eh_must_not_throw (terminate_node);
return GS_ALL_DONE;
}
+/* Return TRUE if an operand (OP) of a given TYPE being copied is
+ really just an empty class copy.
+
+ Check that the operand has a simple form so that TARGET_EXPRs and
+ non-empty CONSTRUCTORs get reduced properly, and we leave the
+ return slot optimization alone because it isn't a copy. */
+
+static bool
+simple_empty_class_p (tree type, tree op)
+{
+ return
+ ((TREE_CODE (op) == COMPOUND_EXPR
+ && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
+ || is_gimple_lvalue (op)
+ || INDIRECT_REF_P (op)
+ || (TREE_CODE (op) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (op) == 0
+ && !TREE_CLOBBER_P (op))
+ || (TREE_CODE (op) == CALL_EXPR
+ && !CALL_EXPR_RETURN_SLOT_OPT (op)))
+ && is_really_empty_class (type);
+}
+
+/* Returns true if evaluating E as an lvalue has side-effects;
+ specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
+ have side-effects until there is a read or write through it. */
+
+static bool
+lvalue_has_side_effects (tree e)
+{
+ if (!TREE_SIDE_EFFECTS (e))
+ return false;
+ while (handled_component_p (e))
+ {
+ if (TREE_CODE (e) == ARRAY_REF
+ && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
+ return true;
+ e = TREE_OPERAND (e, 0);
+ }
+ if (DECL_P (e))
+ /* Just naming a variable has no side-effects. */
+ return false;
+ else if (INDIRECT_REF_P (e))
+ /* Similarly, indirection has no side-effects. */
+ return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
+ else
+ /* For anything else, trust TREE_SIDE_EFFECTS. */
+ return TREE_SIDE_EFFECTS (e);
+}
+
/* Do C++-specific gimplification. Args are as for gimplify_expr. */
int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
int saved_stmts_are_full_exprs_p = 0;
+ location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
enum tree_code code = TREE_CODE (*expr_p);
enum gimplify_status ret;
switch (code)
{
- case PTRMEM_CST:
- *expr_p = cplus_expand_constant (*expr_p);
- ret = GS_OK;
- break;
-
case AGGR_INIT_EXPR:
simplify_aggr_init_expr (expr_p);
ret = GS_OK;
init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
from_array,
tf_warning_or_error);
+ hash_set<tree> pset;
+ cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
+ cp_genericize_tree (expr_p);
ret = GS_OK;
input_location = loc;
}
LHS of an assignment might also be involved in the RHS, as in bug
25979. */
case INIT_EXPR:
+ if (fn_contains_cilk_spawn_p (cfun))
+ {
+ if (cilk_cp_detect_spawn_and_unwrap (expr_p))
+ {
+ cilk_cp_gimplify_call_params_in_spawned_fn (expr_p,
+ pre_p, post_p);
+ return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
+ }
+ if (seen_error () && contains_cilk_spawn_stmt (*expr_p))
+ return GS_ERROR;
+ }
+
cp_gimplify_init_expr (expr_p);
if (TREE_CODE (*expr_p) != INIT_EXPR)
return GS_OK;
- /* Otherwise fall through. */
+ /* Fall through. */
case MODIFY_EXPR:
+ modify_expr_case:
{
+ if (fn_contains_cilk_spawn_p (cfun)
+ && cilk_cp_detect_spawn_and_unwrap (expr_p)
+ && !seen_error ())
+ {
+ cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
+ return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
+ }
/* If the back end isn't clever enough to know that the lhs and rhs
types are the same, add an explicit conversion. */
tree op0 = TREE_OPERAND (*expr_p, 0);
TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
TREE_TYPE (op0), op1);
- else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
- || (TREE_CODE (op1) == CONSTRUCTOR
- && CONSTRUCTOR_NELTS (op1) == 0
- && !TREE_CLOBBER_P (op1))
- || (TREE_CODE (op1) == CALL_EXPR
- && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
- && is_really_empty_class (TREE_TYPE (op0)))
+ else if (simple_empty_class_p (TREE_TYPE (op0), op1))
{
- /* Remove any copies of empty classes. We check that the RHS
- has a simple form so that TARGET_EXPRs and non-empty
- CONSTRUCTORs get reduced properly, and we leave the return
- slot optimization alone because it isn't a copy (FIXME so it
- shouldn't be represented as one).
-
- Also drop volatile variables on the RHS to avoid infinite
- recursion from gimplify_expr trying to load the value. */
- if (!TREE_SIDE_EFFECTS (op1)
- || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
- *expr_p = op0;
- else if (TREE_CODE (op1) == MEM_REF
- && TREE_THIS_VOLATILE (op1))
+ /* Remove any copies of empty classes. Also drop volatile
+ variables on the RHS to avoid infinite recursion from
+ gimplify_expr trying to load the value. */
+ if (TREE_SIDE_EFFECTS (op1))
{
- /* Similarly for volatile MEM_REFs on the RHS. */
- if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
- *expr_p = op0;
- else
- *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
- TREE_OPERAND (op1, 0), op0);
+ if (TREE_THIS_VOLATILE (op1)
+ && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
+ op1 = build_fold_addr_expr (op1);
+
+ gimplify_and_add (op1, pre_p);
}
- else
- *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
- op0, op1);
+ gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+ is_gimple_lvalue, fb_lvalue);
+ *expr_p = TREE_OPERAND (*expr_p, 0);
}
+ /* P0145 says that the RHS is sequenced before the LHS.
+ gimplify_modify_expr gimplifies the RHS before the LHS, but that
+ isn't quite strong enough in two cases:
+
+ 1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
+ mean it's evaluated after the LHS.
+
+ 2) the value calculation of the RHS is also sequenced before the
+ LHS, so for scalar assignment we need to preevaluate if the
+ RHS could be affected by LHS side-effects even if it has no
+ side-effects of its own. We don't need this for classes because
+ class assignment takes its RHS by reference. */
+ else if (flag_strong_eval_order > 1
+ && TREE_CODE (*expr_p) == MODIFY_EXPR
+ && lvalue_has_side_effects (op0)
+ && (TREE_CODE (op1) == CALL_EXPR
+ || (SCALAR_TYPE_P (TREE_TYPE (op1))
+ && !TREE_CONSTANT (op1))))
+ TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
}
ret = GS_OK;
break;
gcc_unreachable ();
case FOR_STMT:
- gimplify_for_stmt (expr_p, pre_p);
- ret = GS_OK;
- break;
-
case WHILE_STMT:
- gimplify_while_stmt (expr_p, pre_p);
- ret = GS_OK;
- break;
-
case DO_STMT:
- gimplify_do_stmt (expr_p, pre_p);
- ret = GS_OK;
- break;
-
case SWITCH_STMT:
- gimplify_switch_stmt (expr_p, pre_p);
- ret = GS_OK;
- break;
+ case CONTINUE_STMT:
+ case BREAK_STMT:
+ gcc_unreachable ();
case OMP_FOR:
+ case OMP_SIMD:
+ case OMP_DISTRIBUTE:
+ case OMP_TASKLOOP:
ret = cp_gimplify_omp_for (expr_p, pre_p);
break;
- case CONTINUE_STMT:
- gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
- gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
- *expr_p = NULL_TREE;
- ret = GS_ALL_DONE;
- break;
-
- case BREAK_STMT:
- gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
- *expr_p = NULL_TREE;
- ret = GS_ALL_DONE;
- break;
-
case EXPR_STMT:
gimplify_expr_stmt (expr_p);
ret = GS_OK;
}
break;
+ case CILK_SPAWN_STMT:
+ gcc_assert(fn_contains_cilk_spawn_p (cfun)
+ && cilk_cp_detect_spawn_and_unwrap (expr_p));
+
+ if (!seen_error ())
+ {
+ cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
+ return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
+ }
+ return GS_ERROR;
+
+ case CALL_EXPR:
+ if (fn_contains_cilk_spawn_p (cfun)
+ && cilk_cp_detect_spawn_and_unwrap (expr_p)
+ && !seen_error ())
+ {
+ cilk_cp_gimplify_call_params_in_spawned_fn (expr_p, pre_p, post_p);
+ return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
+ }
+ ret = GS_OK;
+ if (!CALL_EXPR_FN (*expr_p))
+ /* Internal function call. */;
+ else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
+ {
+ /* This is a call to a (compound) assignment operator that used
+ the operator syntax; gimplify the RHS first. */
+ gcc_assert (call_expr_nargs (*expr_p) == 2);
+ gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
+ enum gimplify_status t
+ = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
+ if (t == GS_ERROR)
+ ret = GS_ERROR;
+ }
+ else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
+ {
+ /* Leave the last argument for gimplify_call_expr, to avoid problems
+ with __builtin_va_arg_pack(). */
+ int nargs = call_expr_nargs (*expr_p) - 1;
+ for (int i = 0; i < nargs; ++i)
+ {
+ enum gimplify_status t
+ = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
+ if (t == GS_ERROR)
+ ret = GS_ERROR;
+ }
+ }
+ else if (flag_strong_eval_order
+ && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
+ {
+ /* If flag_strong_eval_order, evaluate the object argument first. */
+ tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
+ if (POINTER_TYPE_P (fntype))
+ fntype = TREE_TYPE (fntype);
+ if (TREE_CODE (fntype) == METHOD_TYPE)
+ {
+ enum gimplify_status t
+ = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
+ if (t == GS_ERROR)
+ ret = GS_ERROR;
+ }
+ }
+ break;
+
+ case RETURN_EXPR:
+ if (TREE_OPERAND (*expr_p, 0)
+ && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
+ || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
+ {
+ expr_p = &TREE_OPERAND (*expr_p, 0);
+ code = TREE_CODE (*expr_p);
+ /* Avoid going through the INIT_EXPR case, which can
+ degrade INIT_EXPRs into AGGR_INIT_EXPRs. */
+ goto modify_expr_case;
+ }
+ /* Fall through. */
+
default:
ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
break;
/* Return true if the uid in both int tree maps are equal. */
-int
-cxx_int_tree_map_eq (const void *va, const void *vb)
+bool
+cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
- const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
- const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
return (a->uid == b->uid);
}
/* Hash a UID in a cxx_int_tree_map. */
unsigned int
-cxx_int_tree_map_hash (const void *item)
+cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
- return ((const struct cxx_int_tree_map *)item)->uid;
+ return item->uid;
}
/* A stable comparison routine for use with splay trees and DECLs. */
type = TREE_TYPE (type);
if (type == error_mark_node || !CLASS_TYPE_P (type))
return false;
- if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
+ if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
return false;
if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
return false;
struct cp_genericize_data
{
- struct pointer_set_t *p_set;
- VEC (tree, heap) *bind_expr_stack;
+ hash_set<tree> *p_set;
+ vec<tree> bind_expr_stack;
struct cp_genericize_omp_taskreg *omp_ctx;
+ tree try_block;
+ bool no_sanitize_p;
};
+/* Perform any pre-gimplification folding of C++ front end trees to
+ GENERIC.
+ Note: The folding of non-OMP cases is something to move into
+ the middle-end. As for now we have most foldings only on GENERIC
+ in fold-const, we need to perform this before transformation to
+ GIMPLE-form. */
+
+static tree
+cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
+{
+ tree stmt;
+ enum tree_code code;
+
+ *stmt_p = stmt = cp_fold (*stmt_p);
+
+ if (((hash_set<tree> *) data)->add (stmt))
+ {
+ /* Don't walk subtrees of stmts we've already walked once, otherwise
+ we can have exponential complexity with e.g. lots of nested
+ SAVE_EXPRs or TARGET_EXPRs. cp_fold uses a cache and will always
+ return the same tree, whose subtrees were already walked the
+ first time cp_fold_r was called on it. */
+ *walk_subtrees = 0;
+ return NULL;
+ }
+
+ code = TREE_CODE (stmt);
+ if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
+ || code == OMP_TASKLOOP || code == CILK_FOR || code == CILK_SIMD
+ || code == OACC_LOOP)
+ {
+ tree x;
+ int i, n;
+
+ cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
+ cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
+ cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
+ x = OMP_FOR_COND (stmt);
+ if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
+ {
+ cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
+ cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
+ }
+ else if (x && TREE_CODE (x) == TREE_VEC)
+ {
+ n = TREE_VEC_LENGTH (x);
+ for (i = 0; i < n; i++)
+ {
+ tree o = TREE_VEC_ELT (x, i);
+ if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
+ cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
+ }
+ }
+ x = OMP_FOR_INCR (stmt);
+ if (x && TREE_CODE (x) == TREE_VEC)
+ {
+ n = TREE_VEC_LENGTH (x);
+ for (i = 0; i < n; i++)
+ {
+ tree o = TREE_VEC_ELT (x, i);
+ if (o && TREE_CODE (o) == MODIFY_EXPR)
+ o = TREE_OPERAND (o, 1);
+ if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
+ || TREE_CODE (o) == POINTER_PLUS_EXPR))
+ {
+ cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
+ cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
+ }
+ }
+ }
+ cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
+ *walk_subtrees = 0;
+ }
+
+ return NULL;
+}
+
+/* Fold ALL the trees! FIXME we should be able to remove this, but
+ apparently that still causes optimization regressions. */
+
+void
+cp_fold_function (tree fndecl)
+{
+ hash_set<tree> pset;
+ cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
+}
+
/* Perform any pre-gimplification lowering of C++ front end trees to
GENERIC. */
{
tree stmt = *stmt_p;
struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
- struct pointer_set_t *p_set = wtd->p_set;
+ hash_set<tree> *p_set = wtd->p_set;
/* If in an OpenMP context, note var uses. */
if (__builtin_expect (wtd->omp_ctx != NULL, 0)
- && (TREE_CODE (stmt) == VAR_DECL
+ && (VAR_P (stmt)
|| TREE_CODE (stmt) == PARM_DECL
|| TREE_CODE (stmt) == RESULT_DECL)
&& omp_var_to_track (stmt))
omp_cxx_notice_variable (wtd->omp_ctx, stmt);
- if (is_invisiref_parm (stmt)
- /* Don't dereference parms in a thunk, pass the references through. */
- && !(DECL_THUNK_P (current_function_decl)
- && TREE_CODE (stmt) == PARM_DECL))
+ /* Don't dereference parms in a thunk, pass the references through. */
+ if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
+ || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
+ {
+ *walk_subtrees = 0;
+ return NULL;
+ }
+
+ /* Otherwise, do dereference invisible reference parms. */
+ if (is_invisiref_parm (stmt))
{
*stmt_p = convert_from_reference (stmt);
*walk_subtrees = 0;
/* Map block scope extern declarations to visible declarations with the
same name and type in outer scopes if any. */
if (cp_function_chain->extern_decl_map
- && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
+ && VAR_OR_FUNCTION_DECL_P (stmt)
&& DECL_EXTERNAL (stmt))
{
struct cxx_int_tree_map *h, in;
in.uid = DECL_UID (stmt);
- h = (struct cxx_int_tree_map *)
- htab_find_with_hash (cp_function_chain->extern_decl_map,
- &in, in.uid);
+ h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
if (h)
{
*stmt_p = h->to;
}
/* Other than invisiref parms, don't walk the same tree twice. */
- if (pointer_set_contains (p_set, stmt))
+ if (p_set->contains (stmt))
{
*walk_subtrees = 0;
return NULL_TREE;
if (__builtin_expect (wtd->omp_ctx != NULL, 0)
&& omp_var_to_track (TREE_OPERAND (stmt, 0)))
omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
- *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
+ *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
*walk_subtrees = 0;
}
else if (TREE_CODE (stmt) == RETURN_EXPR
*walk_subtrees = 0;
break;
case OMP_CLAUSE_REDUCTION:
- gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
+ /* Don't dereference an invisiref in reduction clause's
+ OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
+ still needs to be genericized. */
+ if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
+ {
+ *walk_subtrees = 0;
+ if (OMP_CLAUSE_REDUCTION_INIT (stmt))
+ cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
+ cp_genericize_r, data, NULL);
+ if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
+ cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
+ cp_genericize_r, data, NULL);
+ }
break;
default:
break;
to lower this construct before scanning it, so we need to lower these
before doing anything else. */
else if (TREE_CODE (stmt) == CLEANUP_STMT)
- *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
- : TRY_FINALLY_EXPR,
- void_type_node,
- CLEANUP_BODY (stmt),
- CLEANUP_EXPR (stmt));
+ *stmt_p = build2_loc (EXPR_LOCATION (stmt),
+ CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
+ : TRY_FINALLY_EXPR,
+ void_type_node,
+ CLEANUP_BODY (stmt),
+ CLEANUP_EXPR (stmt));
else if (TREE_CODE (stmt) == IF_STMT)
{
{
tree decl;
for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
- if (TREE_CODE (decl) == VAR_DECL
+ if (VAR_P (decl)
&& !DECL_EXTERNAL (decl)
&& omp_var_to_track (decl))
{
: OMP_CLAUSE_DEFAULT_PRIVATE);
}
}
- VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
+ if (flag_sanitize
+ & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
+ {
+ /* The point here is to not sanitize static initializers. */
+ bool no_sanitize_p = wtd->no_sanitize_p;
+ wtd->no_sanitize_p = true;
+ for (tree decl = BIND_EXPR_VARS (stmt);
+ decl;
+ decl = DECL_CHAIN (decl))
+ if (VAR_P (decl)
+ && TREE_STATIC (decl)
+ && DECL_INITIAL (decl))
+ cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
+ wtd->no_sanitize_p = no_sanitize_p;
+ }
+ wtd->bind_expr_stack.safe_push (stmt);
cp_walk_tree (&BIND_EXPR_BODY (stmt),
cp_genericize_r, data, NULL);
- VEC_pop (tree, wtd->bind_expr_stack);
+ wtd->bind_expr_stack.pop ();
}
else if (TREE_CODE (stmt) == USING_STMT)
/* Get the innermost inclosing GIMPLE_BIND that has a non NULL
BLOCK, and append an IMPORTED_DECL to its
BLOCK_VARS chained list. */
- if (wtd->bind_expr_stack)
+ if (wtd->bind_expr_stack.exists ())
{
int i;
- for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
- if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
- wtd->bind_expr_stack, i))))
+ for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
+ if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
break;
}
if (block)
*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
*walk_subtrees = 0;
}
- else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
+ else if (TREE_CODE (stmt) == DECL_EXPR)
+ {
+ tree d = DECL_EXPR_DECL (stmt);
+ if (TREE_CODE (d) == VAR_DECL)
+ gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
+ }
+ else if (TREE_CODE (stmt) == OMP_PARALLEL
+ || TREE_CODE (stmt) == OMP_TASK
+ || TREE_CODE (stmt) == OMP_TASKLOOP)
{
struct cp_genericize_omp_taskreg omp_ctx;
tree c, decl;
default:
break;
}
- cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
+ if (TREE_CODE (stmt) == OMP_TASKLOOP)
+ genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
+ else
+ cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
wtd->omp_ctx = omp_ctx.outer;
splay_tree_delete (omp_ctx.variables);
}
+ else if (TREE_CODE (stmt) == TRY_BLOCK)
+ {
+ *walk_subtrees = 0;
+ tree try_block = wtd->try_block;
+ wtd->try_block = stmt;
+ cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
+ wtd->try_block = try_block;
+ cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
+ }
+ else if (TREE_CODE (stmt) == MUST_NOT_THROW_EXPR)
+ {
+ /* MUST_NOT_THROW_COND might be something else with TM. */
+ if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
+ {
+ *walk_subtrees = 0;
+ tree try_block = wtd->try_block;
+ wtd->try_block = stmt;
+ cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
+ wtd->try_block = try_block;
+ }
+ }
+ else if (TREE_CODE (stmt) == THROW_EXPR)
+ {
+ location_t loc = location_of (stmt);
+ if (TREE_NO_WARNING (stmt))
+ /* Never mind. */;
+ else if (wtd->try_block)
+ {
+ if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
+ && warning_at (loc, OPT_Wterminate,
+ "throw will always call terminate()")
+ && cxx_dialect >= cxx11
+ && DECL_DESTRUCTOR_P (current_function_decl))
+ inform (loc, "in C++11 destructors default to noexcept");
+ }
+ else
+ {
+ if (warn_cxx11_compat && cxx_dialect < cxx11
+ && DECL_DESTRUCTOR_P (current_function_decl)
+ && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
+ == NULL_TREE)
+ && (get_defaulted_eh_spec (current_function_decl)
+ == empty_except_spec))
+ warning_at (loc, OPT_Wc__11_compat,
+ "in C++11 this throw will terminate because "
+ "destructors default to noexcept");
+ }
+ }
else if (TREE_CODE (stmt) == CONVERT_EXPR)
gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
+ else if (TREE_CODE (stmt) == FOR_STMT)
+ genericize_for_stmt (stmt_p, walk_subtrees, data);
+ else if (TREE_CODE (stmt) == WHILE_STMT)
+ genericize_while_stmt (stmt_p, walk_subtrees, data);
+ else if (TREE_CODE (stmt) == DO_STMT)
+ genericize_do_stmt (stmt_p, walk_subtrees, data);
+ else if (TREE_CODE (stmt) == SWITCH_STMT)
+ genericize_switch_stmt (stmt_p, walk_subtrees, data);
+ else if (TREE_CODE (stmt) == CONTINUE_STMT)
+ genericize_continue_stmt (stmt_p);
+ else if (TREE_CODE (stmt) == BREAK_STMT)
+ genericize_break_stmt (stmt_p);
+ else if (TREE_CODE (stmt) == OMP_FOR
+ || TREE_CODE (stmt) == OMP_SIMD
+ || TREE_CODE (stmt) == OMP_DISTRIBUTE)
+ genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
+ else if (TREE_CODE (stmt) == PTRMEM_CST)
+ {
+ /* By the time we get here we're handing off to the back end, so we don't
+ need or want to preserve PTRMEM_CST anymore. */
+ *stmt_p = cplus_expand_constant (stmt);
+ *walk_subtrees = 0;
+ }
+ else if ((flag_sanitize
+ & (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
+ && !wtd->no_sanitize_p)
+ {
+ if ((flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
+ && TREE_CODE (stmt) == NOP_EXPR
+ && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
+ ubsan_maybe_instrument_reference (stmt);
+ else if (TREE_CODE (stmt) == CALL_EXPR)
+ {
+ tree fn = CALL_EXPR_FN (stmt);
+ if (fn != NULL_TREE
+ && !error_operand_p (fn)
+ && POINTER_TYPE_P (TREE_TYPE (fn))
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
+ {
+ bool is_ctor
+ = TREE_CODE (fn) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
+ && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
+ if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
+ ubsan_maybe_instrument_member_call (stmt, is_ctor);
+ if ((flag_sanitize & SANITIZE_VPTR) && !is_ctor)
+ cp_ubsan_maybe_instrument_member_call (stmt);
+ }
+ }
+ }
- pointer_set_insert (p_set, *stmt_p);
+ p_set->add (*stmt_p);
return NULL;
}
+/* Lower C++ front end trees to GENERIC in T_P. */
+
+static tree
+cp_genericize_tree (tree* t_p)
+{
+ struct cp_genericize_data wtd;
+
+ /* Initialize the walk state: a visited-node set (so shared trees are
+    processed once), an empty BIND_EXPR stack, and no enclosing OpenMP
+    task region, try block, or sanitizer suppression. */
+ wtd.p_set = new hash_set<tree>;
+ wtd.bind_expr_stack.create (0);
+ wtd.omp_ctx = NULL;
+ wtd.try_block = NULL_TREE;
+ wtd.no_sanitize_p = false;
+ cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
+ delete wtd.p_set;
+ wtd.bind_expr_stack.release ();
+ /* Add -fsanitize=vptr instrumentation once the tree is in GENERIC form. */
+ if (flag_sanitize & SANITIZE_VPTR)
+ cp_ubsan_instrument_member_accesses (t_p);
+}
+
+/* If a function that should end with a return in non-void
+ function doesn't obviously end with return, add ubsan
+ instrumentation code to verify it at runtime. */
+
+static void
+cp_ubsan_maybe_instrument_return (tree fndecl)
+{
+ /* Void functions, ctors/dtors, and functions the target says not to
+    warn about need no missing-return check. */
+ if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
+ || DECL_CONSTRUCTOR_P (fndecl)
+ || DECL_DESTRUCTOR_P (fndecl)
+ || !targetm.warn_func_return (fndecl))
+ return;
+
+ /* Descend through BIND_EXPRs, TRY_FINALLY_EXPRs and statement lists to
+    the last statement of the body; if that is a RETURN_EXPR the function
+    obviously returns and no instrumentation is needed. */
+ tree t = DECL_SAVED_TREE (fndecl);
+ while (t)
+ {
+ switch (TREE_CODE (t))
+ {
+ case BIND_EXPR:
+ t = BIND_EXPR_BODY (t);
+ continue;
+ case TRY_FINALLY_EXPR:
+ t = TREE_OPERAND (t, 0);
+ continue;
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i = tsi_last (t);
+ if (!tsi_end_p (i))
+ {
+ t = tsi_stmt (i);
+ continue;
+ }
+ }
+ break;
+ case RETURN_EXPR:
+ return;
+ default:
+ break;
+ }
+ break;
+ }
+ if (t == NULL_TREE)
+ return;
+ /* Append the runtime check built by ubsan_instrument_return after the
+    last statement of the outermost BIND_EXPR's statement list.  Note the
+    instrumentation is only added for this specific body shape. */
+ t = DECL_SAVED_TREE (fndecl);
+ if (TREE_CODE (t) == BIND_EXPR
+ && TREE_CODE (BIND_EXPR_BODY (t)) == STATEMENT_LIST)
+ {
+ tree_stmt_iterator i = tsi_last (BIND_EXPR_BODY (t));
+ t = ubsan_instrument_return (DECL_SOURCE_LOCATION (fndecl));
+ tsi_link_after (&i, t, TSI_NEW_STMT);
+ }
+}
+
void
cp_genericize (tree fndecl)
{
tree t;
- struct cp_genericize_data wtd;
/* Fix up the types of parms passed by invisible reference. */
for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
if (DECL_CLONED_FUNCTION_P (fndecl))
return;
+ /* Allow cp_genericize calls to be nested.  Save and clear the
+    break/continue label state, restored at the end. */
+ tree save_bc_label[2];
+ save_bc_label[bc_break] = bc_label[bc_break];
+ save_bc_label[bc_continue] = bc_label[bc_continue];
+ bc_label[bc_break] = NULL_TREE;
+ bc_label[bc_continue] = NULL_TREE;
+
+ /* Expand all the array notations here. */
+ if (flag_cilkplus
+ && contains_array_notation_expr (DECL_SAVED_TREE (fndecl)))
+ DECL_SAVED_TREE (fndecl) =
+ expand_array_notation_exprs (DECL_SAVED_TREE (fndecl));
+
/* We do want to see every occurrence of the parms, so we can't just use
walk_tree's hash functionality. */
- wtd.p_set = pointer_set_create ();
- wtd.bind_expr_stack = NULL;
- wtd.omp_ctx = NULL;
- cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
- pointer_set_destroy (wtd.p_set);
- VEC_free (tree, heap, wtd.bind_expr_stack);
+ cp_genericize_tree (&DECL_SAVED_TREE (fndecl));
+
+ /* NOTE(review): '&' binds tighter than '&&', so this condition is
+    (flag_sanitize & SANITIZE_RETURN) && ...; extra parentheses around
+    the bitwise test would make that explicit. */
+ if (flag_sanitize & SANITIZE_RETURN
+ && do_ubsan_in_current_function ())
+ cp_ubsan_maybe_instrument_return (fndecl);
/* Do everything else. */
c_genericize (fndecl);
gcc_assert (bc_label[bc_break] == NULL);
gcc_assert (bc_label[bc_continue] == NULL);
+ bc_label[bc_break] = save_bc_label[bc_break];
+ bc_label[bc_continue] = save_bc_label[bc_continue];
}
\f
/* Build code to apply FN to each member of ARG1 and ARG2. FN may be
end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
end1 = fold_build_pointer_plus (start1, end1);
- p1 = create_tmp_var (TREE_TYPE (start1), NULL);
+ p1 = create_tmp_var (TREE_TYPE (start1));
t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
append_to_statement_list (t, &ret);
if (arg2)
{
- p2 = create_tmp_var (TREE_TYPE (start2), NULL);
+ p2 = create_tmp_var (TREE_TYPE (start2));
t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
append_to_statement_list (t, &ret);
}
for (parm = defparm; parm && parm != void_list_node;
parm = TREE_CHAIN (parm), i++)
argarray[i] = convert_default_arg (TREE_VALUE (parm),
- TREE_PURPOSE (parm), fn, i);
+ TREE_PURPOSE (parm), fn, i,
+ tf_warning_or_error);
t = build_call_a (fn, i, argarray);
t = fold_convert (void_type_node, t);
t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
parm = TREE_CHAIN (parm), i++)
argarray[i] = convert_default_arg (TREE_VALUE (parm),
TREE_PURPOSE (parm),
- fn, i);
+ fn, i, tf_warning_or_error);
t = build_call_a (fn, i, argarray);
t = fold_convert (void_type_node, t);
return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
NULL if there's nothing to do. */
tree
-cxx_omp_clause_default_ctor (tree clause, tree decl,
- tree outer ATTRIBUTE_UNUSED)
+cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
tree info = CP_OMP_CLAUSE_INFO (clause);
tree ret = NULL;
bool
cxx_omp_privatize_by_reference (const_tree decl)
{
- return is_invisiref_parm (decl);
+ /* Reference-typed variables, in addition to invisible reference parms,
+    are now privatized by reference in OpenMP clauses. */
+ return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
+ || is_invisiref_parm (decl));
}
/* Return true if DECL is const qualified var having no mutable member. */
/* Finalize an implicitly determined clause. */
void
-cxx_omp_finish_clause (tree c)
+cxx_omp_finish_clause (tree c, gimple_seq *)
{
tree decl, inner_type;
bool make_shared = false;
if (decl == error_mark_node)
make_shared = true;
else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
- {
- if (is_invisiref_parm (decl))
- inner_type = TREE_TYPE (inner_type);
- else
- {
- error ("%qE implicitly determined as %<firstprivate%> has reference type",
- decl);
- make_shared = true;
- }
- }
+ /* References are no longer an error here; strip the reference to reach
+    the underlying type (cf. cxx_omp_privatize_by_reference). */
+ inner_type = TREE_TYPE (inner_type);
/* We're interested in the base element, not arrays. */
while (TREE_CODE (inner_type) == ARRAY_TYPE)
for making these queries. */
if (!make_shared
&& CLASS_TYPE_P (inner_type)
- && cxx_omp_create_clause_info (c, inner_type, false, true, false))
+ && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
make_shared = true;
if (make_shared)
OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}
+
+/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
+ disregarded in OpenMP construct, because it is going to be
+ remapped during OpenMP lowering. SHARED is true if DECL
+ is going to be shared, false if it is going to be privatized. */
+
+bool
+cxx_omp_disregard_value_expr (tree decl, bool shared)
+{
+ /* Only artificial variables with lang-specific data marked as
+    privatized members qualify, and only when being privatized. */
+ return !shared
+ && VAR_P (decl)
+ && DECL_HAS_VALUE_EXPR_P (decl)
+ && DECL_ARTIFICIAL (decl)
+ && DECL_LANG_SPECIFIC (decl)
+ && DECL_OMP_PRIVATIZED_MEMBER (decl);
+}
+
+/* Perform folding on expression X. */
+
+tree
+cp_fully_fold (tree x)
+{
+ /* Don't fold inside templates; trees are still type-dependent there. */
+ if (processing_template_decl)
+ return x;
+ /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
+ have to call both. */
+ if (cxx_dialect >= cxx11)
+ x = maybe_constant_value (x);
+ return cp_fold (x);
+}
+
+/* Fold expression X which is used as an rvalue if RVAL is true. */
+
+static tree
+cp_fold_maybe_rvalue (tree x, bool rval)
+{
+ /* Loop: decl_constant_value may replace a DECL with another expression
+    that itself folds further, so re-run cp_fold until a fixed point. */
+ while (true)
+ {
+ x = cp_fold (x);
+ if (rval && DECL_P (x))
+ {
+ tree v = decl_constant_value (x);
+ if (v != x && v != error_mark_node)
+ {
+ x = v;
+ continue;
+ }
+ }
+ break;
+ }
+ return x;
+}
+
+/* Fold expression X which is used as an rvalue.  Convenience wrapper
+   around cp_fold_maybe_rvalue with RVAL fixed to true. */
+
+static tree
+cp_fold_rvalue (tree x)
+{
+ return cp_fold_maybe_rvalue (x, true);
+}
+
+/* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
+ and certain changes are made to the folding done. Or should be (FIXME). We
+ never touch maybe_const, as it is only used for the C front-end
+ C_MAYBE_CONST_EXPR. */
+
+tree
+c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/)
+{
+ /* c_fully_fold is only used on rvalues, and we need to fold CONST_DECL to
+ INTEGER_CST. */
+ return cp_fold_rvalue (x);
+}
+
+/* Cache of previously folded expressions.  GTY((deletable)) allows the
+   garbage collector to discard the whole map at collection points. */
+static GTY((deletable)) hash_map<tree, tree> *fold_cache;
+
+/* Dispose of the whole FOLD_CACHE. */
+
+void
+clear_fold_cache (void)
+{
+ /* empty () clears entries but keeps the allocated map for reuse. */
+ if (fold_cache != NULL)
+ fold_cache->empty ();
+}
+
+/* This function tries to fold an expression X.
+ To avoid combinatorial explosion, folding results are kept in fold_cache.
+ If we are processing a template or X is invalid, we don't fold at all.
+ For performance reasons we don't cache expressions representing a
+ declaration or constant.
+ Function returns X or its folded variant. */
+
+static tree
+cp_fold (tree x)
+{
+ tree op0, op1, op2, op3;
+ tree org_x = x, r = NULL_TREE;
+ enum tree_code code;
+ location_t loc;
+ bool rval_ops = true;
+
+ if (!x || x == error_mark_node)
+ return x;
+
+ if (processing_template_decl
+ || (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)))
+ return x;
+
+ /* Don't bother to cache DECLs or constants. */
+ if (DECL_P (x) || CONSTANT_CLASS_P (x))
+ return x;
+
+ if (fold_cache == NULL)
+ fold_cache = hash_map<tree, tree>::create_ggc (101);
+
+ if (tree *cached = fold_cache->get (x))
+ return *cached;
+
+ code = TREE_CODE (x);
+ switch (code)
+ {
+ case SIZEOF_EXPR:
+ x = fold_sizeof_expr (x);
+ break;
+
+ case VIEW_CONVERT_EXPR:
+ rval_ops = false;
+ /* FALLTHRU */
+ case CONVERT_EXPR:
+ case NOP_EXPR:
+ case NON_LVALUE_EXPR:
+
+ if (VOID_TYPE_P (TREE_TYPE (x)))
+ return x;
+
+ loc = EXPR_LOCATION (x);
+ op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+
+ if (code == CONVERT_EXPR
+ && SCALAR_TYPE_P (TREE_TYPE (x))
+ && op0 != void_node)
+ /* During parsing we used convert_to_*_nofold; re-convert now using the
+ folding variants, since fold() doesn't do those transformations. */
+ x = fold (convert (TREE_TYPE (x), op0));
+ else if (op0 != TREE_OPERAND (x, 0))
+ {
+ if (op0 == error_mark_node)
+ x = error_mark_node;
+ else
+ x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
+ }
+ else
+ x = fold (x);
+
+ /* Conversion of an out-of-range value has implementation-defined
+ behavior; the language considers it different from arithmetic
+ overflow, which is undefined. */
+ if (TREE_CODE (op0) == INTEGER_CST
+ && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
+ TREE_OVERFLOW (x) = false;
+
+ break;
+
+ case INDIRECT_REF:
+ /* We don't need the decltype(auto) obfuscation anymore. */
+ if (REF_PARENTHESIZED_P (x))
+ {
+ tree p = maybe_undo_parenthesized_ref (x);
+ return cp_fold (p);
+ }
+ goto unary;
+
+ case ADDR_EXPR:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ rval_ops = false;
+ /* FALLTHRU */
+ case CONJ_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FLOAT_EXPR:
+ case NEGATE_EXPR:
+ case ABS_EXPR:
+ case BIT_NOT_EXPR:
+ case TRUTH_NOT_EXPR:
+ case FIXED_CONVERT_EXPR:
+ unary:
+
+ loc = EXPR_LOCATION (x);
+ op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+
+ if (op0 != TREE_OPERAND (x, 0))
+ {
+ if (op0 == error_mark_node)
+ x = error_mark_node;
+ else
+ {
+ x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
+ /* Folding an INDIRECT_REF may produce a new dereference; carry
+    over the qualifier/side-effect flags from the original. */
+ if (code == INDIRECT_REF
+ && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
+ {
+ TREE_READONLY (x) = TREE_READONLY (org_x);
+ TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
+ TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
+ }
+ }
+ }
+ else
+ x = fold (x);
+
+ gcc_assert (TREE_CODE (x) != COND_EXPR
+ || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
+ break;
+
+ case UNARY_PLUS_EXPR:
+ op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
+ if (op0 == error_mark_node)
+ x = error_mark_node;
+ else
+ x = fold_convert (TREE_TYPE (x), op0);
+ break;
+
+ case POSTDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ case INIT_EXPR:
+ case PREDECREMENT_EXPR:
+ case PREINCREMENT_EXPR:
+ case COMPOUND_EXPR:
+ case MODIFY_EXPR:
+ rval_ops = false;
+ /* FALLTHRU */
+ case POINTER_PLUS_EXPR:
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case TRUNC_MOD_EXPR:
+ case CEIL_MOD_EXPR:
+ case ROUND_MOD_EXPR:
+ case RDIV_EXPR:
+ case EXACT_DIV_EXPR:
+ case MIN_EXPR:
+ case MAX_EXPR:
+ case LSHIFT_EXPR:
+ case RSHIFT_EXPR:
+ case LROTATE_EXPR:
+ case RROTATE_EXPR:
+ case BIT_AND_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_XOR_EXPR:
+ case TRUTH_AND_EXPR:
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_ORIF_EXPR:
+ case TRUTH_XOR_EXPR:
+ case LT_EXPR: case LE_EXPR:
+ case GT_EXPR: case GE_EXPR:
+ case EQ_EXPR: case NE_EXPR:
+ case UNORDERED_EXPR: case ORDERED_EXPR:
+ case UNLT_EXPR: case UNLE_EXPR:
+ case UNGT_EXPR: case UNGE_EXPR:
+ case UNEQ_EXPR: case LTGT_EXPR:
+ case RANGE_EXPR: case COMPLEX_EXPR:
+
+ loc = EXPR_LOCATION (x);
+ op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+ op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
+
+ if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
+ {
+ if (op0 == error_mark_node || op1 == error_mark_node)
+ x = error_mark_node;
+ else
+ x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
+ }
+ else
+ x = fold (x);
+
+ /* Preserve the TREE_NO_WARNING marker on comparisons so that
+    -Wnonnull-compare isn't re-triggered on the folded form. */
+ if (TREE_NO_WARNING (org_x)
+ && warn_nonnull_compare
+ && COMPARISON_CLASS_P (org_x))
+ {
+ if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
+ ;
+ else if (COMPARISON_CLASS_P (x))
+ TREE_NO_WARNING (x) = 1;
+ /* Otherwise give up on optimizing these, let GIMPLE folders
+ optimize those later on. */
+ else if (op0 != TREE_OPERAND (org_x, 0)
+ || op1 != TREE_OPERAND (org_x, 1))
+ {
+ x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
+ TREE_NO_WARNING (x) = 1;
+ }
+ else
+ x = org_x;
+ }
+ break;
+
+ case VEC_COND_EXPR:
+ case COND_EXPR:
+
+ /* Don't bother folding a void condition, since it can't produce a
+ constant value. Also, some statement-level uses of COND_EXPR leave
+ one of the branches NULL, so folding would crash. */
+ if (VOID_TYPE_P (TREE_TYPE (x)))
+ return x;
+
+ loc = EXPR_LOCATION (x);
+ op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
+ op1 = cp_fold (TREE_OPERAND (x, 1));
+ op2 = cp_fold (TREE_OPERAND (x, 2));
+
+ if (op0 != TREE_OPERAND (x, 0)
+ || op1 != TREE_OPERAND (x, 1)
+ || op2 != TREE_OPERAND (x, 2))
+ {
+ if (op0 == error_mark_node
+ || op1 == error_mark_node
+ || op2 == error_mark_node)
+ x = error_mark_node;
+ else
+ x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
+ }
+ else
+ x = fold (x);
+
+ /* A COND_EXPR might have incompatible types in branches if one or both
+ arms are bitfields. If folding exposed such a branch, fix it up. */
+ if (TREE_CODE (x) != code)
+ if (tree type = is_bitfield_expr_with_lowered_type (x))
+ x = fold_convert (type, x);
+
+ break;
+
+ case CALL_EXPR:
+ {
+ int i, m, sv = optimize, nw = sv, changed = 0;
+ tree callee = get_callee_fndecl (x);
+
+ /* Some built-in function calls will be evaluated at compile-time in
+ fold (). Set optimize to 1 when folding __builtin_constant_p inside
+ a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
+ if (callee && DECL_BUILT_IN (callee) && !optimize
+ && DECL_IS_BUILTIN_CONSTANT_P (callee)
+ && current_function_decl
+ && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
+ nw = 1;
+
+ /* Fold the arguments into a copy, so ORG_X can be returned intact
+    below if nothing actually changed. */
+ x = copy_node (x);
+
+ m = call_expr_nargs (x);
+ for (i = 0; i < m; i++)
+ {
+ r = cp_fold (CALL_EXPR_ARG (x, i));
+ if (r != CALL_EXPR_ARG (x, i))
+ {
+ if (r == error_mark_node)
+ {
+ x = error_mark_node;
+ break;
+ }
+ changed = 1;
+ }
+ CALL_EXPR_ARG (x, i) = r;
+ }
+ if (x == error_mark_node)
+ break;
+
+ /* Temporarily override the global 'optimize' around fold () so
+    builtins are evaluated; restore it immediately after. */
+ optimize = nw;
+ r = fold (x);
+ optimize = sv;
+
+ if (TREE_CODE (r) != CALL_EXPR)
+ {
+ x = cp_fold (r);
+ break;
+ }
+
+ optimize = nw;
+
+ /* Invoke maybe_constant_value for functions declared
+ constexpr and not called with AGGR_INIT_EXPRs.
+ TODO:
+ Do constexpr expansion of expressions where the call itself is not
+ constant, but the call followed by an INDIRECT_REF is. */
+ if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
+ && !flag_no_inline)
+ r = maybe_constant_value (x);
+ optimize = sv;
+
+ if (TREE_CODE (r) != CALL_EXPR)
+ {
+ x = r;
+ break;
+ }
+
+ if (!changed)
+ x = org_x;
+ break;
+ }
+
+ case CONSTRUCTOR:
+ {
+ unsigned i;
+ constructor_elt *p;
+ bool changed = false;
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
+ /* Reserve the full length up front so quick_push below is safe. */
+ vec<constructor_elt, va_gc> *nelts = NULL;
+ vec_safe_reserve (nelts, vec_safe_length (elts));
+ FOR_EACH_VEC_SAFE_ELT (elts, i, p)
+ {
+ tree op = cp_fold (p->value);
+ constructor_elt e = { p->index, op };
+ nelts->quick_push (e);
+ if (op != p->value)
+ {
+ if (op == error_mark_node)
+ {
+ x = error_mark_node;
+ changed = false;
+ break;
+ }
+ changed = true;
+ }
+ }
+ if (changed)
+ x = build_constructor (TREE_TYPE (x), nelts);
+ else
+ vec_free (nelts);
+ break;
+ }
+ case TREE_VEC:
+ {
+ /* Fold every element; only copy the vector if something changed. */
+ bool changed = false;
+ vec<tree, va_gc> *vec = make_tree_vector ();
+ int i, n = TREE_VEC_LENGTH (x);
+ vec_safe_reserve (vec, n);
+
+ for (i = 0; i < n; i++)
+ {
+ tree op = cp_fold (TREE_VEC_ELT (x, i));
+ vec->quick_push (op);
+ if (op != TREE_VEC_ELT (x, i))
+ changed = true;
+ }
+
+ if (changed)
+ {
+ r = copy_node (x);
+ for (i = 0; i < n; i++)
+ TREE_VEC_ELT (r, i) = (*vec)[i];
+ x = r;
+ }
+
+ release_tree_vector (vec);
+ }
+
+ break;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+
+ loc = EXPR_LOCATION (x);
+ op0 = cp_fold (TREE_OPERAND (x, 0));
+ op1 = cp_fold (TREE_OPERAND (x, 1));
+ op2 = cp_fold (TREE_OPERAND (x, 2));
+ op3 = cp_fold (TREE_OPERAND (x, 3));
+
+ if (op0 != TREE_OPERAND (x, 0)
+ || op1 != TREE_OPERAND (x, 1)
+ || op2 != TREE_OPERAND (x, 2)
+ || op3 != TREE_OPERAND (x, 3))
+ {
+ if (op0 == error_mark_node
+ || op1 == error_mark_node
+ || op2 == error_mark_node
+ || op3 == error_mark_node)
+ x = error_mark_node;
+ else
+ {
+ x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
+ TREE_READONLY (x) = TREE_READONLY (org_x);
+ TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
+ TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
+ }
+ }
+
+ x = fold (x);
+ break;
+
+ default:
+ return org_x;
+ }
+
+ fold_cache->put (org_x, x);
+ /* Prevent that we try to fold an already folded result again. */
+ if (x != org_x)
+ fold_cache->put (x, x);
+
+ return x;
+}
+
+#include "gt-cp-cp-gimplify.h"