/* Compute the size of the elements in the CTOR. It differs
from the size of the vector type elements only when the
CTOR elements are vectors themselves. */
- tree val_type = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
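+ /* An empty CTOR has no first element to inspect; fall back to the
+    element type of the vector itself.  */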
+ tree val_type = (CONSTRUCTOR_NELTS (exp) != 0
+ ? TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value)
+ : elttype);
if (VECTOR_TYPE_P (val_type))
bitsize = tree_to_uhwi (TYPE_SIZE (val_type));
else
/* We keep an exact subset of the constructor elements. */
(if (multiple_p (idx, k, &elt) && multiple_p (n, k, &count))
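+ /* An empty CTOR is equivalent to a zero vector; return the canonical
+    zero constant instead of an empty CONSTRUCTOR.  */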
(if (CONSTRUCTOR_NELTS (ctor) == 0)
- { build_constructor (type, NULL); }
+ { build_zero_cst (type); }
(if (count == 1)
(if (elt < CONSTRUCTOR_NELTS (ctor))
(view_convert { CONSTRUCTOR_ELT (ctor, elt)->value; })
/* We don't want to emit new CTORs unless the old one goes away.
   ??? Eventually allow this if the CTOR ends up constant or
   uniform.  */
(if (single_use (@0))
- {
- vec<constructor_elt, va_gc> *vals;
- vec_alloc (vals, count);
- for (unsigned i = 0;
- i < count && elt + i < CONSTRUCTOR_NELTS (ctor); ++i)
- CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
- CONSTRUCTOR_ELT (ctor, elt + i)->value);
- build_constructor (type, vals);
- }))))
+ (with
+ {
+ vec<constructor_elt, va_gc> *vals;
+ vec_alloc (vals, count);
+ bool constant_p = true;
+ tree res;
+ for (unsigned i = 0;
+ i < count && elt + i < CONSTRUCTOR_NELTS (ctor); ++i)
+ {
+ tree e = CONSTRUCTOR_ELT (ctor, elt + i)->value;
+ CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE, e);
+ if (!CONSTANT_CLASS_P (e))
+ constant_p = false;
+ }
+ res = (constant_p ? build_vector_from_ctor (type, vals)
+ : build_constructor (type, vals));
+ }
+ { res; })))))
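+/* Illustrative example (not part of the patch): with
+     v = {1, 2, x_1, y_2};
+   a BIT_FIELD_REF selecting the two low lanes now folds to the
+   VECTOR_CST {1, 2} via build_vector_from_ctor, while one selecting
+   the two high lanes still builds the CONSTRUCTOR {x_1, y_2}, since
+   those elements are not constant.  */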
/* The bitfield references a single constructor element. */
(if (k.is_constant (&const_k)
&& idx + n <= (idx / const_k + 1) * const_k)
return new_bb;
}
-/* Build a ternary operation and gimplify it. Emit code before GSI.
- Return the gimple_val holding the result. */
-
-tree
-gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
- tree type, tree a, tree b, tree c)
-{
- tree ret;
- location_t loc = gimple_location (gsi_stmt (*gsi));
-
- ret = fold_build3_loc (loc, code, type, a, b, c);
- return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
- GSI_SAME_STMT);
-}
-
-/* Build a binary operation and gimplify it. Emit code before GSI.
- Return the gimple_val holding the result. */
-
-tree
-gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
- tree type, tree a, tree b)
-{
- tree ret;
-
- ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
- return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
- GSI_SAME_STMT);
-}
-
-/* Build a unary operation and gimplify it. Emit code before GSI.
- Return the gimple_val holding the result. */
-
-tree
-gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
- tree a)
-{
- tree ret;
-
- ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
- return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
- GSI_SAME_STMT);
-}
-
\f
/* Given a basic block B which ends with a conditional and has
extern bool gimple_purge_all_dead_eh_edges (const_bitmap);
extern bool gimple_purge_dead_abnormal_call_edges (basic_block);
extern bool gimple_purge_all_dead_abnormal_call_edges (const_bitmap);
-extern tree gimplify_build3 (gimple_stmt_iterator *, enum tree_code,
- tree, tree, tree, tree);
-extern tree gimplify_build2 (gimple_stmt_iterator *, enum tree_code,
- tree, tree, tree);
-extern tree gimplify_build1 (gimple_stmt_iterator *, enum tree_code,
- tree, tree);
extern void extract_true_false_edges_from_block (basic_block, edge *, edge *);
extern tree find_case_label_for_value (const gswitch *switch_stmt, tree val);
extern edge find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val);
#include "vec-perm-indices.h"
#include "insn-config.h"
#include "tree-ssa-dce.h"
+#include "gimple-fold.h"
+#include "gimple-match.h"
#include "recog.h" /* FIXME: for insn_data */
+/* Build a ternary operation and gimplify it. Emit code before GSI.
+ Return the gimple_val holding the result. */
+
+static tree
+gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
+ tree type, tree a, tree b, tree c)
+{
+ location_t loc = gimple_location (gsi_stmt (*gsi));
+ gimple_seq stmts = NULL;
+ tree ret = gimple_build (&stmts, loc, code, type, a, b, c);
+ gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
+ return ret;
+}
+
+/* Build a binary operation and gimplify it. Emit code before GSI.
+ Return the gimple_val holding the result. */
+
+static tree
+gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
+ tree type, tree a, tree b)
+{
+ location_t loc = gimple_location (gsi_stmt (*gsi));
+ gimple_seq stmts = NULL;
+ tree ret = gimple_build (&stmts, loc, code, type, a, b);
+ gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
+ return ret;
+}
+
+/* Build a unary operation and gimplify it. Emit code before GSI.
+ Return the gimple_val holding the result. */
+
+static tree
+gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
+ tree a)
+{
+ location_t loc = gimple_location (gsi_stmt (*gsi));
+ gimple_seq stmts = NULL;
+ tree ret = gimple_build (&stmts, loc, code, type, a);
+ gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
+ return ret;
+}
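+/* Usage sketch (hypothetical, not part of this change):
+
+     tree lane = gimplify_build3 (gsi, BIT_FIELD_REF, integer_type_node,
+                                  v, bitsize_int (32), bitsize_int (32));
+
+   extracts the second 32-bit lane of vector V, inserting whatever
+   statements the folding could not elide before *GSI.  Because
+   gimple_build folds eagerly, LANE may be an existing SSA name or a
+   constant rather than the result of a newly emitted statement.  */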
+
static void expand_vector_operations_1 (gimple_stmt_iterator *, bitmap);
/* Return the number of elements in a vector type TYPE that we have
tree, tree, tree, tree, tree, enum tree_code,
tree);
+/* Extract the vector element of type TYPE at BITPOS with BITSIZE from T
+ and return it. */
+
tree
tree_vec_extract (gimple_stmt_iterator *gsi, tree type,
tree t, tree bitsize, tree bitpos)
{
- if (TREE_CODE (t) == SSA_NAME)
- {
- gimple *def_stmt = SSA_NAME_DEF_STMT (t);
- if (is_gimple_assign (def_stmt)
- && (gimple_assign_rhs_code (def_stmt) == VECTOR_CST
- || (bitpos
- && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR)))
- t = gimple_assign_rhs1 (def_stmt);
- }
- if (bitpos)
- return gimplify_build3 (gsi, BIT_FIELD_REF, type, t, bitsize, bitpos);
- else
- return gimplify_build1 (gsi, VIEW_CONVERT_EXPR, type, t);
+ /* We're using the resimplify API and maybe_push_res_to_seq to
+    simplify the BIT_FIELD_REF, but restrict the simplification to
+    a single stmt while at the same time following SSA edges so the
+    simplification can see already emitted CTORs.  */
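+ /* For example (assumed GIMPLE): given an earlier
+      v_3 = {x_1, y_2};
+    extracting the second lane as BIT_FIELD_REF <v_3, 32, 32>
+    resimplifies through the SSA def of v_3 straight to y_2, and
+    maybe_push_res_to_seq then emits no new statement.  */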
+ gimple_match_op opr;
+ opr.set_op (BIT_FIELD_REF, type, t, bitsize, bitpos);
+ opr.resimplify (NULL, follow_all_ssa_edges);
+ gimple_seq stmts = NULL;
+ tree res = maybe_push_res_to_seq (&opr, &stmts);
+ gcc_assert (res);
+ gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
+ return res;
}
static tree
scalar_int_mode mode
= int_mode_for_size (tree_to_uhwi (TYPE_SIZE (type)), 0).require ();
compute_type = lang_hooks.types.type_for_mode (mode, 1);
- result = f (gsi, compute_type, a, b, NULL_TREE, NULL_TREE, code, type);
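+ /* tree_vec_extract no longer special-cases a NULL bitpos, so pass an
+    explicit zero offset and the full width; a whole-vector
+    BIT_FIELD_REF like this is expected to resimplify to a
+    VIEW_CONVERT_EXPR.  */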
+ result = f (gsi, compute_type, a, b, bitsize_zero_node,
+ TYPE_SIZE (compute_type), code, type);
warning_at (loc, OPT_Wvector_operation_performance,
"vector operation will be expanded with a "
"single scalar operation");