gcc_assert (to_real == from_real);
+ gcc_assert (to_mode != BLKmode);
+ gcc_assert (from_mode != BLKmode);
/* If the source and destination are already the same, then there's
nothing to do. */
!= GET_MODE_PRECISION (to_mode))
|| (DECIMAL_FLOAT_MODE_P (from_mode)
!= DECIMAL_FLOAT_MODE_P (to_mode)));
-
+
if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
/* Conversion between decimal float and binary float, same size. */
tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
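/* For example: a conversion between _Decimal32 (SDmode) and float
   (SFmode) is same-sized, so it is neither a widening nor a narrowing;
   the optab chosen above merely selects the proper decimal <-> binary
   conversion routine.  */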
use_reg (rtx *call_fusage, rtx reg)
{
gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
-
+
*call_fusage
= gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, reg), *call_fusage);
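/* A minimal usage sketch (hard register numbers 0 and 1 are
   hypothetical):

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (SImode, 0));
     use_reg (&fusage, gen_rtx_REG (SImode, 1));

   fusage is then the chain
     (expr_list (use (reg:SI 1)) (expr_list (use (reg:SI 0)) (nil)))
   ready to be attached as CALL_INSN_FUNCTION_USAGE of a call insn.  */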
pred = insn_data[(int) code].operand[1].predicate;
if (pred != 0 && ! (*pred) (opsize, mode))
opsize = copy_to_mode_reg (mode, opsize);
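/* E.g. if the target's pattern insists on a register for the size
   operand, a CONST_INT opsize fails the predicate and is copied into
   a fresh MODE-mode pseudo here.  */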
-
+
opchar = val;
char_mode = insn_data[(int) code].operand[2].mode;
if (char_mode != VOIDmode)
rtx seq, inner;
bool need_clobber;
int i;
-
+
gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
-
+
/* If X is a push on the stack, do the push now and replace
X with a reference to the stack pointer. */
if (push_operand (x, mode))
temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
emit_move_insn (target, temp);
}
+ else if (GET_MODE (target) == BLKmode)
+ emit_block_move (target, temp, expr_size (exp),
+ (call_param_p
+ ? BLOCK_OP_CALL_PARM
+ : BLOCK_OP_NORMAL));
else
convert_move (target, temp, unsignedp);
}
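/* Sketch of the new BLKmode arm: when the target is a BLKmode MEM
   (an aggregate), convert_move cannot be used, so the temporary is
   copied with emit_block_move -- essentially a memcpy of
   expr_size (exp) bytes at the RTL level, using the call-parameter
   block-op variant when the store feeds a call argument.  */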
case CONSTRUCTOR:
{
HOST_WIDE_INT nz = 0, ic = 0;
-
+
bool const_elt_p
= categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
largest element, which would avoid comparing the size of the
initialized element against any tail padding in the union.
Doesn't seem worth the effort... */
- if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
+ if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
TYPE_SIZE (init_sub_type)) == 1)
{
/* And now we have to find out if the element itself is fully
HOST_WIDE_INT bitpos = 0;
tree offset;
rtx to_rtx = target;
-
+
/* Just ignore missing fields. We cleared the whole
structure, above, if any fields are missing. */
if (field == 0)
continue;
-
+
if (cleared && initializer_zerop (value))
continue;
-
+
if (host_integerp (DECL_SIZE (field), 1))
bitsize = tree_low_cst (DECL_SIZE (field), 1);
else
bitsize = -1;
-
+
mode = DECL_MODE (field);
if (DECL_BIT_FIELD (field))
mode = VOIDmode;
-
+
offset = DECL_FIELD_OFFSET (field);
if (host_integerp (offset, 0)
&& host_integerp (bit_position (field), 0))
}
else
bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
-
+
if (offset)
{
rtx offset_rtx;
-
+
offset
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
make_tree (TREE_TYPE (exp),
offset_rtx = expand_normal (offset);
gcc_assert (MEM_P (to_rtx));
-
+
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode)
offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
{
tree type = TREE_TYPE (value);
-
+
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
type = lang_hooks.types.type_for_size
(BITS_PER_WORD, TYPE_UNSIGNED (type));
value = fold_convert (type, value);
}
-
+
if (BYTES_BIG_ENDIAN)
value
= fold_build2 (LSHIFT_EXPR, type, value,
to_rtx = copy_rtx (to_rtx);
MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
}
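/* Worked example for the BYTES_BIG_ENDIAN shift above: with
   BITS_PER_WORD == 32 and an 8-bit value, the value is shifted left
   by 32 - 8 == 24 so its bits land at the same memory location a
   narrow 8-bit store would have used.  */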
-
+
store_constructor_field (to_rtx, bitsize, bitpos, mode,
value, type, cleared,
get_alias_set (TREE_TYPE (field)));
tree index, value;
HOST_WIDE_INT count = 0, zero_count = 0;
need_to_clear = ! const_bounds_p;
-
+
/* This loop is a more accurate version of the loop in
mostly_zeros_p (it handles RANGE_EXPR in an index). It
is also needed to check for missing elements. */
if (need_to_clear)
break;
-
+
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
tree lo_index = TREE_OPERAND (index, 0);
tree hi_index = TREE_OPERAND (index, 1);
-
+
if (! host_integerp (lo_index, 1)
|| ! host_integerp (hi_index, 1))
{
need_to_clear = 1;
break;
}
-
+
this_node_count = (tree_low_cst (hi_index, 1)
- tree_low_cst (lo_index, 1) + 1);
}
else
this_node_count = 1;
-
+
count += this_node_count;
if (mostly_zeros_p (value))
zero_count += this_node_count;
}
-
+
/* Clear the entire array first if there are any missing
elements, or if the incidence of zero elements is >=
75%. */
|| 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
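/* Worked example: a 16-element array with 13 zero elements gives
   4 * 13 == 52 >= 3 * 16 == 48, so the whole array is cleared first
   and only the 3 nonzero elements are stored individually.  */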
-
+
if (need_to_clear && size > 0)
{
if (REG_P (target))
HOST_WIDE_INT bitpos;
int unsignedp;
rtx xtarget = target;
-
+
if (cleared && initializer_zerop (value))
continue;
-
+
unsignedp = TYPE_UNSIGNED (elttype);
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
: -1);
else
bitsize = GET_MODE_BITSIZE (mode);
-
+
if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
{
tree lo_index = TREE_OPERAND (index, 0);
rtx index_r, pos_rtx;
HOST_WIDE_INT lo, hi, count;
tree position;
-
+
/* If the range is constant and "small", unroll the loop. */
if (const_bounds_p
&& host_integerp (lo_index, 0)
for (; lo <= hi; lo++)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
-
+
if (MEM_P (target)
&& !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
target = copy_rtx (target);
MEM_KEEP_ALIAS_SET_P (target) = 1;
}
-
+
store_constructor_field
(target, bitsize, bitpos, mode, value, type, cleared,
get_alias_set (elttype));
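/* E.g. the GNU C range designator

     int a[8] = { [2 ... 5] = 7 };

   produces a RANGE_EXPR index; with constant, small bounds the code
   above simply emits four separate stores of 7 at indices 2..5.  */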
rtx loop_start = gen_label_rtx ();
rtx loop_end = gen_label_rtx ();
tree exit_cond;
-
+
expand_normal (hi_index);
unsignedp = TYPE_UNSIGNED (domain);
-
+
index = build_decl (VAR_DECL, NULL_TREE, domain);
-
+
index_r
= gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
&unsignedp, 0));
SET_DECL_RTL (index, index_r);
store_expr (lo_index, index_r, 0);
-
+
/* Build the head of the loop. */
do_pending_stack_adjust ();
emit_label (loop_start);
size_binop (MULT_EXPR, position,
fold_convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
-
+
pos_rtx = expand_normal (position);
xtarget = offset_address (target, pos_rtx,
highest_pow2_factor (position));
exit_cond = build2 (LT_EXPR, integer_type_node,
index, hi_index);
jumpif (exit_cond, loop_end);
-
+
/* Update the loop counter, and jump to the head of
the loop. */
expand_assignment (index,
build2 (PLUS_EXPR, TREE_TYPE (index),
index, integer_one_node));
-
+
emit_jump (loop_start);
-
+
/* Build the end of the loop. */
emit_label (loop_end);
}
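/* Roughly, the insns emitted above implement (a sketch):

     for (index = lo_index; index <= hi_index; index++)
       target[index - TYPE_MIN_VALUE (domain)] = value;

   i.e. a genuine runtime loop, used when the bounds are unsuitable
   for unrolling.  */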
|| ! host_integerp (TYPE_SIZE (elttype), 1))
{
tree position;
-
+
if (index == 0)
index = ssize_int (1);
-
+
if (minelt)
index = fold_convert (ssizetype,
fold_build2 (MINUS_EXPR,
TREE_TYPE (index),
index,
TYPE_MIN_VALUE (domain)));
-
+
position =
size_binop (MULT_EXPR, index,
fold_convert (ssizetype,
* tree_low_cst (TYPE_SIZE (elttype), 1));
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
-
+
if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
HOST_WIDE_INT bitpos;
rtvec vector = NULL;
unsigned n_elts;
-
+
gcc_assert (eltmode != BLKmode);
-
+
n_elts = TYPE_VECTOR_SUBPARTS (type);
if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
{
enum machine_mode mode = GET_MODE (target);
-
+
icode = (int) vec_init_optab->handlers[mode].insn_code;
if (icode != CODE_FOR_nothing)
{
unsigned int i;
-
+
vector = rtvec_alloc (n_elts);
for (i = 0; i < n_elts; i++)
RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
}
}
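/* For instance (a sketch): given

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { a, b, c, d };

   a target with a vec_init pattern for V4SImode receives a
   (parallel [a b c d]) built in VECTOR below; any slots the
   constructor leaves out keep the CONST0_RTX stored here.  */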
-
+
/* If the constructor has fewer elements than the vector,
clear the whole array first. Similarly if this is a static
constructor of a non-BLKmode object. */
{
unsigned HOST_WIDE_INT count = 0, zero_count = 0;
tree value;
-
+
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
{
int n_elts_here = tree_low_cst
(int_const_binop (TRUNC_DIV_EXPR,
TYPE_SIZE (TREE_TYPE (value)),
TYPE_SIZE (elttype), 0), 1);
-
+
count += n_elts_here;
if (mostly_zeros_p (value))
zero_count += n_elts_here;
or if the incidence of zero elements is >= 75%. */
need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
}
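/* Worked example: a V4SI constructor whose two values are themselves
   V2SI vectors gives n_elts_here = 64 / 32 = 2 for each value, so
   count reaches 4 and matches n_elts.  */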
-
+
if (need_to_clear && size > 0 && !vector)
{
if (REG_P (target))
clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
cleared = 1;
}
-
+
/* Inform later passes that the old value is dead. */
if (!cleared && !vector && REG_P (target))
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
{
HOST_WIDE_INT eltpos;
tree value = ce->value;
-
+
bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
if (cleared && initializer_zerop (value))
continue;
-
+
if (ce->index)
eltpos = tree_low_cst (ce->index, 1);
else
eltpos = i;
-
+
if (vector)
{
/* Vector CONSTRUCTORs should only be built from smaller
cleared, get_alias_set (elttype));
}
}
-
+
if (vector)
emit_insn (GEN_FCN (icode)
(target,
gen_rtx_PARALLEL (GET_MODE (target), vector)));
break;
}
-
+
default:
gcc_unreachable ();
}
{
size_tree = TREE_OPERAND (exp, 1);
*punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
-
+
/* For vector types, with the correct size of access, use the mode of
the inner type. */
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
&& GET_MODE (decl_rtl) != DECL_MODE (exp))
{
enum machine_mode pmode;
-
+
/* Get the signedness used for this variable. Ensure we get the
same mode we got when the variable was declared. */
pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
tree index1 = index;
tree low_bound = array_ref_low_bound (exp);
index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
-
+
/* Optimize the special case of a zero lower bound.
-
+
We convert the low_bound to sizetype to avoid some problems
with constant folding. (E.g. suppose the lower bound is 1,
and its mode is QI. Without the conversion, (ARRAY
+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
+INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
-
+
if (! integer_zerop (low_bound))
index1 = size_diffop (index1, fold_convert (sizetype,
low_bound));
-
+
if (0 > compare_tree_int (index1,
TREE_STRING_LENGTH (init)))
{
else
{
gcc_assert (REG_P (target));
-
+
/* Store this field into a union of the proper type. */
store_field (target,
MIN ((int_size_in_bytes (TREE_TYPE
other. */
else if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (TYPE_MODE (type)))
- op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
+ op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
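/* E.g. a view-conversion between two integer types -- say a 32-bit
   int reinterpreted as a same-sized enum or as a different-width
   integer mode -- can go through convert_modes directly; cases where
   one mode is not scalar integer (say int <-> float) cannot, and may
   reach the memory spill below.  */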
- /* As a last resort, spill op0 to memory, and reload it in a
+ /* As a last resort, spill op0 to memory, and reload it in a
different mode. */
else if (!MEM_P (op0))
{
case REALIGN_LOAD_EXPR:
{
- tree oprnd0 = TREE_OPERAND (exp, 0);
+ tree oprnd0 = TREE_OPERAND (exp, 0);
tree oprnd1 = TREE_OPERAND (exp, 1);
tree oprnd2 = TREE_OPERAND (exp, 2);
rtx op2;
this_optab = optab_for_tree_code (code, type);
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
op2 = expand_normal (oprnd2);
- temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
target, unsignedp);
gcc_assert (temp);
return temp;
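/* Conceptually (a sketch of the realign_load semantics): op0 and op1
   are two aligned vector loads surrounding a misaligned address, and
   op2 controls how their bytes are combined, roughly

     result = select_bytes (op0:op1, misalignment (op2));

   which lets the vectorizer assemble a misaligned vector load out of
   aligned ones.  */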
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
op2 = expand_normal (oprnd2);
- target = expand_widen_pattern_expr (exp, op0, op1, op2,
+ target = expand_widen_pattern_expr (exp, op0, op1, op2,
target, unsignedp);
return target;
}
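/* Assuming this is the DOT_PROD_EXPR-style case (an assumption based
   on the operand shape), the expansion computes

     target = oprnd2 + sum over i of (oprnd0[i] * oprnd1[i]);

   where oprnd0/oprnd1 have elements narrower than the accumulator
   oprnd2.  */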
{
tree oprnd0 = TREE_OPERAND (exp, 0);
tree oprnd1 = TREE_OPERAND (exp, 1);
-
+
expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
target, unsignedp);
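/* This matches the WIDEN_SUM_EXPR shape: op0 holds narrow elements
   that are widened and summed into the wider accumulator op1, e.g.
   (a sketch)

     short x[N]; int acc = 0;
     for (i = 0; i < N; i++) acc += x[i];

   when vectorized via a widening-sum pattern.  */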
if (TREE_CODE (offset) != INTEGER_CST)
return 0;
/* Adjust offset by the lower bound. */
- offset = size_diffop (fold_convert (sizetype, offset),
+ offset = size_diffop (fold_convert (sizetype, offset),
fold_convert (sizetype, lower_bound));
}
}
if (icode == CODE_FOR_nothing)
{
enum machine_mode wmode;
-
+
for (wmode = operand_mode;
icode == CODE_FOR_nothing && wmode != VOIDmode;
wmode = GET_MODE_WIDER_MODE (wmode))
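/* E.g. on a target that only provides a store-flag pattern for
   SImode, a QImode comparison walks GET_MODE_WIDER_MODE through
   HImode up to SImode until an insn is found (or VOIDmode ends the
   search).  */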
label = gen_label_rtx ();
do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
NULL_RTX, label);
-
+
emit_move_insn (target, invert ? const1_rtx : const0_rtx);
emit_label (label);
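/* The emitted sequence therefore behaves like (a sketch, with invert
   clear):

     target = 1;
     if (op0 <cond> op1) goto label;
     target = 0;
   label:

   i.e. a branchy fallback for targets without a usable store-flag
   instruction.  */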