if (opt
&& AGGREGATE_TYPE_P (type)
&& TYPE_SIZE (type)
- && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
- && wi::geu_p (TYPE_SIZE (type), max_align)
- && align < max_align)
- align = max_align;
+ && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
+ {
- if ((TREE_INT_CST_LOW (TYPE_SIZE (type)) >= (unsigned) max_align_compat
- || TREE_INT_CST_HIGH (TYPE_SIZE (type)))
++ if (wi::geu_p (TYPE_SIZE (type), max_align_compat)
+ && align < max_align_compat)
+ align = max_align_compat;
- if ((TREE_INT_CST_LOW (TYPE_SIZE (type)) >= (unsigned) max_align
- || TREE_INT_CST_HIGH (TYPE_SIZE (type)))
- && align < max_align)
- align = max_align;
++ if (wi::geu_p (TYPE_SIZE (type), max_align)
++ && align < max_align)
++ align = max_align;
+ }
/* x86-64 ABI requires arrays greater than 16 bytes to be aligned
to 16-byte boundary. */
return val;
}
- /* Return the propagation value when applying __builtin_assume_aligned to
- its arguments. */
+ /* Return the propagation value for __builtin_assume_aligned
+ and functions with assume_aligned or alloc_align attribute.
+ For __builtin_assume_aligned, ATTR is NULL_TREE,
+ for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
+ is false, for alloc_align attribute ATTR is non-NULL and
+ ALLOC_ALIGNED is true. */
static prop_value_t
- bit_value_assume_aligned (gimple stmt)
+ bit_value_assume_aligned (gimple stmt, tree attr, prop_value_t ptrval,
+ bool alloc_aligned)
{
- tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
- tree type = TREE_TYPE (ptr);
+ tree align, misalign = NULL_TREE, type;
unsigned HOST_WIDE_INT aligni, misaligni = 0;
- prop_value_t ptrval = get_value_for_expr (ptr, true);
prop_value_t alignval;
- double_int value, mask;
+ widest_int value, mask;
prop_value_t val;
+ if (attr == NULL_TREE)
+ {
+ tree ptr = gimple_call_arg (stmt, 0);
+ type = TREE_TYPE (ptr);
+ ptrval = get_value_for_expr (ptr, true);
+ }
+ else
+ {
+ tree lhs = gimple_call_lhs (stmt);
+ type = TREE_TYPE (lhs);
+ }
+
if (ptrval.lattice_val == UNDEFINED)
return ptrval;
gcc_assert ((ptrval.lattice_val == CONSTANT
&& TREE_CODE (ptrval.value) == INTEGER_CST)
- || ptrval.mask.is_minus_one ());
+ || ptrval.mask == -1);
- align = gimple_call_arg (stmt, 1);
- if (!tree_fits_uhwi_p (align))
- return ptrval;
- aligni = tree_to_uhwi (align);
- if (aligni <= 1
- || (aligni & (aligni - 1)) != 0)
- return ptrval;
- if (gimple_call_num_args (stmt) > 2)
+ if (attr == NULL_TREE)
+ {
+ /* Get aligni and misaligni from __builtin_assume_aligned. */
+ align = gimple_call_arg (stmt, 1);
+ if (!tree_fits_uhwi_p (align))
+ return ptrval;
+ aligni = tree_to_uhwi (align);
+ if (gimple_call_num_args (stmt) > 2)
+ {
+ misalign = gimple_call_arg (stmt, 2);
+ if (!tree_fits_uhwi_p (misalign))
+ return ptrval;
+ misaligni = tree_to_uhwi (misalign);
+ }
+ }
+ else
{
- misalign = gimple_call_arg (stmt, 2);
- if (!tree_fits_uhwi_p (misalign))
+ /* Get aligni and misaligni from assume_aligned or
+ alloc_align attributes. */
+ if (TREE_VALUE (attr) == NULL_TREE)
return ptrval;
- misaligni = tree_to_uhwi (misalign);
- if (misaligni >= aligni)
+ attr = TREE_VALUE (attr);
+ align = TREE_VALUE (attr);
+ if (!tree_fits_uhwi_p (align))
return ptrval;
+ aligni = tree_to_uhwi (align);
+ if (alloc_aligned)
+ {
+ if (aligni == 0 || aligni > gimple_call_num_args (stmt))
+ return ptrval;
+ align = gimple_call_arg (stmt, aligni - 1);
+ if (!tree_fits_uhwi_p (align))
+ return ptrval;
+ aligni = tree_to_uhwi (align);
+ }
+ else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
+ {
+ misalign = TREE_VALUE (TREE_CHAIN (attr));
+ if (!tree_fits_uhwi_p (misalign))
+ return ptrval;
+ misaligni = tree_to_uhwi (misalign);
+ }
}
+ if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
+ return ptrval;
+
align = build_int_cst_type (type, -aligni);
alignval = get_value_for_expr (align, true);
bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
*/
- HOST_WIDE_INT
- min_seg_len_b = (TREE_CODE (dr_b1->seg_len) == INTEGER_CST) ?
- TREE_INT_CST_LOW (dr_b1->seg_len) :
- vect_factor;
+ HOST_WIDE_INT min_seg_len_b = (tree_fits_shwi_p (dr_b1->seg_len)
+ ? tree_to_shwi (dr_b1->seg_len)
+ : vect_factor);
if (diff <= min_seg_len_b
- || (TREE_CODE (dr_a1->seg_len) == INTEGER_CST
- && diff - (HOST_WIDE_INT) TREE_INT_CST_LOW (dr_a1->seg_len) <
- min_seg_len_b))
+ || (tree_fits_shwi_p (dr_a1->seg_len)
+ && diff - tree_to_shwi (dr_a1->seg_len) < min_seg_len_b))
{
+ if (dump_enabled_p ())
+ {
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "merging ranges for ");
+ dump_generic_expr (MSG_NOTE, TDF_SLIM,
+ DR_REF (dr_a1->dr));
+ dump_printf (MSG_NOTE, ", ");
+ dump_generic_expr (MSG_NOTE, TDF_SLIM,
+ DR_REF (dr_b1->dr));
+ dump_printf (MSG_NOTE, " and ");
+ dump_generic_expr (MSG_NOTE, TDF_SLIM,
+ DR_REF (dr_a2->dr));
+ dump_printf (MSG_NOTE, ", ");
+ dump_generic_expr (MSG_NOTE, TDF_SLIM,
+ DR_REF (dr_b2->dr));
+ dump_printf (MSG_NOTE, "\n");
+ }
+
dr_a1->seg_len = size_binop (PLUS_EXPR,
dr_a2->seg_len, size_int (diff));
comp_alias_ddrs.ordered_remove (i--);