+2014-01-13 Eric Botcazou <ebotcazou@adacore.com>
+
+ * builtins.c (get_object_alignment_2): Minor tweak.
+ * tree-ssa-loop-ivopts.c (may_be_unaligned_p): Rewrite.
+
2014-01-13 Christian Bruel <christian.bruel@st.com>
* config/sh/sh-mem.cc (sh_expand_cmpnstr): Unroll small sizes and
- optimized non constant lengths.
+ optimized non constant lengths.
2014-01-13 Jakub Jelinek <jakub@redhat.com>
--- /dev/null
+-- Support package for a loop-optimization regression test.
+-- NOTE(review): the record representation clause below deliberately puts
+-- the 64-bit Long_Float components at 4-byte offsets (V1 at byte 4,
+-- V2 at byte 28, S at byte 52) with Rec'Alignment of only 4, so accesses
+-- to the Vector elements may be unaligned -- presumably this exercises
+-- the may_be_unaligned_p rewrite in this patch; confirm against the
+-- accompanying test driver.
+package Loop_Optimization17_Pkg is
+
+ type vector is array (1..3) of Long_Float;
+
+ type Rec is
+ record
+ I : Integer;
+ V1, V2 : Vector;
+ S : Long_Float;
+ end record;
+
+-- Force a packed-style layout: each Vector occupies 192 bits (3 x 64)
+-- starting at a 4-byte boundary; total size is 480 bits = 60 bytes.
+ for Rec use
+ record
+ I at 0 range 0 .. 31;
+ V1 at 4 range 0 .. 191;
+ V2 at 28 range 0 .. 191;
+ S at 52 range 0 .. 63;
+ end record;
+ for Rec'Alignment use 4;
+ for Rec'Size use 480;
+
+ type Index_T is range 1 .. 5;
+ type Arr is array (Index_T) of Rec;
+
+ Object : Arr;
+
+ function F (V : Vector) return Vector;
+
+end Loop_Optimization17_Pkg;
}
}
-/* Returns true if memory reference REF with step STEP may be unaligned. */
+/* Return true if memory reference REF with step STEP may be unaligned. */
static bool
may_be_unaligned_p (tree ref, tree step)
{
- tree base;
- tree base_type;
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
- tree toffset;
- enum machine_mode mode;
- int unsignedp, volatilep;
- unsigned base_align;
-
/* TARGET_MEM_REFs are translated directly to valid MEMs on the target,
thus they are not misaligned. */
if (TREE_CODE (ref) == TARGET_MEM_REF)
return false;
- /* The test below is basically copy of what expr.c:normal_inner_ref
- does to check whether the object must be loaded by parts when
- STRICT_ALIGNMENT is true. */
- base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode,
- &unsignedp, &volatilep, true);
- base_type = TREE_TYPE (base);
- base_align = get_object_alignment (base);
- base_align = MAX (base_align, TYPE_ALIGN (base_type));
-
- if (mode != BLKmode)
- {
- unsigned mode_align = GET_MODE_ALIGNMENT (mode);
-
- if (base_align < mode_align
- || (bitpos % mode_align) != 0
- || (bitpos % BITS_PER_UNIT) != 0)
- return true;
+ /* ALIGN is the alignment the access is expected to have, i.e. that of
+ the reference's type.  */
+ unsigned int align = TYPE_ALIGN (TREE_TYPE (ref));
- if (toffset
- && (highest_pow2_factor (toffset) * BITS_PER_UNIT) < mode_align)
- return true;
+ /* REF_ALIGN is the alignment that can be proved for REF and BITPOS the
+ bit offset from that alignment, as computed by get_object_alignment_1
+ (see builtins.c, tweaked by this same patch).  The access may be
+ unaligned if the provable alignment is too small or the offset is not
+ a multiple of ALIGN (or not even byte-aligned).  */
+ unsigned HOST_WIDE_INT bitpos;
+ unsigned int ref_align;
+ get_object_alignment_1 (ref, &ref_align, &bitpos);
+ if (ref_align < align
+ || (bitpos % align) != 0
+ || (bitpos % BITS_PER_UNIT) != 0)
+ return true;
- if ((highest_pow2_factor (step) * BITS_PER_UNIT) < mode_align)
- return true;
- }
+ /* The step must also preserve the alignment: it must be divisible by
+ ALIGN when expressed in bits.  The bound on TRAILING_ZEROS keeps the
+ shift below the width of the 1U operand, avoiding undefined
+ behavior on large counts from tree_ctz.  */
+ unsigned int trailing_zeros = tree_ctz (step);
+ if (trailing_zeros < HOST_BITS_PER_INT
+ && (1U << trailing_zeros) * BITS_PER_UNIT < align)
+ return true;
return false;
}