re PR debug/78839 (DWARF output different between GCC 5 and 6)
author Jakub Jelinek <jakub@redhat.com>
Tue, 17 Jan 2017 18:32:13 +0000 (19:32 +0100)
committer Jakub Jelinek <jakub@gcc.gnu.org>
Tue, 17 Jan 2017 18:32:13 +0000 (19:32 +0100)
PR debug/78839
* dwarf2out.c (field_byte_offset): Restore the
PCC_BITFIELD_TYPE_MATTERS behavior for INTEGER_CST DECL_FIELD_OFFSET
and DECL_FIELD_BIT_OFFSET.  Use fold_build2 instead of build2 + fold.
(analyze_variants_discr, gen_variant_part): Use fold_build2 instead
of build2 + fold.

From-SVN: r244545
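
For context, PCC_BITFIELD_TYPE_MATTERS targets lay a bit-field out relative
to a storage unit of its declared type, and it is the constant byte offset of
that containing unit which the restored field_byte_offset branch computes.
Below is a minimal, hypothetical C example (not the PR test case) of the kind
of member whose DWARF location this affects; the comments assume a typical
byte-addressed target:

/* Hypothetical example for illustration only.  */
#include <stdio.h>

struct s
{
  char c;      /* ordinary member at byte 0 */
  int bf : 5;  /* bit-field carried in an int-sized storage unit */
  int i;       /* ordinary member, for comparison */
};

int
main (void)
{
  struct s x = { 1, 2, 3 };
  printf ("sizeof (struct s) = %zu\n", sizeof (struct s));
  printf ("x.c = %d, x.bf = %d, x.i = %d\n", x.c, x.bf, x.i);
  return 0;
}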

gcc/ChangeLog
gcc/dwarf2out.c

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 892ce5d..0670beb 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,12 @@
+2017-01-17  Jakub Jelinek  <jakub@redhat.com>
+
+       PR debug/78839
+       * dwarf2out.c (field_byte_offset): Restore the
+       PCC_BITFIELD_TYPE_MATTERS behavior for INTEGER_CST DECL_FIELD_OFFSET
+       and DECL_FIELD_BIT_OFFSET.  Use fold_build2 instead of build2 + fold.
+       (analyze_variants_discr, gen_variant_part): Use fold_build2 instead
+       of build2 + fold.
+
 2017-01-17  Eric Botcazou  <ebotcazou@adacore.com>
 
        PR ada/67205
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index 19f7f65..169da86 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -17980,10 +17980,6 @@ static dw_loc_descr_ref
 field_byte_offset (const_tree decl, struct vlr_context *ctx,
                   HOST_WIDE_INT *cst_offset)
 {
-  offset_int object_offset_in_bits;
-  offset_int object_offset_in_bytes;
-  offset_int bitpos_int;
-  bool is_byte_offset_cst, is_bit_offset_cst;
   tree tree_result;
   dw_loc_list_ref loc_result;
 
@@ -17994,20 +17990,21 @@ field_byte_offset (const_tree decl, struct vlr_context *ctx,
   else
     gcc_assert (TREE_CODE (decl) == FIELD_DECL);
 
-  is_bit_offset_cst = TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST;
-  is_byte_offset_cst = TREE_CODE (DECL_FIELD_OFFSET (decl)) != INTEGER_CST;
-
   /* We cannot handle variable bit offsets at the moment, so abort if it's the
      case.  */
-  if (is_bit_offset_cst)
+  if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
     return NULL;
 
 #ifdef PCC_BITFIELD_TYPE_MATTERS
   /* We used to handle only constant offsets in all cases.  Now, we handle
      properly dynamic byte offsets only when PCC bitfield type doesn't
      matter.  */
-  if (PCC_BITFIELD_TYPE_MATTERS && is_byte_offset_cst && is_bit_offset_cst)
+  if (PCC_BITFIELD_TYPE_MATTERS
+      && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
     {
+      offset_int object_offset_in_bits;
+      offset_int object_offset_in_bytes;
+      offset_int bitpos_int;
       tree type;
       tree field_size_tree;
       offset_int deepest_bitpos;
@@ -18102,13 +18099,23 @@ field_byte_offset (const_tree decl, struct vlr_context *ctx,
          object_offset_in_bits
            = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
        }
+
+      object_offset_in_bytes
+       = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
+      if (ctx->variant_part_offset == NULL_TREE)
+       {
+         *cst_offset = object_offset_in_bytes.to_shwi ();
+         return NULL;
+       }
+      tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
     }
+  else
 #endif /* PCC_BITFIELD_TYPE_MATTERS */
+    tree_result = byte_position (decl);
 
-  tree_result = byte_position (decl);
   if (ctx->variant_part_offset != NULL_TREE)
-    tree_result = fold (build2 (PLUS_EXPR, TREE_TYPE (tree_result),
-                               ctx->variant_part_offset, tree_result));
+    tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
+                              ctx->variant_part_offset, tree_result);
 
   /* If the byte offset is a constant, it's simpler to handle a native
      constant rather than a DWARF expression.  */
@@ -23744,14 +23751,12 @@ analyze_variants_discr (tree variant_part_decl,
 
              if (!lower_cst_included)
                lower_cst
-                 = fold (build2 (PLUS_EXPR, TREE_TYPE (lower_cst),
-                                 lower_cst,
-                                 build_int_cst (TREE_TYPE (lower_cst), 1)));
+                 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
+                                build_int_cst (TREE_TYPE (lower_cst), 1));
              if (!upper_cst_included)
                upper_cst
-                 = fold (build2 (MINUS_EXPR, TREE_TYPE (upper_cst),
-                                 upper_cst,
-                                 build_int_cst (TREE_TYPE (upper_cst), 1)));
+                 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
+                                build_int_cst (TREE_TYPE (upper_cst), 1));
 
              if (!get_discr_value (lower_cst,
                                    &new_node->dw_discr_lower_bound)
@@ -23922,8 +23927,8 @@ gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
                 we recurse.  */
 
              vlr_sub_ctx.variant_part_offset
-               = fold (build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
-                               variant_part_offset, byte_position (member)));
+               = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
+                              variant_part_offset, byte_position (member));
              gen_variant_part (member, &vlr_sub_ctx, variant_die);
            }
          else