+ if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
+ {
+ base = get_ssa_base_param (TREE_OPERAND (base, 0));
+ if (!base)
+ return NULL;
+ ptr = true;
+ }
+ else
+ ptr = false;
+
+ if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
+ return NULL;
+
+ if (sra_mode == SRA_MODE_EARLY_IPA)
+ {
+ if (size < 0 || size != max_size)
+ {
+ disqualify_candidate (base, "Encountered a variable sized access.");
+ return NULL;
+ }
+ if ((offset % BITS_PER_UNIT) != 0 || (size % BITS_PER_UNIT) != 0)
+ {
+ disqualify_candidate (base,
+ "Encountered an acces not aligned to a byte.");
+ return NULL;
+ }
+
+ if (ptr)
+ mark_parm_dereference (base, offset + size, stmt);
+ }
+ else
+ {
+ if (size != max_size)
+ {
+ size = max_size;
+ unscalarizable_region = true;
+ }
+ if (size < 0)
+ {
+ disqualify_candidate (base, "Encountered an unconstrained access.");
+ return NULL;
+ }
+ }
+
+ access = create_access_1 (base, offset, size);
+ access->expr = expr;
+ access->type = TREE_TYPE (expr);
+ access->write = write;
+ access->grp_unscalarizable_region = unscalarizable_region;
+ access->stmt = stmt;
+
+ return access;
+}
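+
+/* For example, in IPA-SRA mode an access such as a[i] with a variable index
+ makes the detected size differ from max_size and disqualifies the base,
+ whereas the intraprocedural modes merely mark the region unscalarizable. */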
+
+
+/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
+ register types or (recursively) records with only these two kinds of
+ fields. */
+
+static bool
+type_consists_of_records_p (tree type)
+{
+ tree fld;
+
+ if (TREE_CODE (type) != RECORD_TYPE)
+ return false;
+
+ for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
+ if (TREE_CODE (fld) == FIELD_DECL)
+ {
+ tree ft = TREE_TYPE (fld);
+
+ if (!is_gimple_reg_type (ft)
+ && !type_consists_of_records_p (ft))
+ return false;
+ }
+ return true;
+}
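+
+/* For instance, given
+ struct inner { int i; float f; };
+ struct outer { struct inner in; double d; };
+ the predicate accepts both struct types but rejects any struct containing
+ an array or union member. */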
+
+/* Create total_scalarization accesses for all scalar-type fields of DECL,
+ whose type must be a RECORD_TYPE conforming to type_consists_of_records_p.
+ BASE must be the top-most VAR_DECL representing the variable and OFFSET
+ must be the offset of DECL within BASE. */
+
+static void
+completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
+{
+ tree fld, decl_type = TREE_TYPE (decl);
+
+ for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
+ if (TREE_CODE (fld) == FIELD_DECL)
+ {
+ HOST_WIDE_INT pos = offset + int_bit_position (fld);
+ tree ft = TREE_TYPE (fld);
+
+ if (is_gimple_reg_type (ft))
+ {
+ struct access *access;
+ HOST_WIDE_INT size;
+ tree expr;
+ bool ok;
+
+ size = tree_low_cst (DECL_SIZE (fld), 1);
+ expr = base;
+ ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
+ ft, false);
+ gcc_assert (ok);
+
+ access = create_access_1 (base, pos, size);
+ access->expr = expr;
+ access->type = ft;
+ access->total_scalarization = 1;
+ /* Accesses for intraprocedural SRA can have their stmt NULL. */
+ }
+ else
+ completely_scalarize_record (base, fld, pos);
+ }
+}
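+
+/* For the struct outer example above, a call with BASE and DECL equal to a
+ variable V of that type creates accesses for V.in.i, V.in.f and V.d, each
+ at its bit position within V. */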
+
+
+/* Search the given tree T for a declaration by skipping handled components
+ and exclude it from the candidates with the given REASON. */
+
+static void
+disqualify_base_of_expr (tree t, const char *reason)
+{
+ while (handled_component_p (t))
+ t = TREE_OPERAND (t, 0);
+
+ if (sra_mode == SRA_MODE_EARLY_IPA)
+ {
+ if (INDIRECT_REF_P (t))
+ t = TREE_OPERAND (t, 0);
+ t = get_ssa_base_param (t);
+ }
+
+ if (t && DECL_P (t))
+ disqualify_candidate (t, reason);
+}
+
+/* Scan expression EXPR and create access structures for all accesses to
+ candidates for scalarization. Return the created access or NULL if none is
+ created. */
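+
+/* For example, given a candidate C of type _Complex double, the statement
+ __real C = 0.0 produces an access covering all of C with grp_partial_lhs
+ set, because a scalar replacement would be only partially overwritten. */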
+
+static struct access *
+build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
+{
+ struct access *ret = NULL;
+ tree expr = *expr_ptr;
+ bool partial_ref;
+
+ if (TREE_CODE (expr) == BIT_FIELD_REF
+ || TREE_CODE (expr) == IMAGPART_EXPR
+ || TREE_CODE (expr) == REALPART_EXPR)
+ {
+ expr = TREE_OPERAND (expr, 0);
+ partial_ref = true;
+ }
+ else
+ partial_ref = false;
+
+ /* We need to dive through V_C_Es in order to get the size of the operand
+ and not of the result type. Ada produces such statements. We are also
+ capable of handling the topmost V_C_E but not any of those buried in other
+ handled components. */
+ if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
+ expr = TREE_OPERAND (expr, 0);
+
+ if (contains_view_convert_expr_p (expr))
+ {
+ disqualify_base_of_expr (expr, "V_C_E under a different handled "
+ "component.");
+ return NULL;
+ }
+
+ switch (TREE_CODE (expr))
+ {
+ case INDIRECT_REF:
+ if (sra_mode != SRA_MODE_EARLY_IPA)
+ return NULL;
+ /* fall through */
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ case COMPONENT_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ ret = create_access (expr, stmt, write);
+ break;
+
+ default:
+ break;
+ }
+
+ if (write && partial_ref && ret)
+ ret->grp_partial_lhs = 1;
+
+ return ret;
+}
+
+/* Callback of scan_function. Scan expression EXPR and create access
+ structures for all accesses to candidates for scalarization. Return true if
+ any access has been inserted. */
+
+static bool
+build_access_from_expr (tree *expr_ptr,
+ gimple_stmt_iterator *gsi, bool write,
+ void *data ATTRIBUTE_UNUSED)
+{
+ struct access *access;
+
+ access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
+ if (access)
+ {
+ /* This means the aggregate is accessed as a whole in a way other than an
+ assign statement and thus cannot be removed even if we had a scalar
+ replacement for everything. */
+ if (cannot_scalarize_away_bitmap)
+ bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
+ return true;
+ }
+ return false;
+}
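+
+/* For example, an aggregate passed as an argument to a call is accessed as
+ a whole by this callback, so its base ends up in
+ cannot_scalarize_away_bitmap. */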
+
+/* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
+ modes in which it matters, return true iff they have been disqualified. RHS
+ may be NULL, in that case ignore it. If we scalarize an aggregate in
+ intra-SRA we may need to add statements after each statement. This is not
+ possible if a statement unconditionally has to end the basic block. */
+static bool
+disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
+{
+ if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
+ && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
+ {
+ disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
+ if (rhs)
+ disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
+ return true;
+ }
+ return false;
+}
+
+
+/* Result code for scan_assign callback for scan_function. */
+enum scan_assign_result { SRA_SA_NONE, /* nothing done for the stmt */
+ SRA_SA_PROCESSED, /* stmt analyzed/changed */
+ SRA_SA_REMOVED }; /* stmt redundant and eliminated */
+
+
+/* Callback of scan_function. Scan expressions occurring in the statement
+ pointed to by STMT_PTR, create access structures for all accesses to
+ candidates for scalarization and remove those candidates which occur in
+ statements or expressions that prevent them from being split apart. Return
+ SRA_SA_PROCESSED if any access has been created, SRA_SA_NONE otherwise. */
+
+static enum scan_assign_result
+build_accesses_from_assign (gimple *stmt_ptr,
+ gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
+ void *data ATTRIBUTE_UNUSED)
+{
+ gimple stmt = *stmt_ptr;
+ tree *lhs_ptr, *rhs_ptr;
+ struct access *lacc, *racc;
+
+ if (!gimple_assign_single_p (stmt))
+ return SRA_SA_NONE;
+
+ lhs_ptr = gimple_assign_lhs_ptr (stmt);
+ rhs_ptr = gimple_assign_rhs1_ptr (stmt);
+
+ if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
+ return SRA_SA_NONE;
+
+ racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
+ lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);
+
+ if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
+ && racc && !is_gimple_reg_type (racc->type))
+ bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
+
+ if (lacc && racc
+ && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
+ && !lacc->grp_unscalarizable_region
+ && !racc->grp_unscalarizable_region
+ && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
+ /* FIXME: Turn the following line into an assert after PR 40058 is
+ fixed. */
+ && lacc->size == racc->size
+ && useless_type_conversion_p (lacc->type, racc->type))
+ {
+ struct assign_link *link;
+
+ link = (struct assign_link *) pool_alloc (link_pool);
+ memset (link, 0, sizeof (struct assign_link));
+
+ link->lacc = lacc;
+ link->racc = racc;
+
+ add_link_to_rhs (racc, link);
+ }
+
+ return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
+}
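+
+/* The links created above drive later propagation across aggregate copies:
+ for an assignment d = s between two same-typed candidates, subaccesses of
+ s can be replicated on d during analysis. */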
+
+/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
+ GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
+
+static bool
+asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
+ void *data ATTRIBUTE_UNUSED)
+{
+ if (DECL_P (op))
+ disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
+
+ return false;
+}
+
+/* Return true iff callsite CALL has at least as many actual arguments as there
+ are formal parameters of the function currently processed by IPA-SRA. */
+
+static inline bool
+callsite_has_enough_arguments_p (gimple call)
+{
+ return gimple_call_num_args (call) >= (unsigned) func_param_count;
+}
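+
+/* A call site can pass fewer arguments than the callee has parameters, e.g.
+ when the callee is invoked through a mismatched or old-style prototype;
+ IPA-SRA must not rewrite parameters of functions with such call sites. */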
+
+/* Scan function and look for interesting statements. Return true if any has
+ been found or processed, as indicated by callbacks. SCAN_EXPR is a callback
+ called on all expressions within statements except assign statements and
+ those deemed entirely unsuitable for some reason (all operands in such
+ statements and expressions are removed from candidate_bitmap). SCAN_ASSIGN
+ is a callback called on all assign statements. HANDLE_SSA_DEFS is a
+ callback called on assign statements and those call statements which have a
+ lhs; it can be NULL. ANALYSIS_STAGE is true when running in the analysis
+ stage of a pass and thus no statement is being modified. DATA is a pointer
+ passed to all callbacks. If any single callback returns true, this function
+ also returns true, otherwise it returns false. */
+
+static bool
+scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
+ enum scan_assign_result (*scan_assign) (gimple *,
+ gimple_stmt_iterator *,
+ void *),
+ bool (*handle_ssa_defs)(gimple, void *),
+ bool analysis_stage, void *data)
+{
+ gimple_stmt_iterator gsi;
+ basic_block bb;
+ unsigned i;
+ tree *t;
+ bool ret = false;
+
+ FOR_EACH_BB (bb)
+ {
+ bool bb_changed = false;
+
+ if (handle_ssa_defs)
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ ret |= handle_ssa_defs (gsi_stmt (gsi), data);
+
+ gsi = gsi_start_bb (bb);
+ while (!gsi_end_p (gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ enum scan_assign_result assign_result;
+ bool any = false, deleted = false;
+
+ if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
+ bitmap_set_bit (final_bbs, bb->index);
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_RETURN:
+ t = gimple_return_retval_ptr (stmt);
+ if (*t != NULL_TREE)
+ any |= scan_expr (t, &gsi, false, data);
+ if (analysis_stage && final_bbs)
+ bitmap_set_bit (final_bbs, bb->index);
+ break;
+
+ case GIMPLE_ASSIGN:
+ assign_result = scan_assign (&stmt, &gsi, data);
+ any |= assign_result == SRA_SA_PROCESSED;
+ deleted = assign_result == SRA_SA_REMOVED;
+ if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
+ any |= handle_ssa_defs (stmt, data);
+ break;
+
+ case GIMPLE_CALL:
+ /* Operands must be processed before the lhs. */
+ for (i = 0; i < gimple_call_num_args (stmt); i++)
+ {
+ tree *argp = gimple_call_arg_ptr (stmt, i);
+ any |= scan_expr (argp, &gsi, false, data);
+ }
+
+ if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
+ {
+ tree dest = gimple_call_fndecl (stmt);
+ int flags = gimple_call_flags (stmt);
+
+ if (dest)
+ {
+ if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
+ encountered_apply_args = true;
+ if (cgraph_get_node (dest)
+ == cgraph_get_node (current_function_decl))
+ {
+ encountered_recursive_call = true;
+ if (!callsite_has_enough_arguments_p (stmt))
+ encountered_unchangable_recursive_call = true;
+ }
+ }
+
+ if (final_bbs
+ && (flags & (ECF_CONST | ECF_PURE)) == 0)
+ bitmap_set_bit (final_bbs, bb->index);
+ }
+
+ if (gimple_call_lhs (stmt))
+ {
+ tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
+ if (!analysis_stage
+ || !disqualify_ops_if_throwing_stmt (stmt,
+ *lhs_ptr, NULL))
+ {
+ any |= scan_expr (lhs_ptr, &gsi, true, data);
+ if (handle_ssa_defs)
+ any |= handle_ssa_defs (stmt, data);
+ }
+ }
+ break;
+
+ case GIMPLE_ASM:
+ if (analysis_stage)
+ {
+ walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
+ asm_visit_addr);
+ if (final_bbs)
+ bitmap_set_bit (final_bbs, bb->index);
+ }
+ for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+ {
+ tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
+ any |= scan_expr (op, &gsi, false, data);
+ }
+ for (i = 0; i < gimple_asm_noutputs (stmt); i++)
+ {
+ tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
+ any |= scan_expr (op, &gsi, true, data);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ if (any)
+ {
+ ret = true;
+
+ if (!analysis_stage)
+ {
+ bb_changed = true;
+ update_stmt (stmt);
+ maybe_clean_eh_stmt (stmt);
+ }
+ }
+ if (deleted)
+ {
+ bb_changed = true;
+ ret = true;
+ }
+ else
+ gsi_next (&gsi);
+ }
+ if (!analysis_stage && bb_changed && sra_mode == SRA_MODE_EARLY_IPA)
+ gimple_purge_dead_eh_edges (bb);
+ }
+
+ return ret;
+}
+
+/* Helper function for qsort. The array contains pointers to accesses. An
+ access is considered smaller than another if it has a smaller offset or if
+ the offsets are the same but its size is bigger. */
+
+static int
+compare_access_positions (const void *a, const void *b)
+{
+ const access_p *fp1 = (const access_p *) a;
+ const access_p *fp2 = (const access_p *) b;
+ const access_p f1 = *fp1;
+ const access_p f2 = *fp2;
+
+ if (f1->offset != f2->offset)
+ return f1->offset < f2->offset ? -1 : 1;
+
+ if (f1->size == f2->size)
+ {
+ if (f1->type == f2->type)
+ return 0;
+ /* Put any non-aggregate type before any aggregate type. */
+ else if (!is_gimple_reg_type (f1->type)
+ && is_gimple_reg_type (f2->type))
+ return 1;
+ else if (is_gimple_reg_type (f1->type)
+ && !is_gimple_reg_type (f2->type))
+ return -1;
+ /* Put any complex or vector type before any other scalar type. */
+ else if (TREE_CODE (f1->type) != COMPLEX_TYPE
+ && TREE_CODE (f1->type) != VECTOR_TYPE
+ && (TREE_CODE (f2->type) == COMPLEX_TYPE
+ || TREE_CODE (f2->type) == VECTOR_TYPE))
+ return 1;
+ else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
+ || TREE_CODE (f1->type) == VECTOR_TYPE)
+ && TREE_CODE (f2->type) != COMPLEX_TYPE
+ && TREE_CODE (f2->type) != VECTOR_TYPE)
+ return -1;
+ /* Put the integral type with the bigger precision first. */
+ else if (INTEGRAL_TYPE_P (f1->type)
+ && INTEGRAL_TYPE_P (f2->type))
+ return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
+ /* Put any integral type with non-full precision last. */
+ else if (INTEGRAL_TYPE_P (f1->type)
+ && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
+ != TYPE_PRECISION (f1->type)))
+ return 1;
+ else if (INTEGRAL_TYPE_P (f2->type)
+ && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
+ != TYPE_PRECISION (f2->type)))
+ return -1;
+ /* Stabilize the sort. */
+ return TYPE_UID (f1->type) - TYPE_UID (f2->type);
+ }
+
+ /* We want the bigger accesses first, thus the opposite operator in the next
+ line: */
+ return f1->size > f2->size ? -1 : 1;
+}
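+
+/* Consequently, among accesses starting at the same offset, a 64-bit access
+ sorts before a 32-bit one, a scalar before a same-sized aggregate, and a
+ complex or vector type before any other scalar of the same size. */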
+
+
+/* Append the name of DECL to the name obstack. A helper function for
+ make_fancy_name. */
+
+static void
+make_fancy_decl_name (tree decl)
+{
+ char buffer[32];
+
+ tree name = DECL_NAME (decl);
+ if (name)
+ obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
+ IDENTIFIER_LENGTH (name));
+ else
+ {
+ sprintf (buffer, "D%u", DECL_UID (decl));
+ obstack_grow (&name_obstack, buffer, strlen (buffer));
+ }
+}
+
+/* Helper for make_fancy_name. */
+
+static void
+make_fancy_name_1 (tree expr)
+{
+ char buffer[32];
+ tree index;
+
+ if (DECL_P (expr))
+ {
+ make_fancy_decl_name (expr);
+ return;
+ }
+
+ switch (TREE_CODE (expr))
+ {
+ case COMPONENT_REF:
+ make_fancy_name_1 (TREE_OPERAND (expr, 0));
+ obstack_1grow (&name_obstack, '$');
+ make_fancy_decl_name (TREE_OPERAND (expr, 1));
+ break;
+
+ case ARRAY_REF:
+ make_fancy_name_1 (TREE_OPERAND (expr, 0));
+ obstack_1grow (&name_obstack, '$');
+ /* Arrays with only one element may not have a constant as their
+ index. */
+ index = TREE_OPERAND (expr, 1);
+ if (TREE_CODE (index) != INTEGER_CST)
+ break;
+ sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
+ obstack_grow (&name_obstack, buffer, strlen (buffer));
+
+ break;
+
+ case BIT_FIELD_REF:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ gcc_unreachable (); /* We treat these as scalars. */
+ break;
+ default:
+ break;
+ }
+}
+
+/* Create a human readable name for a replacement variable based on the
+ access expression EXPR. */
+
+static char *
+make_fancy_name (tree expr)
+{
+ make_fancy_name_1 (expr);
+ obstack_1grow (&name_obstack, '\0');
+ return XOBFINISH (&name_obstack, char *);
+}
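+
+/* For instance, the expression foo.bar[3].baz produces the name
+ "foo$bar$3$baz". */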
+
+/* Helper function for build_ref_for_offset. */
+
+static bool
+build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
+ tree exp_type)
+{
+ while (1)
+ {
+ tree fld;
+ tree tr_size, index, minidx;
+ HOST_WIDE_INT el_size;
+
+ if (offset == 0 && exp_type
+ && types_compatible_p (exp_type, type))
+ return true;
+
+ switch (TREE_CODE (type))
+ {
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ case RECORD_TYPE:
+ for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
+ {
+ HOST_WIDE_INT pos, size;
+ tree expr, *expr_ptr;
+
+ if (TREE_CODE (fld) != FIELD_DECL)
+ continue;
+
+ pos = int_bit_position (fld);
+ gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
+ tr_size = DECL_SIZE (fld);
+ if (!tr_size || !host_integerp (tr_size, 1))
+ continue;
+ size = tree_low_cst (tr_size, 1);
+ if (size == 0)
+ {
+ if (pos != offset)
+ continue;
+ }
+ else if (pos > offset || (pos + size) <= offset)
+ continue;
+
+ if (res)
+ {
+ expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
+ NULL_TREE);
+ expr_ptr = &expr;
+ }
+ else
+ expr_ptr = NULL;
+ if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
+ offset - pos, exp_type))
+ {
+ if (res)
+ *res = expr;
+ return true;
+ }
+ }
+ return false;
+
+ case ARRAY_TYPE:
+ tr_size = TYPE_SIZE (TREE_TYPE (type));
+ if (!tr_size || !host_integerp (tr_size, 1))
+ return false;
+ el_size = tree_low_cst (tr_size, 1);
+
+ minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
+ if (TREE_CODE (minidx) != INTEGER_CST)
+ return false;
+ if (res)
+ {
+ index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
+ if (!integer_zerop (minidx))
+ index = int_const_binop (PLUS_EXPR, index, minidx, 0);
+ *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
+ NULL_TREE, NULL_TREE);
+ }
+ offset = offset % el_size;
+ type = TREE_TYPE (type);
+ break;
+
+ default:
+ if (offset != 0)
+ return false;
+
+ if (exp_type)
+ return false;
+ else
+ return true;
+ }
+ }
+}
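+
+/* For example, for TYPE struct { int a; int b; }, OFFSET equal to the bit
+ position of field b and EXP_TYPE compatible with int, the loop above wraps
+ *RES in a COMPONENT_REF of field b; had b itself been an aggregate, the
+ descent would have continued inside it. */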
+
+/* Construct an expression that would reference a part of aggregate *EXPR of
+ type TYPE at the given OFFSET of the type EXP_TYPE. If EXPR is NULL, the
+ function only determines whether it can build such a reference without
+ actually doing it, otherwise the tree it points to is unshared first and
+ then used as a base for further sub-references.
+
+ FIXME: Eventually this should be replaced with
+ maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
+ minor rewrite of fold_stmt. */
+
+bool
+build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
+ tree exp_type, bool allow_ptr)
+{
+ location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;
+
+ if (expr)
+ *expr = unshare_expr (*expr);
+
+ if (allow_ptr && POINTER_TYPE_P (type))
+ {
+ type = TREE_TYPE (type);
+ if (expr)
+ *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
+ }
+
+ return build_ref_for_offset_1 (expr, type, offset, exp_type);
+}
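+
+/* E.g. when *EXPR is a PARM_DECL with type struct S * and ALLOW_PTR is
+ true, an INDIRECT_REF is built around it first and the search for a
+ reference to EXP_TYPE at OFFSET then proceeds within struct S. */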
+
+/* Return true iff TYPE is a stdarg va_list type. */
+
+static inline bool
+is_va_list_type (tree type)
+{
+ return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
+}
+
+/* The very first phase of intraprocedural SRA. It marks in candidate_bitmap
+ those variables whose type is suitable for scalarization. */
+
+static bool
+find_var_candidates (void)
+{
+ tree var, type;
+ referenced_var_iterator rvi;
+ bool ret = false;
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
+ continue;
+ type = TREE_TYPE (var);
+
+ if (!AGGREGATE_TYPE_P (type)
+ || needs_to_live_in_memory (var)
+ || TREE_THIS_VOLATILE (var)
+ || !COMPLETE_TYPE_P (type)
+ || !host_integerp (TYPE_SIZE (type), 1)
+ || tree_low_cst (TYPE_SIZE (type), 1) == 0
+ || type_internals_preclude_sra_p (type)
+ /* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
+ we also want to schedule it rather late. Thus we ignore it in
+ the early pass. */
+ || (sra_mode == SRA_MODE_EARLY_INTRA
+ && is_va_list_type (type)))
+ continue;
+
+ bitmap_set_bit (candidate_bitmap, DECL_UID (var));
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
+ print_generic_expr (dump_file, var, 0);
+ fprintf (dump_file, "\n");
+ }
+ ret = true;
+ }
+
+ return ret;
+}
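+
+/* For example, a local aggregate whose address is taken (and so needs to
+ live in memory) or one that is volatile never becomes a candidate. */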
+
+/* Sort all accesses for the given variable, check for partial overlaps and
+ return NULL if there are any. If there are none, pick a representative for
+ each combination of offset and size and create a linked list out of them.
+ Return the pointer to the first representative and make sure it is the first
+ one in the vector of accesses. */
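+
+/* For instance, two accesses with the same <offset 0, size 32> are merged
+ into one group; <0, 64> followed by <32, 32> is kept because the second
+ lies fully within the first; but <0, 64> followed by <32, 64> overlaps only
+ partially and makes the function return NULL. */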
+
+static struct access *
+sort_and_splice_var_accesses (tree var)
+{
+ int i, j, access_count;
+ struct access *res, **prev_acc_ptr = &res;
+ VEC (access_p, heap) *access_vec;
+ bool first = true;
+ HOST_WIDE_INT low = -1, high = 0;
+
+ access_vec = get_base_access_vector (var);
+ if (!access_vec)
+ return NULL;
+ access_count = VEC_length (access_p, access_vec);
+
+ /* Sort by <OFFSET, SIZE>. */
+ qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
+ compare_access_positions);
+
+ i = 0;
+ while (i < access_count)
+ {
+ struct access *access = VEC_index (access_p, access_vec, i);
+ bool grp_write = access->write;
+ bool grp_read = !access->write;
+ bool multiple_reads = false;
+ bool total_scalarization = access->total_scalarization;
+ bool grp_partial_lhs = access->grp_partial_lhs;
+ bool first_scalar = is_gimple_reg_type (access->type);
+ bool unscalarizable_region = access->grp_unscalarizable_region;
+
+ if (first || access->offset >= high)
+ {
+ first = false;
+ low = access->offset;
+ high = access->offset + access->size;
+ }
+ else if (access->offset > low && access->offset + access->size > high)
+ return NULL;
+ else
+ gcc_assert (access->offset >= low
+ && access->offset + access->size <= high);
+
+ j = i + 1;
+ while (j < access_count)
+ {
+ struct access *ac2 = VEC_index (access_p, access_vec, j);
+ if (ac2->offset != access->offset || ac2->size != access->size)
+ break;
+ if (ac2->write)
+ grp_write = true;
+ else
+ {
+ if (grp_read)
+ multiple_reads = true;
+ else
+ grp_read = true;
+ }
+ grp_partial_lhs |= ac2->grp_partial_lhs;
+ unscalarizable_region |= ac2->grp_unscalarizable_region;
+ total_scalarization |= ac2->total_scalarization;
+ relink_to_new_repr (access, ac2);
+
+ /* If there are both aggregate-type and scalar-type accesses with
+ this combination of size and offset, the comparison function
+ should have put the scalars first. */
+ gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
+ ac2->group_representative = access;
+ j++;
+ }
+
+ i = j;
+
+ access->group_representative = access;
+ access->grp_write = grp_write;
+ access->grp_read = grp_read;
+ access->grp_hint = multiple_reads || total_scalarization;
+ access->grp_partial_lhs = grp_partial_lhs;
+ access->grp_unscalarizable_region = unscalarizable_region;
+ if (access->first_link)
+ add_access_to_work_queue (access);
+
+ *prev_acc_ptr = access;
+ prev_acc_ptr = &access->next_grp;
+ }
+
+ gcc_assert (res == VEC_index (access_p, access_vec, 0));
+ return res;
+}
+
+/* Create a variable for the given ACCESS, which determines its type, name
+ and a few other properties. Return the variable declaration and store it
+ also in ACCESS->replacement. */
+
+static tree
+create_access_replacement (struct access *access)