#include "backend.h"
#include "target.h"
#include "rtl.h"
+#include "rtlanal.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
return;
}
}
+
+/* Try to add a description of REG X to this object, stopping once
+   the REF_END limit has been reached.  FLAGS is a bitmask of
+   rtx_obj_reference flags that describe the context.  */
+
+void
+rtx_properties::try_to_add_reg (const_rtx x, unsigned int flags)
+{
+  /* A hard register that occupies several registers gets one reference
+     per register, each tagged IS_MULTIREG and carrying its offset from
+     the first register.  */
+  if (REG_NREGS (x) != 1)
+    flags |= rtx_obj_flags::IS_MULTIREG;
+  machine_mode mode = GET_MODE (x);
+  unsigned int start_regno = REGNO (x);
+  unsigned int end_regno = END_REGNO (x);
+  for (unsigned int regno = start_regno; regno < end_regno; ++regno)
+    if (ref_iter != ref_end)
+      *ref_iter++ = rtx_obj_reference (regno, flags, mode,
+				       regno - start_regno);
+}
+
+/* Add a description of destination X to this object.  FLAGS is a bitmask
+   of rtx_obj_reference flags that describe the context.
+
+   This routine accepts all rtxes that can legitimately appear in a
+   SET_DEST.  */
+
+void
+rtx_properties::try_to_add_dest (const_rtx x, unsigned int flags)
+{
+  /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
+     each of whose first operand is a register.  */
+  if (__builtin_expect (GET_CODE (x) == PARALLEL, 0))
+    {
+      for (int i = XVECLEN (x, 0) - 1; i >= 0; --i)
+	if (rtx dest = XEXP (XVECEXP (x, 0, i), 0))
+	  try_to_add_dest (dest, flags);
+      return;
+    }
+
+  /* Only the "sticky" flags carry over into the rtxes that are read as
+     part of performing this store (extract operands, addresses).  */
+  unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
+  flags |= rtx_obj_flags::IS_WRITE;
+  /* Peel wrappers off the stored object.  A wrapper that preserves part
+     of the old value also turns the store into a read.  */
+  for (;;)
+    if (GET_CODE (x) == ZERO_EXTRACT)
+      {
+	/* The position and length operands are ordinary sources.  */
+	try_to_add_src (XEXP (x, 1), base_flags);
+	try_to_add_src (XEXP (x, 2), base_flags);
+	flags |= rtx_obj_flags::IS_READ;
+	x = XEXP (x, 0);
+      }
+    else if (GET_CODE (x) == STRICT_LOW_PART)
+      {
+	flags |= rtx_obj_flags::IS_READ;
+	x = XEXP (x, 0);
+      }
+    else if (GET_CODE (x) == SUBREG)
+      {
+	flags |= rtx_obj_flags::IN_SUBREG;
+	if (read_modify_subreg_p (x))
+	  flags |= rtx_obj_flags::IS_READ;
+	x = SUBREG_REG (x);
+      }
+    else
+      break;
+
+  if (MEM_P (x))
+    {
+      if (ref_iter != ref_end)
+	*ref_iter++ = rtx_obj_reference (MEM_REGNO, flags, GET_MODE (x));
+
+      /* The address is read as a source.  A read-modify-write MEM makes
+	 the address part of a load as well as of a store.  */
+      unsigned int addr_flags = base_flags | rtx_obj_flags::IN_MEM_STORE;
+      if (flags & rtx_obj_flags::IS_READ)
+	addr_flags |= rtx_obj_flags::IN_MEM_LOAD;
+      try_to_add_src (XEXP (x, 0), addr_flags);
+      return;
+    }
+
+  if (__builtin_expect (REG_P (x), 1))
+    {
+      /* We want to keep sp alive everywhere -  by making all
+	 writes to sp also use sp.  */
+      if (REGNO (x) == STACK_POINTER_REGNUM)
+	flags |= rtx_obj_flags::IS_READ;
+      try_to_add_reg (x, flags);
+      return;
+    }
+  /* Any other destination code (such as pc) is deliberately dropped:
+     it names neither a register nor variable memory.  */
+}
+
+/* Try to add a description of source X to this object, stopping once
+   the REF_END limit has been reached.  FLAGS is a bitmask of
+   rtx_obj_reference flags that describe the context.
+
+   This routine accepts all rtxes that can legitimately appear in a SET_SRC.  */
+
+void
+rtx_properties::try_to_add_src (const_rtx x, unsigned int flags)
+{
+  /* Only the "sticky" flags are propagated into nested contexts such as
+     MEM addresses, which start a fresh reference context.  */
+  unsigned int base_flags = flags & rtx_obj_flags::STICKY_FLAGS;
+  subrtx_iterator::array_type array;
+  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
+    {
+      /* Deliberately shadows the parameter: X is now the current subrtx.  */
+      const_rtx x = *iter;
+      rtx_code code = GET_CODE (x);
+      if (code == REG)
+	try_to_add_reg (x, flags | rtx_obj_flags::IS_READ);
+      else if (code == MEM)
+	{
+	  if (MEM_VOLATILE_P (x))
+	    has_volatile_refs = true;
+
+	  /* Loads from read-only memory are not recorded as memory
+	     references, but their addresses are still walked below.  */
+	  if (!MEM_READONLY_P (x) && ref_iter != ref_end)
+	    {
+	      auto mem_flags = flags | rtx_obj_flags::IS_READ;
+	      *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags,
+					       GET_MODE (x));
+	    }
+
+	  try_to_add_src (XEXP (x, 0),
+			  base_flags | rtx_obj_flags::IN_MEM_LOAD);
+	  iter.skip_subrtxes ();
+	}
+      else if (code == SUBREG)
+	{
+	  try_to_add_src (SUBREG_REG (x), flags | rtx_obj_flags::IN_SUBREG);
+	  iter.skip_subrtxes ();
+	}
+      else if (code == UNSPEC_VOLATILE)
+	has_volatile_refs = true;
+      else if (code == ASM_INPUT || code == ASM_OPERANDS)
+	{
+	  has_asm = true;
+	  if (MEM_VOLATILE_P (x))
+	    has_volatile_refs = true;
+	}
+      else if (code == PRE_INC
+	       || code == PRE_DEC
+	       || code == POST_INC
+	       || code == POST_DEC
+	       || code == PRE_MODIFY
+	       || code == POST_MODIFY)
+	{
+	  has_pre_post_modify = true;
+
+	  unsigned int addr_flags = (base_flags
+				     | rtx_obj_flags::IS_PRE_POST_MODIFY
+				     | rtx_obj_flags::IS_READ);
+	  /* The autoincremented register is both read and written;
+	     try_to_add_dest supplies IS_WRITE itself.  */
+	  try_to_add_dest (XEXP (x, 0), addr_flags);
+	  if (code == PRE_MODIFY || code == POST_MODIFY)
+	    /* Presumably the first operand of the embedded expression
+	       duplicates the register handled above, so only the second
+	       operand still needs to be walked -- confirm against the
+	       PRE_MODIFY/POST_MODIFY RTL documentation.  */
+	    iter.substitute (XEXP (XEXP (x, 1), 1));
+	  else
+	    iter.skip_subrtxes ();
+	}
+      else if (code == CALL)
+	has_call = true;
+    }
+}
+
+/* Try to add a description of instruction pattern PAT to this object,
+   stopping once the REF_END limit has been reached.  */
+
+void
+rtx_properties::try_to_add_pattern (const_rtx pat)
+{
+  switch (GET_CODE (pat))
+    {
+    case COND_EXEC:
+      try_to_add_src (COND_EXEC_TEST (pat));
+      try_to_add_pattern (COND_EXEC_CODE (pat));
+      break;
+
+    case PARALLEL:
+      {
+	/* The final element is peeled out of the loop so that the last
+	   recursive call is in tail position.  */
+	int last = XVECLEN (pat, 0) - 1;
+	for (int i = 0; i < last; ++i)
+	  try_to_add_pattern (XVECEXP (pat, 0, i));
+	try_to_add_pattern (XVECEXP (pat, 0, last));
+	break;
+      }
+
+    case ASM_OPERANDS:
+      /* Only the asm inputs are handled here; the asm rtx itself is
+	 recognized by try_to_add_src, which sets has_asm.  */
+      for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (pat); i < len; ++i)
+	try_to_add_src (ASM_OPERANDS_INPUT (pat, i));
+      break;
+
+    case CLOBBER:
+      try_to_add_dest (XEXP (pat, 0), rtx_obj_flags::IS_CLOBBER);
+      break;
+
+    case SET:
+      try_to_add_dest (SET_DEST (pat));
+      try_to_add_src (SET_SRC (pat));
+      break;
+
+    default:
+      /* All the other possibilities never store and can use a normal
+	 rtx walk.  This includes:
+
+	 - USE
+	 - TRAP_IF
+	 - PREFETCH
+	 - UNSPEC
+	 - UNSPEC_VOLATILE.  */
+      try_to_add_src (pat);
+      break;
+    }
+}
+
+/* Try to add a description of INSN to this object, stopping once
+   the REF_END limit has been reached.  INCLUDE_NOTES is true if the
+   description should include REG_EQUAL and REG_EQUIV notes; all such
+   references will then be marked with rtx_obj_flags::IN_NOTE.
+
+   For calls, this description includes all accesses in
+   CALL_INSN_FUNCTION_USAGE.  It also includes all implicit accesses
+   to global registers by the target function.  However, it does not
+   include clobbers performed by the target function; callers that want
+   this information should instead use the function_abi interface.  */
+
+void
+rtx_properties::try_to_add_insn (const rtx_insn *insn, bool include_notes)
+{
+  if (CALL_P (insn))
+    {
+      /* Adding the global registers first removes a situation in which
+	 a fixed-form clobber of register R could come before a real set
+	 of register R.  */
+      if (!hard_reg_set_empty_p (global_reg_set))
+	{
+	  unsigned int flags = (rtx_obj_flags::IS_READ
+				| rtx_obj_flags::IS_WRITE);
+	  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
+	    if (global_regs[regno] && ref_iter != ref_end)
+	      *ref_iter++ = rtx_obj_reference (regno, flags,
+					       reg_raw_mode[regno], 0);
+	}
+      /* A const call neither reads nor writes variable memory; a pure
+	 call reads it but does not write it.  */
+      if (ref_iter != ref_end && !RTL_CONST_CALL_P (insn))
+	{
+	  auto mem_flags = rtx_obj_flags::IS_READ;
+	  if (!RTL_PURE_CALL_P (insn))
+	    mem_flags |= rtx_obj_flags::IS_WRITE;
+	  *ref_iter++ = rtx_obj_reference (MEM_REGNO, mem_flags, BLKmode);
+	}
+      try_to_add_pattern (PATTERN (insn));
+      /* Registers used or clobbered by the call convention.  */
+      for (rtx link = CALL_INSN_FUNCTION_USAGE (insn); link;
+	   link = XEXP (link, 1))
+	{
+	  rtx x = XEXP (link, 0);
+	  if (GET_CODE (x) == CLOBBER)
+	    try_to_add_dest (XEXP (x, 0), rtx_obj_flags::IS_CLOBBER);
+	  else if (GET_CODE (x) == USE)
+	    try_to_add_src (XEXP (x, 0));
+	}
+    }
+  else
+    try_to_add_pattern (PATTERN (insn));
+
+  if (include_notes)
+    for (rtx note = REG_NOTES (insn); note; note = XEXP (note, 1))
+      if (REG_NOTE_KIND (note) == REG_EQUAL
+	  || REG_NOTE_KIND (note) == REG_EQUIV)
+	try_to_add_note (XEXP (note, 0));
+}
+
+/* Grow the storage by a bit while keeping the contents of the first
+   START elements.  */
+
+void
+vec_rtx_properties_base::grow (ptrdiff_t start)
+{
+  /* The same heuristic that vec uses.  */
+  ptrdiff_t new_elems = (ref_end - ref_begin) * 3 / 2;
+  if (ref_begin == m_storage)
+    {
+      /* First growth: move off the inline array onto the heap.  The raw
+	 memcpy is safe because rtx_obj_reference is plain data.  */
+      ref_begin = XNEWVEC (rtx_obj_reference, new_elems);
+      if (start)
+	memcpy (ref_begin, m_storage, start * sizeof (rtx_obj_reference));
+    }
+  else
+    ref_begin = reinterpret_cast<rtx_obj_reference *>
+      (xrealloc (ref_begin, new_elems * sizeof (rtx_obj_reference)));
+  /* Entries recorded after START are dropped; the caller is expected
+     to collect them again (see growing_rtx_properties::repeat).  */
+  ref_iter = ref_begin + start;
+  ref_end = ref_begin + new_elems;
+}
\f
/* Return nonzero if X's old contents don't survive after INSN.
This will be true if X is (cc0) or if X is a register and
--- /dev/null
+/* Analyze RTL for GNU compiler.
+ Copyright (C) 2020 Free Software Foundation, Inc.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
+
+/* Note that for historical reasons, many rtlanal.c functions are
+ declared in rtl.h rather than here. */
+
+#ifndef GCC_RTLANAL_H
+#define GCC_RTLANAL_H
+
+/* A dummy register value that represents the whole of variable memory.
+   Using ~0U means that arrays that track both registers and memory can
+   be indexed by regno + 1.  */
+const unsigned int MEM_REGNO = ~0U;
+
+/* Bitmasks of flags describing an rtx_obj_reference.  See the accessors
+   in the class for details.  The values are uint16_t so that they fit
+   in rtx_obj_reference's 16-bit FLAGS bitfield.  */
+namespace rtx_obj_flags
+{
+  const uint16_t IS_READ = 1U << 0;
+  const uint16_t IS_WRITE = 1U << 1;
+  const uint16_t IS_CLOBBER = 1U << 2;
+  const uint16_t IS_PRE_POST_MODIFY = 1U << 3;
+  const uint16_t IS_MULTIREG = 1U << 4;
+  const uint16_t IN_MEM_LOAD = 1U << 5;
+  const uint16_t IN_MEM_STORE = 1U << 6;
+  const uint16_t IN_SUBREG = 1U << 7;
+  const uint16_t IN_NOTE = 1U << 8;
+
+  /* Flags that apply to all subrtxes of the rtx they were originally
+     added for.  */
+  static const uint16_t STICKY_FLAGS = IN_NOTE;
+}
+
+/* Contains information about a reference to a register or variable memory.
+   The class is packed into 8 bytes so that arrays of references stay
+   cheap to copy and grow.  */
+class rtx_obj_reference
+{
+public:
+  rtx_obj_reference () = default;
+  rtx_obj_reference (unsigned int regno, uint16_t flags,
+		     machine_mode mode, unsigned int multireg_offset = 0);
+
+  bool is_reg () const { return regno != MEM_REGNO; }
+  bool is_mem () const { return regno == MEM_REGNO; }
+
+  /* True if the reference is a read or a write respectively.
+     Both flags are set in a read-modify-write context, such as
+     for read_modify_subreg_p.  */
+  bool is_read () const { return flags & rtx_obj_flags::IS_READ; }
+  bool is_write () const { return flags & rtx_obj_flags::IS_WRITE; }
+
+  /* True if IS_WRITE and if the write is a clobber rather than a set.  */
+  bool is_clobber () const { return flags & rtx_obj_flags::IS_CLOBBER; }
+
+  /* True if the reference is updated by an RTX_AUTOINC.  Both IS_READ
+     and IS_WRITE are also true if so.  */
+  bool is_pre_post_modify () const
+  {
+    return flags & rtx_obj_flags::IS_PRE_POST_MODIFY;
+  }
+
+  /* True if the register is part of a multi-register hard REG.  */
+  bool is_multireg () const { return flags & rtx_obj_flags::IS_MULTIREG; }
+
+  /* True if the reference occurs in the address of a load MEM.  */
+  bool in_mem_load () const { return flags & rtx_obj_flags::IN_MEM_LOAD; }
+
+  /* True if the reference occurs in the address of a store MEM.  */
+  bool in_mem_store () const { return flags & rtx_obj_flags::IN_MEM_STORE; }
+
+  /* True if the reference occurs in any kind of MEM address.  */
+  bool in_address () const { return in_mem_load () || in_mem_store (); }
+
+  /* True if the reference occurs in a SUBREG.  */
+  bool in_subreg () const { return flags & rtx_obj_flags::IN_SUBREG; }
+
+  /* True if the reference occurs in a REG_EQUAL or REG_EQUIV note.  */
+  bool in_note () const { return flags & rtx_obj_flags::IN_NOTE; }
+
+  /* The referenced register, or MEM_REGNO for variable memory.  */
+  unsigned int regno;
+
+  /* A bitmask of rtx_obj_flags.  */
+  unsigned int flags : 16;
+
+  /* The mode of the reference.  If IS_MULTIREG, this is the mode of
+     REGNO - MULTIREG_OFFSET.  NOTE(review): stored in 8 bits, which
+     assumes every machine_mode value fits in a byte -- confirm.  */
+  machine_mode mode : 8;
+
+  /* If IS_MULTIREG, the offset of REGNO from the start of the register.
+     Also limited to 8 bits.  */
+  unsigned int multireg_offset : 8;
+};
+
+/* Construct a reference with the given fields.  Straight member
+   initialization; no validation is performed, so FLAGS, MODE and
+   MULTIREG_OFFSET are silently truncated to their bitfield widths.  */
+
+inline rtx_obj_reference::rtx_obj_reference (unsigned int regno, uint16_t flags,
+					     machine_mode mode,
+					     unsigned int multireg_offset)
+  : regno (regno),
+    flags (flags),
+    mode (mode),
+    multireg_offset (multireg_offset)
+{
+}
+
+/* Contains information about an rtx or an instruction, including a
+   list of rtx_obj_references.  The storage backing the list needs
+   to be filled in by assigning to REF_BEGIN and REF_END.  */
+
+class rtx_properties
+{
+public:
+  rtx_properties ();
+
+  void try_to_add_reg (const_rtx x, unsigned int flags = 0);
+  void try_to_add_dest (const_rtx x, unsigned int flags = 0);
+  void try_to_add_src (const_rtx x, unsigned int flags = 0);
+  void try_to_add_pattern (const_rtx pat);
+  void try_to_add_note (const_rtx x);
+  void try_to_add_insn (const rtx_insn *insn, bool include_notes);
+
+  iterator_range<rtx_obj_reference *> refs () const;
+
+  /* Return the number of rtx_obj_references that have been recorded.  */
+  size_t num_refs () const { return ref_iter - ref_begin; }
+
+  bool has_side_effects () const;
+
+  /* [REF_BEGIN, REF_END) is the maximum extent of the memory available
+     for recording references.  REF_ITER is the first unused entry.  */
+  rtx_obj_reference *ref_begin;
+  rtx_obj_reference *ref_iter;
+  rtx_obj_reference *ref_end;
+
+  /* True if the rtx includes an asm.  */
+  unsigned int has_asm : 1;
+
+  /* True if the rtx includes a call.  */
+  unsigned int has_call : 1;
+
+  /* True if the rtx includes an RTX_AUTOINC expression.  */
+  unsigned int has_pre_post_modify : 1;
+
+  /* True if the rtx contains volatile references, in the sense of
+     volatile_refs_p.  */
+  unsigned int has_volatile_refs : 1;
+
+  /* For future expansion.  */
+  unsigned int spare : 28;
+};
+
+/* Start with no backing storage and no recorded properties; the
+   subclass (or user) is expected to point REF_BEGIN/REF_ITER/REF_END
+   at real memory before any try_to_add_* call.  */
+inline rtx_properties::rtx_properties ()
+  : ref_begin (nullptr),
+    ref_iter (nullptr),
+    ref_end (nullptr),
+    has_asm (false),
+    has_call (false),
+    has_pre_post_modify (false),
+    has_volatile_refs (false),
+    spare (0)
+{
+}
+
+/* Like try_to_add_src, but treat X as being part of a REG_EQUAL or
+   REG_EQUIV note, so that every reference it records carries the
+   sticky IN_NOTE flag.  */
+
+inline void
+rtx_properties::try_to_add_note (const_rtx x)
+{
+  try_to_add_src (x, rtx_obj_flags::IN_NOTE);
+}
+
+/* Return true if the rtx has side effects, in the sense of
+   side_effects_p (except for side_effects_p's special handling
+   of combine.c clobbers).  NOTE(review): has_asm on its own is
+   deliberately not counted here.  */
+
+inline bool
+rtx_properties::has_side_effects () const
+{
+  return has_volatile_refs || has_pre_post_modify || has_call;
+}
+
+/* Return an iterator range for all the references, suitable for
+   range-based for loops.  Only the entries actually recorded
+   ([REF_BEGIN, REF_ITER)) are included, not the spare capacity.  */
+
+inline iterator_range<rtx_obj_reference *>
+rtx_properties::refs () const
+{
+  return { ref_begin, ref_iter };
+}
+
+/* The template parameter BASE is derived from rtx_properties and
+   provides backing storage for REF_BEGIN.  It has a grow () method
+   that increases the amount of memory available if the initial
+   allocation was too small.  */
+
+template<typename Base>
+class growing_rtx_properties : public Base
+{
+public:
+  template<typename... Args>
+  growing_rtx_properties (Args...);
+
+  /* Run ADD repeatedly, growing the storage between attempts, until
+     the result fits.  */
+  template<typename AddFn>
+  void repeat (AddFn add);
+
+  /* Wrappers around the try_to_* functions that always succeed.  */
+  void add_dest (const_rtx x, unsigned int flags = 0);
+  void add_src (const_rtx x, unsigned int flags = 0);
+  void add_pattern (const_rtx pat);
+  void add_note (const_rtx x);
+  void add_insn (const rtx_insn *insn, bool include_notes);
+};
+
+/* Forward the constructor arguments to the base class.  The pack is
+   taken by value, so the forward amounts to moving each argument.  */
+template<typename Base>
+template<typename... Args>
+growing_rtx_properties<Base>::growing_rtx_properties (Args... args)
+  : Base (std::forward<Args> (args)...)
+{
+}
+
+/* Perform ADD until there is enough room to hold the result.  */
+
+template<typename Base>
+template<typename AddFn>
+inline void
+growing_rtx_properties<Base>::repeat (AddFn add)
+{
+  /* References recorded before this call survive grow (); anything ADD
+     managed to record before running out of room is discarded and
+     collected again on the retry.  */
+  ptrdiff_t count = this->num_refs ();
+  for (;;)
+    {
+      add ();
+      /* This retries if the storage happened to be exactly the right size,
+	 but that's expected to be a rare case and so isn't worth
+	 optimizing for.  */
+      if (__builtin_expect (this->ref_iter != this->ref_end, 1))
+	break;
+      this->grow (count);
+    }
+}
+
+/* Like rtx_properties::try_to_add_dest, but grow the storage on
+   overflow instead of truncating the reference list.  */
+template<typename Base>
+inline void
+growing_rtx_properties<Base>::add_dest (const_rtx x, unsigned int flags)
+{
+  repeat ([&]() { this->try_to_add_dest (x, flags); });
+}
+
+/* Likewise for rtx_properties::try_to_add_src.  */
+template<typename Base>
+inline void
+growing_rtx_properties<Base>::add_src (const_rtx x, unsigned int flags)
+{
+  repeat ([&]() { this->try_to_add_src (x, flags); });
+}
+
+/* Likewise for rtx_properties::try_to_add_pattern.  */
+template<typename Base>
+inline void
+growing_rtx_properties<Base>::add_pattern (const_rtx pat)
+{
+  repeat ([&]() { this->try_to_add_pattern (pat); });
+}
+
+/* Likewise for rtx_properties::try_to_add_note.  */
+template<typename Base>
+inline void
+growing_rtx_properties<Base>::add_note (const_rtx x)
+{
+  repeat ([&]() { this->try_to_add_note (x); });
+}
+
+/* Likewise for rtx_properties::try_to_add_insn.  */
+template<typename Base>
+inline void
+growing_rtx_properties<Base>::add_insn (const rtx_insn *insn, bool include_notes)
+{
+  repeat ([&]() { this->try_to_add_insn (insn, include_notes); });
+}
+
+/* A base class for vec_rtx_properties; see there for details.  */
+
+class vec_rtx_properties_base : public rtx_properties
+{
+  /* Number of references held in the inline (on-stack) buffer before
+     grow () has to move to the heap.  */
+  static const size_t SIZE = 32;
+
+public:
+  vec_rtx_properties_base ();
+  ~vec_rtx_properties_base ();
+
+protected:
+  void grow (ptrdiff_t);
+
+private:
+  rtx_obj_reference m_storage[SIZE];
+};
+
+/* Point the inherited rtx_properties pointers at the inline buffer.  */
+inline vec_rtx_properties_base::vec_rtx_properties_base ()
+{
+  ref_begin = ref_iter = m_storage;
+  ref_end = m_storage + SIZE;
+}
+
+/* REF_BEGIN only leaves M_STORAGE if grow () moved it to the heap,
+   so that is the only case that needs freeing.  */
+inline vec_rtx_properties_base::~vec_rtx_properties_base ()
+{
+  if (__builtin_expect (ref_begin != m_storage, 0))
+    free (ref_begin);
+}
+
+/* An rtx_properties that stores its references in a temporary array.
+   Like auto_vec, the array is initially on the stack, but can switch
+   to the heap if necessary.
+
+   The reason for implementing this as a derived class is that the
+   default on-stack size should be enough for the vast majority of
+   expressions and instructions.  It's therefore not worth paying
+   the cost of conditionally calling grow code at every site that
+   records a new reference.  Instead, the rtx_properties code can use
+   trivial iterator updates for the common case, and in the rare case
+   that the vector needs to be resized, we can pay the cost of
+   collecting the references a second time.  */
+using vec_rtx_properties = growing_rtx_properties<vec_rtx_properties_base>;
+
+#endif