1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
35 #include "function-abi.h"
37 /* A list of cselib_val structures. */
40 struct elt_list *next;
44 static bool cselib_record_memory;
45 static bool cselib_preserve_constants;
46 static bool cselib_any_perm_equivs;
47 static inline void promote_debug_loc (struct elt_loc_list *l);
48 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
49 static void new_elt_loc_list (cselib_val *, rtx);
50 static void unchain_one_value (cselib_val *);
51 static void unchain_one_elt_list (struct elt_list **);
52 static void unchain_one_elt_loc_list (struct elt_loc_list **);
53 static void remove_useless_values (void);
54 static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
55 static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
56 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
57 static cselib_val *cselib_lookup_mem (rtx, int);
58 static void cselib_invalidate_regno (unsigned int, machine_mode,
60 static void cselib_invalidate_mem (rtx);
61 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
62 static void cselib_record_sets (rtx_insn *);
64 struct expand_value_data
67 cselib_expand_callback callback;
72 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
74 /* There are three ways in which cselib can look up an rtx:
75 - for a REG, the reg_values table (which is indexed by regno) is used
76 - for a MEM, we recursively look up its address and then follow the
77 addr_list of that value
78 - for everything else, we compute a hash value and go through the hash
79 table. Since different rtx's can still have the same hash value,
80 this involves walking the table entries for a given value and comparing
81 the locations of the entries with the rtx we are looking up. */
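/* A minimal usage sketch (hypothetical caller, not part of this file):
   a pass scanning insns would typically do

     cselib_val *v = cselib_lookup (x, GET_MODE (x), 1, VOIDmode);

   for each interesting operand X and then compare canonical VALUEs
   (canonical_cselib_val (v)) instead of comparing rtxes structurally.  */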
83 struct cselib_hasher : nofree_ptr_hash <cselib_val>
86 /* The rtx value and its mode (needed separately for constant
90 /* The mode of the containing MEM, if any, otherwise VOIDmode. */
93 typedef key *compare_type;
94 static inline hashval_t hash (const cselib_val *);
95 static inline bool equal (const cselib_val *, const key *);
98 /* The hash function for our hash table. The value is always computed with
99 cselib_hash_rtx when adding an element; this function just extracts the
100 hash value from a cselib_val structure. */
103 cselib_hasher::hash (const cselib_val *v)
108 /* The equality test for our hash table. The first argument V is a table
109 element (i.e. a cselib_val), while the second arg X is an rtx. We know
110 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
111 CONST of an appropriate mode. */
114 cselib_hasher::equal (const cselib_val *v, const key *x_arg)
116 struct elt_loc_list *l;
118 machine_mode mode = x_arg->mode;
119 machine_mode memmode = x_arg->memmode;
121 if (mode != GET_MODE (v->val_rtx))
124 if (GET_CODE (x) == VALUE)
125 return x == v->val_rtx;
127 /* We don't guarantee that distinct rtx's have different hash values,
128 so we need to do a comparison. */
129 for (l = v->locs; l; l = l->next)
130 if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
132 promote_debug_loc (l);
139 /* A table that enables us to look up elts by their value. */
140 static hash_table<cselib_hasher> *cselib_hash_table;
142 /* A table to hold preserved values. */
143 static hash_table<cselib_hasher> *cselib_preserved_hash_table;
145 /* This is a global so we don't have to pass this through every function.
146 It is used in new_elt_loc_list to set SETTING_INSN. */
147 static rtx_insn *cselib_current_insn;
150 /* The unique id that the next created value will take. */
150 static unsigned int next_uid;
152 /* The number of registers we had when the varrays were last resized. */
153 static unsigned int cselib_nregs;
155 /* Count values without known locations, or with only locations that
156 wouldn't have been known except for debug insns. Whenever this
157 grows too big, we remove these useless values from the table.
159 Counting values with only debug values is a bit tricky. We don't
160 want to increment n_useless_values when we create a value for a
161 debug insn, for this would get n_useless_values out of sync, but we
162 want to increment it if all locs in the list that were ever referenced
163 in nondebug insns are removed from the list.
165 In the general case, once we do that, we'd have to stop accepting
166 nondebug expressions in the loc list, to avoid having two equivalent
167 values that, without debug insns, would have been made into
168 separate values. However, because debug insns never introduce
169 equivalences themselves (no assignments), the only means for
170 growing loc lists is through nondebug assignments. If the locs
171 also happen to be referenced in debug insns, it will work just fine.
173 A consequence of this is that there's at most one debug-only loc in
174 each loc list. If we keep it in the first entry, testing whether
175 we have a debug-only loc list takes O(1).
177 Furthermore, since any additional entry in a loc list containing a
178 debug loc would have to come from an assignment (nondebug) that
179 references both the initial debug loc and the newly-equivalent loc,
180 the initial debug loc would be promoted to a nondebug loc, and the
181 loc list would not contain debug locs any more.
183 So the only case we have to be careful with in order to keep
184 n_useless_values in sync between debug and nondebug compilations is
185 to avoid incrementing n_useless_values when removing the single loc
186 from a value that turns out to not appear outside debug values. We
187 increment n_useless_debug_values instead, and leave such values
188 alone until, for other reasons, we garbage-collect useless
190 static int n_useless_values;
191 static int n_useless_debug_values;
193 /* Count values whose locs have been taken exclusively from debug
194 insns for the entire life of the value. */
195 static int n_debug_values;
197 /* Number of useless values before we remove them from the hash table. */
198 #define MAX_USELESS_VALUES 32
200 /* This table maps from register number to values. It does not
201 contain pointers to cselib_val structures, but rather elt_lists.
202 The purpose is to be able to refer to the same register in
203 different modes. The first element of the list defines the mode in
204 which the register was set; if the mode is unknown or the value is
205 no longer valid in that mode, ELT will be NULL for the first
207 static struct elt_list **reg_values;
208 static unsigned int reg_values_size;
209 #define REG_VALUES(i) reg_values[i]
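/* For illustration only (a sketch, not code from this file): after a
   DImode set of pseudo 100 and a later SImode lookup of that register,
   REG_VALUES (100) would be a two-node chain

     { elt = DImode VALUE of the set } -> { elt = SImode VALUE } -> NULL

   with the setting mode always in the first node (or a NULL elt there
   if that mode is unknown or no longer valid).  */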
211 /* The largest number of hard regs used by any entry added to the
212 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
213 static unsigned int max_value_regs;
215 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
216 in cselib_clear_table() for fast emptying. */
217 static unsigned int *used_regs;
218 static unsigned int n_used_regs;
220 /* We pass this to cselib_invalidate_mem to invalidate all of
221 memory for a non-const call instruction. */
222 static GTY(()) rtx callmem;
224 /* Set by discard_useless_locs if it deleted the last location of any
226 static int values_became_useless;
228 /* Used as stop element of the containing_mem list so we can check
229 presence in the list by checking the next pointer. */
230 static cselib_val dummy_val;
232 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
233 that is constant through the whole function and should never be
235 static cselib_val *cfa_base_preserved_val;
236 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
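/* A hedged sketch of how a client pass might establish this, assuming
   FP_VAL is the cselib_val obtained by looking up the eliminated frame
   pointer register:

     cselib_preserve_value (fp_val);
     cselib_preserve_cfa_base_value (fp_val, REGNO (frame_pointer_rtx));

   after which the value survives cselib_reset_table and is skipped by
   cselib_invalidate_regno.  */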
238 /* Used to list all values that contain a memory reference.
239 May or may not contain the useless values - the list is compacted
240 each time memory is invalidated. */
241 static cselib_val *first_containing_mem = &dummy_val;
243 static object_allocator<elt_list> elt_list_pool ("elt_list");
244 static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
245 static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");
247 static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));
249 /* If nonnull, cselib will call this function before freeing useless
250 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
251 void (*cselib_discard_hook) (cselib_val *);
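/* A minimal sketch of installing the hook (hypothetical client code):

     static void
     my_discard_fn (cselib_val *v)
     {
       /* Inspect or dump V before its storage is reused.  */
     }
     ...
     cselib_discard_hook = my_discard_fn;

   with the hook reset to NULL once the client is done with cselib.  */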
253 /* If nonnull, cselib will call this function before recording sets or
254 even clobbering outputs of INSN. All the recorded sets will be
255 represented in the array sets[n_sets]. new_val_min can be used to
256 tell whether values present in sets are introduced by this
258 void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
261 #define PRESERVED_VALUE_P(RTX) \
262 (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
264 #define SP_BASED_VALUE_P(RTX) \
265 (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)
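/* Both flags live on the VALUE rtx itself; e.g. a client holding a
   cselib_val *V can write (a sketch, mirroring what the helpers below
   do internally)

     if (!PRESERVED_VALUE_P (v->val_rtx))
       cselib_preserve_value (v);

   to make sure V is not garbage-collected as useless.  */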
269 /* Allocate a struct elt_list and fill in its two elements with the
272 static inline struct elt_list *
273 new_elt_list (struct elt_list *next, cselib_val *elt)
275 elt_list *el = elt_list_pool.allocate ();
281 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
285 new_elt_loc_list (cselib_val *val, rtx loc)
287 struct elt_loc_list *el, *next = val->locs;
289 gcc_checking_assert (!next || !next->setting_insn
290 || !DEBUG_INSN_P (next->setting_insn)
291 || cselib_current_insn == next->setting_insn);
293 /* If we're creating the first loc in a debug insn context, we've
294 just created a debug value. Count it. */
295 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
298 val = canonical_cselib_val (val);
301 if (GET_CODE (loc) == VALUE)
303 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
305 gcc_checking_assert (PRESERVED_VALUE_P (loc)
306 == PRESERVED_VALUE_P (val->val_rtx));
308 if (val->val_rtx == loc)
310 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
312 /* Reverse the insertion. */
313 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
317 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
319 if (CSELIB_VAL_PTR (loc)->locs)
321 /* Bring all locs from LOC to VAL. */
322 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
324 /* Adjust values that have LOC as canonical so that VAL
325 becomes their canonical. */
326 if (el->loc && GET_CODE (el->loc) == VALUE)
328 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
330 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
333 el->next = val->locs;
334 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
337 if (CSELIB_VAL_PTR (loc)->addr_list)
339 /* Bring addr_list into the canonical node. */
340 struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
343 last->next = val->addr_list;
344 val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
345 CSELIB_VAL_PTR (loc)->addr_list = NULL;
348 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
349 && val->next_containing_mem == NULL)
351 /* Add VAL to the containing_mem list after LOC. LOC will
352 be removed when we notice it doesn't contain any
354 val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
355 CSELIB_VAL_PTR (loc)->next_containing_mem = val;
358 /* Chain LOC back to VAL. */
359 el = elt_loc_list_pool.allocate ();
360 el->loc = val->val_rtx;
361 el->setting_insn = cselib_current_insn;
363 CSELIB_VAL_PTR (loc)->locs = el;
366 el = elt_loc_list_pool.allocate ();
368 el->setting_insn = cselib_current_insn;
373 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
374 originating from a debug insn, maintaining the debug values
378 promote_debug_loc (struct elt_loc_list *l)
380 if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
381 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
384 l->setting_insn = cselib_current_insn;
385 if (cselib_preserve_constants && l->next)
387 gcc_assert (l->next->setting_insn
388 && DEBUG_INSN_P (l->next->setting_insn)
390 l->next->setting_insn = cselib_current_insn;
393 gcc_assert (!l->next);
397 /* The elt_list at *PL is no longer needed. Unchain it and free its
401 unchain_one_elt_list (struct elt_list **pl)
403 struct elt_list *l = *pl;
406 elt_list_pool.remove (l);
409 /* Likewise for elt_loc_lists. */
412 unchain_one_elt_loc_list (struct elt_loc_list **pl)
414 struct elt_loc_list *l = *pl;
417 elt_loc_list_pool.remove (l);
420 /* Likewise for cselib_vals. This also frees the addr_list associated with
424 unchain_one_value (cselib_val *v)
427 unchain_one_elt_list (&v->addr_list);
429 cselib_val_pool.remove (v);
432 /* Remove all entries from the hash table. Also used during
436 cselib_clear_table (void)
438 cselib_reset_table (1);
441 /* Return TRUE if V is a constant, a function invariant or a VALUE
442 equivalence; FALSE otherwise. */
445 invariant_or_equiv_p (cselib_val *v)
447 struct elt_loc_list *l;
449 if (v == cfa_base_preserved_val)
452 /* Keep VALUE equivalences around. */
453 for (l = v->locs; l; l = l->next)
454 if (GET_CODE (l->loc) == VALUE)
458 && v->locs->next == NULL)
460 if (CONSTANT_P (v->locs->loc)
461 && (GET_CODE (v->locs->loc) != CONST
462 || !references_value_p (v->locs->loc, 0)))
464 /* Although a debug expr may be bound to different expressions,
465 we can preserve it as if it was constant, to get unification
466 and proper merging within var-tracking. */
467 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
468 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
469 || GET_CODE (v->locs->loc) == ENTRY_VALUE
470 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
473 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
474 if (GET_CODE (v->locs->loc) == PLUS
475 && CONST_INT_P (XEXP (v->locs->loc, 1))
476 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
477 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
484 /* Remove from hash table all VALUEs except constants, function
485 invariants and VALUE equivalences. */
488 preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
492 if (invariant_or_equiv_p (v))
494 cselib_hasher::key lookup = {
495 GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
498 = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
504 cselib_hash_table->clear_slot (x);
509 /* Remove all entries from the hash table, arranging for the next
510 value to be numbered NUM. */
513 cselib_reset_table (unsigned int num)
519 if (cfa_base_preserved_val)
521 unsigned int regno = cfa_base_preserved_regno;
522 unsigned int new_used_regs = 0;
523 for (i = 0; i < n_used_regs; i++)
524 if (used_regs[i] == regno)
530 REG_VALUES (used_regs[i]) = 0;
531 gcc_assert (new_used_regs == 1);
532 n_used_regs = new_used_regs;
533 used_regs[0] = regno;
535 = hard_regno_nregs (regno,
536 GET_MODE (cfa_base_preserved_val->locs->loc));
540 for (i = 0; i < n_used_regs; i++)
541 REG_VALUES (used_regs[i]) = 0;
545 if (cselib_preserve_constants)
546 cselib_hash_table->traverse <void *, preserve_constants_and_equivs>
550 cselib_hash_table->empty ();
551 gcc_checking_assert (!cselib_any_perm_equivs);
554 n_useless_values = 0;
555 n_useless_debug_values = 0;
560 first_containing_mem = &dummy_val;
563 /* Return the number of the next value that will be generated. */
566 cselib_get_next_uid (void)
571 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
572 INSERTing if requested. When X is part of the address of a MEM,
573 MEMMODE should specify the mode of the MEM. */
576 cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
577 enum insert_option insert, machine_mode memmode)
579 cselib_val **slot = NULL;
580 cselib_hasher::key lookup = { mode, x, memmode };
581 if (cselib_preserve_constants)
582 slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
585 slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
589 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
590 only return true for values which point to a cselib_val whose value
591 element has been set to zero, which implies the cselib_val will be
595 references_value_p (const_rtx x, int only_useless)
597 const enum rtx_code code = GET_CODE (x);
598 const char *fmt = GET_RTX_FORMAT (code);
601 if (GET_CODE (x) == VALUE
602 && (! only_useless ||
603 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
608 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
610 else if (fmt[i] == 'E')
611 for (j = 0; j < XVECLEN (x, i); j++)
612 if (references_value_p (XVECEXP (x, i, j), only_useless))
619 /* For all locations found in X, delete locations that reference useless
620 values (i.e. values without any location). Called through
624 discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
627 struct elt_loc_list **p = &v->locs;
628 bool had_locs = v->locs != NULL;
629 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
633 if (references_value_p ((*p)->loc, 1))
634 unchain_one_elt_loc_list (p);
639 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
641 if (setting_insn && DEBUG_INSN_P (setting_insn))
642 n_useless_debug_values++;
645 values_became_useless = 1;
650 /* If X is a value with no locations, remove it from the hashtable. */
653 discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
657 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
659 if (cselib_discard_hook)
660 cselib_discard_hook (v);
662 CSELIB_VAL_PTR (v->val_rtx) = NULL;
663 cselib_hash_table->clear_slot (x);
664 unchain_one_value (v);
671 /* Clean out useless values (i.e. those which no longer have locations
672 associated with them) from the hash table. */
675 remove_useless_values (void)
679 /* First pass: eliminate locations that reference the value. That in
680 turn can make more values useless. */
683 values_became_useless = 0;
684 cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
686 while (values_became_useless);
688 /* Second pass: actually remove the values. */
690 p = &first_containing_mem;
691 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
692 if (v->locs && v == canonical_cselib_val (v))
695 p = &(*p)->next_containing_mem;
699 n_useless_values += n_useless_debug_values;
700 n_debug_values -= n_useless_debug_values;
701 n_useless_debug_values = 0;
703 cselib_hash_table->traverse <void *, discard_useless_values> (NULL);
705 gcc_assert (!n_useless_values);
708 /* Arrange for a value to not be removed from the hash table even if
709 it becomes useless. */
712 cselib_preserve_value (cselib_val *v)
714 PRESERVED_VALUE_P (v->val_rtx) = 1;
717 /* Test whether a value is preserved. */
720 cselib_preserved_value_p (cselib_val *v)
722 return PRESERVED_VALUE_P (v->val_rtx);
725 /* Arrange for a REG value to be assumed constant through the whole function,
726 never invalidated and preserved across cselib_reset_table calls. */
729 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
731 if (cselib_preserve_constants
733 && REG_P (v->locs->loc))
735 cfa_base_preserved_val = v;
736 cfa_base_preserved_regno = regno;
740 /* Clean all non-constant expressions in the hash table, but retain
744 cselib_preserve_only_values (void)
748 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
749 cselib_invalidate_regno (i, reg_raw_mode[i]);
751 cselib_invalidate_mem (callmem);
753 remove_useless_values ();
755 gcc_assert (first_containing_mem == &dummy_val);
758 /* Arrange for a value to be marked as based on stack pointer
759 for find_base_term purposes. */
762 cselib_set_value_sp_based (cselib_val *v)
764 SP_BASED_VALUE_P (v->val_rtx) = 1;
767 /* Test whether a value is based on stack pointer for
768 find_base_term purposes. */
771 cselib_sp_based_value_p (cselib_val *v)
773 return SP_BASED_VALUE_P (v->val_rtx);
776 /* Return the mode in which a register was last set. If X is not a
777 register, return its mode. If the mode in which the register was
778 set is not known, or the value was already clobbered, return
782 cselib_reg_set_mode (const_rtx x)
787 if (REG_VALUES (REGNO (x)) == NULL
788 || REG_VALUES (REGNO (x))->elt == NULL)
791 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
794 /* If x is a PLUS or an autoinc operation, expand the operation,
795 storing the offset, if any, in *OFF. */
798 autoinc_split (rtx x, rtx *off, machine_mode memmode)
800 switch (GET_CODE (x))
807 if (memmode == VOIDmode)
810 *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
814 if (memmode == VOIDmode)
817 *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
833 /* Return nonzero if we can prove that X and Y contain the same value,
834 taking our gathered information into account. MEMMODE holds the
835 mode of the enclosing MEM, if any, as required to deal with autoinc
836 addressing modes. If X and Y are not (known to be) part of
837 addresses, MEMMODE should be VOIDmode. */
840 rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
846 if (REG_P (x) || MEM_P (x))
848 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
854 if (REG_P (y) || MEM_P (y))
856 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
865 if (GET_CODE (x) == VALUE)
867 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
868 struct elt_loc_list *l;
870 if (GET_CODE (y) == VALUE)
871 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
876 for (l = e->locs; l; l = l->next)
880 /* Avoid infinite recursion. We know we have the canonical
881 value, so we can just skip any values in the equivalence
883 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
885 else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
891 else if (GET_CODE (y) == VALUE)
893 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
894 struct elt_loc_list *l;
899 for (l = e->locs; l; l = l->next)
903 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
905 else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
912 if (GET_MODE (x) != GET_MODE (y))
915 if (GET_CODE (x) != GET_CODE (y))
917 rtx xorig = x, yorig = y;
918 rtx xoff = NULL, yoff = NULL;
920 x = autoinc_split (x, &xoff, memmode);
921 y = autoinc_split (y, &yoff, memmode);
926 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
929 /* Don't recurse if nothing changed. */
930 if (x != xorig || y != yorig)
931 return rtx_equal_for_cselib_1 (x, y, memmode, depth);
936 /* These won't be handled correctly by the code below. */
937 switch (GET_CODE (x))
943 case DEBUG_IMPLICIT_PTR:
944 return DEBUG_IMPLICIT_PTR_DECL (x)
945 == DEBUG_IMPLICIT_PTR_DECL (y);
947 case DEBUG_PARAMETER_REF:
948 return DEBUG_PARAMETER_REF_DECL (x)
949 == DEBUG_PARAMETER_REF_DECL (y);
952 /* ENTRY_VALUEs are function invariant; it is thus undesirable to
953 use rtx_equal_for_cselib_1 to compare the operands. */
954 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
957 return label_ref_label (x) == label_ref_label (y);
960 return REGNO (x) == REGNO (y);
963 /* We have to compare any autoinc operations in the addresses
964 using this MEM's mode. */
965 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
973 fmt = GET_RTX_FORMAT (code);
975 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
982 if (XWINT (x, i) != XWINT (y, i))
988 if (XINT (x, i) != XINT (y, i))
993 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
999 /* Two vectors must have the same length. */
1000 if (XVECLEN (x, i) != XVECLEN (y, i))
1003 /* And the corresponding elements must match. */
1004 for (j = 0; j < XVECLEN (x, i); j++)
1005 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
1006 XVECEXP (y, i, j), memmode, depth))
1012 && targetm.commutative_p (x, UNKNOWN)
1013 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
1015 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
1018 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
1025 if (strcmp (XSTR (x, i), XSTR (y, i)))
1030 /* These are just backpointers, so they don't matter. */
1037 /* It is believed that rtx's at this level will never
1038 contain anything but integers and other rtx's,
1039 except for within LABEL_REFs and SYMBOL_REFs. */
1047 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1048 For registers and memory locations, we look up their cselib_val structure
1049 and return its VALUE element.
1050 Possible reasons for returning 0 are: the object is volatile, or we couldn't
1051 find a register or memory location in the table and CREATE is zero. If
1052 CREATE is nonzero, table elts are created for regs and mem.
1053 N.B. this hash function returns the same hash value for RTXes that
1054 differ only in the order of operands, thus it is suitable for comparisons
1055 that take commutativity into account.
1056 If we wanted to also support associative rules, we'd have to use a different
1057 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
1058 MEMMODE indicates the mode of an enclosing MEM, and it's only
1059 used to compute autoinc values.
1060 We used to have a MODE argument for hashing for CONST_INTs, but that
1061 didn't make sense, since it caused spurious hash differences between
1062 (set (reg:SI 1) (const_int))
1063 (plus:SI (reg:SI 2) (reg:SI 1))
1065 (plus:SI (reg:SI 2) (const_int))
1066 If the mode is important in any context, it must be checked specifically
1067 in a comparison anyway, since relying on hash differences is unsafe. */
1070 cselib_hash_rtx (rtx x, int create, machine_mode memmode)
1077 unsigned int hash = 0;
1079 code = GET_CODE (x);
1080 hash += (unsigned) code + (unsigned) GET_MODE (x);
1085 e = CSELIB_VAL_PTR (x);
1090 e = cselib_lookup (x, GET_MODE (x), create, memmode);
1097 hash += ((unsigned) DEBUG_EXPR << 7)
1098 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1099 return hash ? hash : (unsigned int) DEBUG_EXPR;
1101 case DEBUG_IMPLICIT_PTR:
1102 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1103 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1104 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1106 case DEBUG_PARAMETER_REF:
1107 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1108 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1109 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1112 /* ENTRY_VALUEs are function invariant; thus try to avoid
1113 recursing on argument if ENTRY_VALUE is one of the
1114 forms emitted by expand_debug_expr, otherwise
1115 ENTRY_VALUE hash would depend on the current value
1116 in some register or memory. */
1117 if (REG_P (ENTRY_VALUE_EXP (x)))
1118 hash += (unsigned int) REG
1119 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1120 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1121 else if (MEM_P (ENTRY_VALUE_EXP (x))
1122 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1123 hash += (unsigned int) MEM
1124 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1125 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1127 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1128 return hash ? hash : (unsigned int) ENTRY_VALUE;
1131 hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
1132 return hash ? hash : (unsigned int) CONST_INT;
1134 case CONST_WIDE_INT:
1135 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
1136 hash += CONST_WIDE_INT_ELT (x, i);
1139 case CONST_POLY_INT:
1143 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1144 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
1149 /* This is like the general case, except that it only counts
1150 the integers representing the constant. */
1151 hash += (unsigned) code + (unsigned) GET_MODE (x);
1152 if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
1153 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1154 + (unsigned) CONST_DOUBLE_HIGH (x));
1156 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1157 return hash ? hash : (unsigned int) CONST_DOUBLE;
1160 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1161 hash += fixed_hash (CONST_FIXED_VALUE (x));
1162 return hash ? hash : (unsigned int) CONST_FIXED;
1169 units = const_vector_encoded_nelts (x);
1171 for (i = 0; i < units; ++i)
1173 elt = CONST_VECTOR_ENCODED_ELT (x, i);
1174 hash += cselib_hash_rtx (elt, 0, memmode);
1180 /* Assume there is only one rtx object for any given label. */
1182 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1183 differences and differences between each stage's debugging dumps. */
1184 hash += (((unsigned int) LABEL_REF << 7)
1185 + CODE_LABEL_NUMBER (label_ref_label (x)));
1186 return hash ? hash : (unsigned int) LABEL_REF;
1190 /* Don't hash on the symbol's address to avoid bootstrap differences.
1191 Different hash values may cause expressions to be recorded in
1192 different orders and thus different registers to be used in the
1193 final assembler. This also avoids differences in the dump files
1194 between various stages. */
1196 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1199 h += (h << 7) + *p++; /* ??? revisit */
1201 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1202 return hash ? hash : (unsigned int) SYMBOL_REF;
1207 /* We can't compute these without knowing the MEM mode. */
1208 gcc_assert (memmode != VOIDmode);
1209 offset = GET_MODE_SIZE (memmode);
1210 if (code == PRE_DEC)
1212 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1213 like (mem:MEMMODE (plus (reg) (const_int I))). */
1214 hash += (unsigned) PLUS - (unsigned)code
1215 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1216 + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
1218 return hash ? hash : 1 + (unsigned) PLUS;
1221 gcc_assert (memmode != VOIDmode);
1222 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1227 gcc_assert (memmode != VOIDmode);
1228 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1233 case UNSPEC_VOLATILE:
1237 if (MEM_VOLATILE_P (x))
1246 i = GET_RTX_LENGTH (code) - 1;
1247 fmt = GET_RTX_FORMAT (code);
1254 rtx tem = XEXP (x, i);
1255 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1264 for (j = 0; j < XVECLEN (x, i); j++)
1266 unsigned int tem_hash
1267 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1278 const unsigned char *p = (const unsigned char *) XSTR (x, i);
1287 hash += XINT (x, i);
1291 hash += constant_lower_bound (SUBREG_BYTE (x));
1304 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
1307 /* Create a new value structure for VALUE and initialize it. The mode of the
1310 static inline cselib_val *
1311 new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
1313 cselib_val *e = cselib_val_pool.allocate ();
1316 gcc_assert (next_uid);
1319 e->uid = next_uid++;
1320 /* We use an alloc pool to allocate this RTL construct because it
1321 accounts for about 8% of the overall memory usage. We know
1322 precisely when we can have VALUE RTXen (when cselib is active)
1323 so we don't need to put them in garbage collected memory.
1324 ??? Why should a VALUE be an RTX in the first place? */
1325 e->val_rtx = (rtx_def*) value_pool.allocate ();
1326 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1327 PUT_CODE (e->val_rtx, VALUE);
1328 PUT_MODE (e->val_rtx, mode);
1329 CSELIB_VAL_PTR (e->val_rtx) = e;
1332 e->next_containing_mem = 0;
1334 if (dump_file && (dump_flags & TDF_CSELIB))
1336 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1337 if (flag_dump_noaddr || flag_dump_unnumbered)
1338 fputs ("# ", dump_file);
1340 fprintf (dump_file, "%p ", (void*)e);
1341 print_rtl_single (dump_file, x);
1342 fputc ('\n', dump_file);
1348 /* ADDR_ELT is a value that is used as an address. MEM_ELT is the value that
1349 contains the data at this address. X is a MEM that represents the
1350 value. Update the two value structures to represent this situation. */
1353 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1355 addr_elt = canonical_cselib_val (addr_elt);
1356 mem_elt = canonical_cselib_val (mem_elt);
1358 /* Avoid duplicates. */
1359 addr_space_t as = MEM_ADDR_SPACE (x);
1360 for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
1362 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
1363 && MEM_ADDR_SPACE (l->loc) == as)
1365 promote_debug_loc (l);
1369 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1370 new_elt_loc_list (mem_elt,
1371 replace_equiv_address_nv (x, addr_elt->val_rtx));
1372 if (mem_elt->next_containing_mem == NULL)
1374 mem_elt->next_containing_mem = first_containing_mem;
1375 first_containing_mem = mem_elt;
1379 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1380 If CREATE, make a new one if we haven't seen it before. */
1383 cselib_lookup_mem (rtx x, int create)
1385 machine_mode mode = GET_MODE (x);
1386 machine_mode addr_mode;
1389 cselib_val *mem_elt;
1391 if (MEM_VOLATILE_P (x) || mode == BLKmode
1392 || !cselib_record_memory
1393 || (FLOAT_MODE_P (mode) && flag_float_store))
1396 addr_mode = GET_MODE (XEXP (x, 0));
1397 if (addr_mode == VOIDmode)
1400 /* Look up the value for the address. */
1401 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1404 addr = canonical_cselib_val (addr);
1406 /* Find a value that describes a value of our mode at that address. */
1407 addr_space_t as = MEM_ADDR_SPACE (x);
1408 for (elt_list *l = addr->addr_list; l; l = l->next)
1409 if (GET_MODE (l->elt->val_rtx) == mode)
1411 for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
1412 if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
1414 promote_debug_loc (l->elt->locs);
1422 mem_elt = new_cselib_val (next_uid, mode, x);
1423 add_mem_for_addr (addr, mem_elt, x);
1424 slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
1429 /* Search through the possible substitutions in P. We prefer a non-reg
1430 substitution because this allows us to expand the tree further. If
1431 we find just a reg, take the lowest regno. There may be several
1432 non-reg results; we just take the first one because they will all
1433 expand to the same place. */
1436 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1439 rtx reg_result = NULL;
1440 unsigned int regno = UINT_MAX;
1441 struct elt_loc_list *p_in = p;
1443 for (; p; p = p->next)
1445 /* Return these right away to avoid returning stack pointer based
1446 expressions for frame pointer and vice versa, which is something
1447 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1448 for more details. */
1450 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1451 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1452 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1453 || REGNO (p->loc) == cfa_base_preserved_regno))
1455 /* Avoid infinite recursion trying to expand a reg into a
1457 if ((REG_P (p->loc))
1458 && (REGNO (p->loc) < regno)
1459 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1461 reg_result = p->loc;
1462 regno = REGNO (p->loc);
1464 /* Avoid infinite recursion and do not try to expand the
1466 else if (GET_CODE (p->loc) == VALUE
1467 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1469 else if (!REG_P (p->loc))
1472 if (dump_file && (dump_flags & TDF_CSELIB))
1474 print_inline_rtx (dump_file, p->loc, 0);
1475 fprintf (dump_file, "\n");
1477 if (GET_CODE (p->loc) == LO_SUM
1478 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1480 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1481 && XEXP (note, 0) == XEXP (p->loc, 1))
1482 return XEXP (p->loc, 1);
1483 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1490 if (regno != UINT_MAX)
1493 if (dump_file && (dump_flags & TDF_CSELIB))
1494 fprintf (dump_file, "r%d\n", regno);
1496 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1501 if (dump_file && (dump_flags & TDF_CSELIB))
1505 print_inline_rtx (dump_file, reg_result, 0);
1506 fprintf (dump_file, "\n");
1509 fprintf (dump_file, "NULL\n");
1515 /* Forward substitute and expand an expression out to its roots.
1516 This is the opposite of common subexpression elimination. Because local value
1517 numbering is such a weak optimization, the expanded expression is
1518 pretty much unique (not from a pointer-equals point of view but
1519 from a tree-shape point of view).
1521 This function returns NULL if the expansion fails. The expansion
1522 will fail if there is no value number for one of the operands or if
1523 one of the operands has been overwritten between the current insn
1524 and the beginning of the basic block. For instance x has no
1530 REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1531 It is clear on return. */
1534 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1536 struct expand_value_data evd;
1538 evd.regs_active = regs_active;
1539 evd.callback = NULL;
1540 evd.callback_arg = NULL;
1543 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1546 /* Same as cselib_expand_value_rtx, but using a callback to try to
1547 resolve some expressions. The CB function should return ORIG if it
1548 can't or does not want to deal with a certain RTX. Any other
1549 return value, including NULL, will be used as the expansion for
1550 VALUE, without any further changes. */
1553 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1554 cselib_expand_callback cb, void *data)
1556 struct expand_value_data evd;
1558 evd.regs_active = regs_active;
1560 evd.callback_arg = data;
1563 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1566 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1567 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1568 would return NULL or non-NULL, without allocating new rtx. */
1571 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1572 cselib_expand_callback cb, void *data)
1574 struct expand_value_data evd;
1576 evd.regs_active = regs_active;
1578 evd.callback_arg = data;
1581 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1584 /* Internal implementation of cselib_expand_value_rtx and
1585 cselib_expand_value_rtx_cb. */
1588 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1594 const char *format_ptr;
1597 code = GET_CODE (orig);
1599 /* For the context of dse, if we end up expanding into a huge tree, we
1600 will not have a useful address, so we might as well just give up
1609 struct elt_list *l = REG_VALUES (REGNO (orig));
1611 if (l && l->elt == NULL)
1613 for (; l; l = l->next)
1614 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1617 unsigned regno = REGNO (orig);
1619 /* The only thing that we are not willing to do (this
1620 is a requirement of dse and if other potential uses
1621 need this function we should add a parm to control
1622 it) is that we will not substitute the
1623 STACK_POINTER_REGNUM, FRAME_POINTER or the
1626 These expansions confuse the code that notices that
1627 stores into the frame go dead at the end of the
1628 function and that the frame is not affected by calls
1629 to subroutines. If you allow the
1630 STACK_POINTER_REGNUM substitution, then dse will
1631 think that parameter pushing also goes dead which is
1632 wrong. If you allow the FRAME_POINTER or the
1633 HARD_FRAME_POINTER then you lose the opportunity to
1634 make the frame assumptions. */
1635 if (regno == STACK_POINTER_REGNUM
1636 || regno == FRAME_POINTER_REGNUM
1637 || regno == HARD_FRAME_POINTER_REGNUM
1638 || regno == cfa_base_preserved_regno)
1641 bitmap_set_bit (evd->regs_active, regno);
1643 if (dump_file && (dump_flags & TDF_CSELIB))
1644 fprintf (dump_file, "expanding: r%d into: ", regno);
1646 result = expand_loc (l->elt->locs, evd, max_depth);
1647 bitmap_clear_bit (evd->regs_active, regno);
1663 /* SCRATCH rtxes must be shared because they represent distinct values. */
1667 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1672 if (shared_const_p (orig))
1682 subreg = evd->callback (orig, evd->regs_active, max_depth,
1688 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1692 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1693 GET_MODE (SUBREG_REG (orig)),
1694 SUBREG_BYTE (orig));
1696 || (GET_CODE (scopy) == SUBREG
1697 && !REG_P (SUBREG_REG (scopy))
1698 && !MEM_P (SUBREG_REG (scopy))))
1708 if (dump_file && (dump_flags & TDF_CSELIB))
1710 fputs ("\nexpanding ", dump_file);
1711 print_rtl_single (dump_file, orig);
1712 fputs (" into...", dump_file);
1717 result = evd->callback (orig, evd->regs_active, max_depth,
1724 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1730 return evd->callback (orig, evd->regs_active, max_depth,
1738 /* Copy the various flags, fields, and other information. We assume
1739 that all fields need copying, and then clear the fields that should
1740 not be copied. That is the sensible default behavior, and forces
1741 us to explicitly document why we are *not* copying a flag. */
1745 copy = shallow_copy_rtx (orig);
1747 format_ptr = GET_RTX_FORMAT (code);
1749 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1750 switch (*format_ptr++)
1753 if (XEXP (orig, i) != NULL)
1755 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1760 XEXP (copy, i) = result;
1766 if (XVEC (orig, i) != NULL)
1769 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1770 for (j = 0; j < XVECLEN (orig, i); j++)
1772 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1773 evd, max_depth - 1);
1777 XVECEXP (copy, i, j) = result;
1791 /* These are left unchanged. */
1801 mode = GET_MODE (copy);
1802 /* If an operand has been simplified into CONST_INT, which doesn't
1803 have a mode and the mode isn't derivable from whole rtx's mode,
1804 try simplify_*_operation first with mode from original's operand
1805 and as a fallback wrap CONST_INT into gen_rtx_CONST. */
1807 switch (GET_RTX_CLASS (code))
1810 if (CONST_INT_P (XEXP (copy, 0))
1811 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1813 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1814 GET_MODE (XEXP (orig, 0)));
1819 case RTX_COMM_ARITH:
1821 /* These expressions can derive operand modes from the whole rtx's mode. */
1824 case RTX_BITFIELD_OPS:
1825 if (CONST_INT_P (XEXP (copy, 0))
1826 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1828 scopy = simplify_ternary_operation (code, mode,
1829 GET_MODE (XEXP (orig, 0)),
1830 XEXP (copy, 0), XEXP (copy, 1),
1837 case RTX_COMM_COMPARE:
1838 if (CONST_INT_P (XEXP (copy, 0))
1839 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1840 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1841 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1843 scopy = simplify_relational_operation (code, mode,
1844 (GET_MODE (XEXP (orig, 0))
1846 ? GET_MODE (XEXP (orig, 0))
1847 : GET_MODE (XEXP (orig, 1)),
1857 scopy = simplify_rtx (copy);
1863 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1864 with VALUE expressions. This way, it becomes independent of changes
1865 to registers and memory.
1866 X isn't actually modified; if modifications are needed, new rtl is
1867 allocated. However, the return value can share rtl with X.
1868 If X is within a MEM, MEMMODE must be the mode of the MEM. */
1871 cselib_subst_to_values (rtx x, machine_mode memmode)
1873 enum rtx_code code = GET_CODE (x);
1874 const char *fmt = GET_RTX_FORMAT (code);
1884 l = REG_VALUES (REGNO (x));
1885 if (l && l->elt == NULL)
1887 for (; l; l = l->next)
1888 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1889 return l->elt->val_rtx;
1894 e = cselib_lookup_mem (x, 0);
1895 /* This used to happen for autoincrements, but we deal with them
1896 properly now. Remove the if stmt for the next release. */
1899 /* Assign a value that doesn't match any other. */
1900 e = new_cselib_val (next_uid, GET_MODE (x), x);
1905 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1915 gcc_assert (memmode != VOIDmode);
1916 offset = GET_MODE_SIZE (memmode);
1917 if (code == PRE_DEC)
1919 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1920 XEXP (x, 0), offset),
1924 gcc_assert (memmode != VOIDmode);
1925 return cselib_subst_to_values (XEXP (x, 1), memmode);
1930 gcc_assert (memmode != VOIDmode);
1931 return cselib_subst_to_values (XEXP (x, 0), memmode);
1937 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1941 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1943 if (t != XEXP (x, i))
1946 copy = shallow_copy_rtx (x);
1950 else if (fmt[i] == 'E')
1954 for (j = 0; j < XVECLEN (x, i); j++)
1956 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1958 if (t != XVECEXP (x, i, j))
1960 if (XVEC (x, i) == XVEC (copy, i))
1963 copy = shallow_copy_rtx (x);
1964 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1966 XVECEXP (copy, i, j) = t;
1975 /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
1978 cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
1981 gcc_assert (!cselib_current_insn);
1982 cselib_current_insn = insn;
1983 ret = cselib_subst_to_values (x, memmode);
1984 cselib_current_insn = NULL;
1988 /* Look up the rtl expression X in our tables and return the value it
1989 has. If CREATE is zero, we return NULL if we don't know the value.
1990 Otherwise, we create a new one if possible, using mode MODE if X
1991 doesn't have a mode (i.e. because it's a constant). When X is part
1992 of an address, MEMMODE should be the mode of the enclosing MEM if
1993 we're tracking autoinc expressions. */
1996 cselib_lookup_1 (rtx x, machine_mode mode,
1997 int create, machine_mode memmode)
2001 unsigned int hashval;
2003 if (GET_MODE (x) != VOIDmode)
2004 mode = GET_MODE (x);
2006 if (GET_CODE (x) == VALUE)
2007 return CSELIB_VAL_PTR (x);
2012 unsigned int i = REGNO (x);
2015 if (l && l->elt == NULL)
2017 for (; l; l = l->next)
2018 if (mode == GET_MODE (l->elt->val_rtx))
2020 promote_debug_loc (l->elt->locs);
2027 if (i < FIRST_PSEUDO_REGISTER)
2029 unsigned int n = hard_regno_nregs (i, mode);
2031 if (n > max_value_regs)
2035 e = new_cselib_val (next_uid, GET_MODE (x), x);
2036 new_elt_loc_list (e, x);
2038 scalar_int_mode int_mode;
2039 if (REG_VALUES (i) == 0)
2041 /* Maintain the invariant that the first entry of
2042 REG_VALUES, if present, must be the value used to set the
2043 register, or NULL. */
2044 used_regs[n_used_regs++] = i;
2045 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
2047 else if (cselib_preserve_constants
2048 && is_int_mode (mode, &int_mode))
2050 /* During var-tracking, try harder to find equivalences
2051 for SUBREGs. If a setter sets say a DImode register
2052 and the user uses that register only in SImode, add a lowpart
2054 struct elt_list *lwider = NULL;
2055 scalar_int_mode lmode;
2057 if (l && l->elt == NULL)
2059 for (; l; l = l->next)
2060 if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
2061 && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
2063 || partial_subreg_p (lmode,
2064 GET_MODE (lwider->elt->val_rtx))))
2066 struct elt_loc_list *el;
2067 if (i < FIRST_PSEUDO_REGISTER
2068 && hard_regno_nregs (i, lmode) != 1)
2070 for (el = l->elt->locs; el; el = el->next)
2071 if (!REG_P (el->loc))
2078 rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
2079 GET_MODE (lwider->elt->val_rtx));
2081 new_elt_loc_list (e, sub);
2084 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2085 slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
2091 return cselib_lookup_mem (x, create);
2093 hashval = cselib_hash_rtx (x, create, memmode);
2094 /* Can't even create if hashing is not possible. */
2098 slot = cselib_find_slot (mode, x, hashval,
2099 create ? INSERT : NO_INSERT, memmode);
2103 e = (cselib_val *) *slot;
2107 e = new_cselib_val (hashval, mode, x);
2109 /* We have to fill the slot before calling cselib_subst_to_values:
2110 the hash table is inconsistent until we do so, and
2111 cselib_subst_to_values will need to do lookups. */
2113 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2117 /* Wrapper for cselib_lookup, that indicates X is in INSN. */
2120 cselib_lookup_from_insn (rtx x, machine_mode mode,
2121 int create, machine_mode memmode, rtx_insn *insn)
2125 gcc_assert (!cselib_current_insn);
2126 cselib_current_insn = insn;
2128 ret = cselib_lookup (x, mode, create, memmode);
2130 cselib_current_insn = NULL;
2135 /* Wrapper for cselib_lookup_1, that logs the lookup result and
2136 maintains invariants related with debug insns. */
2139 cselib_lookup (rtx x, machine_mode mode,
2140 int create, machine_mode memmode)
2142 cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);
2144 /* ??? Should we return NULL if we're not to create an entry, the
2145 found loc is a debug loc and cselib_current_insn is not DEBUG?
2146 If so, we should also avoid converting val to non-DEBUG; probably
2147 easiest setting cselib_current_insn to NULL before the call
2150 if (dump_file && (dump_flags & TDF_CSELIB))
2152 fputs ("cselib lookup ", dump_file);
2153 print_inline_rtx (dump_file, x, 2);
2154 fprintf (dump_file, " => %u:%u\n",
2156 ret ? ret->hash : 0);
2162 /* Invalidate any entries in reg_values that overlap REGNO. This is called
2163 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
2164 is used to determine how many hard registers are being changed. If MODE
2165 is VOIDmode, then only REGNO is being changed; this is used when
2166 invalidating call clobbered registers across a call. */
2169 cselib_invalidate_regno (unsigned int regno, machine_mode mode,
2172 unsigned int endregno;
2175 /* If we see pseudos after reload, something is _wrong_. */
2176 gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
2177 || reg_renumber[regno] < 0);
2179 /* Determine the range of registers that must be invalidated. For
2180 pseudos, only REGNO is affected. For hard regs, we must take MODE
2181 into account, and we must also invalidate lower register numbers
2182 if they contain values that overlap REGNO. */
2183 if (regno < FIRST_PSEUDO_REGISTER)
2185 gcc_assert (mode != VOIDmode);
2187 if (regno < max_value_regs)
2190 i = regno - max_value_regs;
2192 endregno = end_hard_regno (mode, regno);
2194 if (setter && GET_CODE (setter) == CLOBBER_HIGH)
2195 gcc_assert (endregno == regno + 1);
2200 endregno = regno + 1;
2203 for (; i < endregno; i++)
2205 struct elt_list **l = &REG_VALUES (i);
2207 /* Go through all known values for this reg; if it overlaps the range
2208 we're invalidating, remove the value. */
2211 cselib_val *v = (*l)->elt;
2213 rtx_insn *setting_insn;
2214 struct elt_loc_list **p;
2215 unsigned int this_last = i;
2217 if (i < FIRST_PSEUDO_REGISTER && v != NULL)
2218 this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;
2220 if (this_last < regno || v == NULL
2221 || (v == cfa_base_preserved_val
2222 && i == cfa_base_preserved_regno))
2228 /* Ignore the setter if it is a CLOBBER_HIGH and the register isn't actually clobbered. */
2229 if (setter && GET_CODE (setter) == CLOBBER_HIGH)
2231 gcc_assert (endregno == regno + 1);
2232 const_rtx x = XEXP (setter, 0);
2233 if (!reg_is_clobbered_by_clobber_high (i, GET_MODE (v->val_rtx),
2241 /* We have an overlap. */
2242 if (*l == REG_VALUES (i))
2244 /* Maintain the invariant that the first entry of
2245 REG_VALUES, if present, must be the value used to set
2246 the register, or NULL. This is also nice because
2247 then we won't push the same regno onto used_regs
2253 unchain_one_elt_list (l);
2255 v = canonical_cselib_val (v);
2257 had_locs = v->locs != NULL;
2258 setting_insn = v->locs ? v->locs->setting_insn : NULL;
2260 /* Now, we clear the mapping from value to reg. It must exist, so
2261 this code will crash intentionally if it doesn't. */
2262 for (p = &v->locs; ; p = &(*p)->next)
2266 if (REG_P (x) && REGNO (x) == i)
2268 unchain_one_elt_loc_list (p);
2273 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2275 if (setting_insn && DEBUG_INSN_P (setting_insn))
2276 n_useless_debug_values++;
2284 /* Invalidate any locations in the table which are changed because of a
2285 store to MEM_RTX. If this is called because of a non-const call
2286 instruction, MEM_RTX is (mem:BLK const0_rtx). */
2289 cselib_invalidate_mem (rtx mem_rtx)
2291 cselib_val **vp, *v, *next;
2295 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
2296 mem_rtx = canon_rtx (mem_rtx);
2298 vp = &first_containing_mem;
2299 for (v = *vp; v != &dummy_val; v = next)
2301 bool has_mem = false;
2302 struct elt_loc_list **p = &v->locs;
2303 bool had_locs = v->locs != NULL;
2304 rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;
2310 struct elt_list **mem_chain;
2312 /* MEMs may occur in locations only at the top level; below
2313 that every MEM or REG is substituted by its VALUE. */
2319 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
2320 && ! canon_anti_dependence (x, false, mem_rtx,
2321 GET_MODE (mem_rtx), mem_addr))
2329 /* This one overlaps. */
2330 /* We must have a mapping from this MEM's address to the
2331 value (E). Remove that, too. */
2332 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
2333 addr = canonical_cselib_val (addr);
2334 gcc_checking_assert (v == canonical_cselib_val (v));
2335 mem_chain = &addr->addr_list;
2338 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);
2342 unchain_one_elt_list (mem_chain);
2346 /* Record canonicalized elt. */
2347 (*mem_chain)->elt = canon;
2349 mem_chain = &(*mem_chain)->next;
2352 unchain_one_elt_loc_list (p);
2355 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
2357 if (setting_insn && DEBUG_INSN_P (setting_insn))
2358 n_useless_debug_values++;
2363 next = v->next_containing_mem;
2367 vp = &(*vp)->next_containing_mem;
2370 v->next_containing_mem = NULL;
2375 /* Invalidate DEST, which is being assigned to or clobbered by SETTER. */
2378 cselib_invalidate_rtx (rtx dest, const_rtx setter)
2380 while (GET_CODE (dest) == SUBREG
2381 || GET_CODE (dest) == ZERO_EXTRACT
2382 || GET_CODE (dest) == STRICT_LOW_PART)
2383 dest = XEXP (dest, 0);
2386 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest), setter);
2387 else if (MEM_P (dest))
2388 cselib_invalidate_mem (dest);
2391 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
2394 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx setter,
2395 void *data ATTRIBUTE_UNUSED)
2397 cselib_invalidate_rtx (dest, setter);
2400 /* Record the result of a SET instruction. DEST is being set; the source
2401 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
2402 describes its address. */
2405 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
2407 if (src_elt == 0 || side_effects_p (dest))
2412 unsigned int dreg = REGNO (dest);
2413 if (dreg < FIRST_PSEUDO_REGISTER)
2415 unsigned int n = REG_NREGS (dest);
2417 if (n > max_value_regs)
2421 if (REG_VALUES (dreg) == 0)
2423 used_regs[n_used_regs++] = dreg;
2424 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
2428 /* The register should have been invalidated. */
2429 gcc_assert (REG_VALUES (dreg)->elt == 0);
2430 REG_VALUES (dreg)->elt = src_elt;
2433 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2435 new_elt_loc_list (src_elt, dest);
2437 else if (MEM_P (dest) && dest_addr_elt != 0
2438 && cselib_record_memory)
2440 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
2442 add_mem_for_addr (dest_addr_elt, src_elt, dest);
2446 /* Make ELT and X's VALUE equivalent to each other at INSN. */
2449 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
2452 rtx_insn *save_cselib_current_insn = cselib_current_insn;
2454 gcc_checking_assert (elt);
2455 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
2456 gcc_checking_assert (!side_effects_p (x));
2458 cselib_current_insn = insn;
2460 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);
2464 cselib_any_perm_equivs = true;
2466 if (!PRESERVED_VALUE_P (nelt->val_rtx))
2467 cselib_preserve_value (nelt);
2469 new_elt_loc_list (nelt, elt->val_rtx);
2472 cselib_current_insn = save_cselib_current_insn;
2475 /* Return TRUE if any permanent equivalences have been recorded since
2476 the table was last initialized. */
2478 cselib_have_permanent_equivalences (void)
2480 return cselib_any_perm_equivs;
/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};
/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *) arg;

  data->sets[data->n_sets].dest = dest;
  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;
  data->n_sets++;

  return 0;
}

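/* As an illustration of the callback above: for an access through
   (mem:SI (post_inc (reg X))) with 4-byte SImode, for_each_inc_dec presents
   DEST = (reg X), SRC = (reg X) and SRCOFF = (const_int 4), so the implied
   set recorded here is effectively reg X = reg X + 4; for a pre- or
   post-decrement the offset is negative.  */
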
/* Record the effects of any sets and autoincs in INSN.  */

static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }
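  /* If the single source is a read-only MEM and we are not tracking
     memory, a constant from a REG_EQUAL/REG_EQUIV note is a more useful
     source to record.  */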
  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }
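  /* Add the sets implied by auto-increment and auto-decrement addresses
     anywhere in the pattern; they are not listed as SETs in BODY.  */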
  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;
  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}

      /* Improve handling of STRICT_LOW_PART: if the current value is known
	 to be const0_rtx, then the low bits will be set to DEST and the
	 higher bits will remain zero.  Used in code like:

	 {di:SI=0;clobber flags:CC;}
	 flags:CCNO=cmp(bx:SI,0)
	 strict_low_part(di:QI)=flags:CCNO<=0

	 where we can note both that di:QI=flags:CCNO<=0 and also that,
	 because di:SI is known to be 0 and strict_low_part(di:QI) preserves
	 the upper bits, di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
	  && cselib_record_sets_hook
	  && REG_P (dest)
	  && HARD_REGISTER_P (dest)
	  && sets[i].src_elt
	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
	  && n_sets + n_strict_low_parts < MAX_SETS)
	{
	  opt_scalar_int_mode wider_mode_iter;
	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
	    {
	      scalar_int_mode wider_mode = wider_mode_iter.require ();
	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
		break;

	      rtx reg = gen_lowpart (wider_mode, dest);
	      if (!REG_P (reg))
		break;

	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
	      if (!v)
		continue;

	      struct elt_loc_list *l;
	      for (l = v->locs; l; l = l->next)
		if (l->loc == const0_rtx)
		  break;
	      if (!l)
		continue;

	      sets[n_sets + n_strict_low_parts].dest = reg;
	      sets[n_sets + n_strict_low_parts].src = dest;
	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
	      break;
	    }
	}
    }
  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
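  /* The destinations of the autoinc sets synthesized above are not seen by
     note_pattern_stores, so invalidate them by hand.  */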
  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);
  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }
  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
	continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
				   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

static bool
fp_setter_insn (rtx_insn *insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      function_abi callee_abi = insn_callee_abi (insn);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (elt_list *values = REG_VALUES (i))
	  {
	    /* If we know what mode the value was set in, check whether
	       it is still available after the call in that mode.  If we
	       don't know the mode, we have to check for the worst-case
	       scenario instead.  */
	    if (values->elt)
	      {
		machine_mode mode = GET_MODE (values->elt->val_rtx);
		if (callee_abi.clobbers_reg_p (mode, i))
		  cselib_invalidate_regno (i, mode);
	      }
	    else
	      {
		if (callee_abi.clobbers_at_least_part_of_reg_p (i))
		  cselib_invalidate_regno (i, reg_raw_mode[i]);
	      }
	  }

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
      else
	/* For const/pure calls, invalidate any argument slots because
	   they are owned by the callee.  */
	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	  if (GET_CODE (XEXP (x, 0)) == USE
	      && MEM_P (XEXP (XEXP (x, 0), 0)))
	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
    }
  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	{
	  gcc_assert (GET_CODE (XEXP (x, 0)) != CLOBBER_HIGH);
	  if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	    cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));
	}
      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }
  /* If frame_pointer_needed, then on the insn that sets the hard frame
     pointer invalidate stack_pointer_rtx, so that sp-based and {,h}fp-based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;
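  /* For example, with 40,000 values in the hash table and no debug values,
     the pruning below only happens once more than 10,000 values (a quarter
     of the table) have become useless, and then only if that count also
     exceeds MAX_USELESS_VALUES.  */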
  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int) n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;

  /* FIXME: enable sanitization (PR87845).  */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
				     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
				       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}

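/* An illustrative sketch of how a pass typically drives cselib; the exact
   driver loop varies between users and is not prescribed by this file
   (BB and INSN stand for the caller's basic-block and insn iterators):

	init_alias_analysis ();
	cselib_init (CSELIB_RECORD_MEMORY);
	FOR_EACH_BB_FN (bb, cfun)
	  FOR_BB_INSNS (bb, insn)
	    if (INSN_P (insn))
	      cselib_process_insn (insn);
	cselib_finish ();
	end_alias_analysis ();  */
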
/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"