1 /* RTL dead store elimination.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
43 #include "tree-pass.h"
44 #include "alloc-pool.h"
45 #include "insn-config.h"
55 #include "insn-codes.h"
60 #include "internal-fn.h"
61 #include "gimple-ssa.h"
63 #include "cfgcleanup.h"
65 /* This file contains three techniques for performing Dead Store
Elimination (dse):
68 * The first technique performs dse locally on any base address. It
69 is based on cselib, which is a local value numbering technique.
70 This technique is local to a basic block but deals with fairly
general addresses.
73 * The second technique performs dse globally but is restricted to
74 base addresses that are either constant or are relative to the
frame pointer.
77 * The third technique (which is only done after register allocation)
78 processes the spill slots. This differs from the second
79 technique because it takes advantage of the fact that spilling is
80 completely free from the effects of aliasing.
82 Logically, dse is a backwards dataflow problem. A store can be
83 deleted if it cannot be reached in the backward direction by any
84 use of the value being stored. However, the local technique uses a
85 forwards scan of the basic block because cselib requires that the
86 block be processed in that order.
88 The pass is logically broken into 7 steps:
92 1) The local algorithm, as well as scanning the insns for the two
global algorithms.
95 2) Analysis to see if the global algs are necessary. In the case
96 of stores based on a constant address, there must be at least two
97 stores to that address, to make it possible to delete some of the
98 stores. In the case of stores off of the frame or spill related
99 stores, only one store to an address is necessary because those
100 stores die at the end of the function.
102 3) Set up the global dataflow equations based on processing the
103 info parsed in the first step.
105 4) Solve the dataflow equations.
107 5) Delete the insns that the global analysis has indicated are
dead.
110 6) Delete insns that store the same value as a preceding store
111 where the earlier store couldn't be eliminated.

7) Cleanup.
115 This step uses cselib and canon_rtx to build the largest expression
116 possible for each address. This pass is a forwards pass through
117 each basic block. From the point of view of the global technique,
118 the first pass could examine a block in either direction. The
119 forwards ordering is to accommodate cselib.
121 We make a simplifying assumption: addresses fall into four broad
categories:
124 1) base has rtx_varies_p == false, offset is constant.
125 2) base has rtx_varies_p == false, offset variable.
126 3) base has rtx_varies_p == true, offset constant.
127 4) base has rtx_varies_p == true, offset variable.
129 The local passes are able to process all 4 kinds of addresses. The
130 global pass only handles 1).
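/* An illustrative sketch (added, not from the original comment): with
   rtx_varies_p as the test, an address like
   (plus (reg/f frame_pointer) (const_int 8)) is category 1 (invariant
   base, constant offset) and can be assigned a group for the global
   problem, while (plus (reg 100) (reg 101)) is category 4 and is only
   tracked locally through cselib values. */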
132 The global problem is formulated as follows:
134 A store, S1, to address A, where A is not relative to the stack
135 frame, can be eliminated if all paths from S1 to the end of the
136 function contain another store to A before a read of A.
138 If the address A is relative to the stack frame, a store S2 to A
139 can be eliminated if there are no paths from S2 that reach the
140 end of the function that read A before another store to A. In
141 this case S2 can be deleted even if there are paths from S2 to the
142 end of the function that have no reads or writes to A. This
143 second case allows stores to the stack frame to be deleted that
144 would otherwise die when the function returns. This cannot be
145 done if stores_off_frame_dead_at_return is not true. See the
146 documentation of that variable for when it is false.
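/* A minimal example of the two rules above (illustration only, assuming
   A is a non-frame address and FP+8 is a slot in the stack frame):

   *A = 1;        <- dead: every path stores to A again before any read
   ...               (no read of A in between)
   *A = 2;

   *(FP+8) = 3;   <- dead if no path reads FP+8 before the function
   return;           returns, because the frame slot dies at the return

   The non-frame store needs another store on every path; the frame
   store only needs the absence of a later read. */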
148 The global problem is formulated as a backwards set union
149 dataflow problem where the stores are the gens and reads are the
150 kills. Set union problems are rare and require some special
151 handling given our representation of bitmaps. A straightforward
152 implementation requires a lot of bitmaps filled with 1s.
153 These are expensive and cumbersome in our bitmap formulation so
154 care has been taken to avoid large vectors filled with 1s. See
155 the comments in bb_info and in the dataflow confluence functions
for details.
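/* Sketch of the resulting equations (my reading of the formulation
   above, not text from the original comment): with stores as GEN and
   reads as KILL, the backward problem is roughly

   IN(bb)  = GEN(bb) U (OUT(bb) - KILL(bb))
   OUT(bb) = intersection over successors S of IN(S)

   so a store position survives only if it is covered on every path,
   and the all-ones initial OUT sets that a naive formulation needs are
   exactly what the representation described here tries to avoid. */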
158 There are two places for further enhancements to this algorithm:
160 1) The original dse which was embedded in a pass called flow also
161 did local address forwarding. For example in

A <- r100
... <- A
166 flow would replace the right hand side of the second insn with a
167 reference to r100. Most of the information is available to add this
168 to this pass. It has not been done because it is a lot of work in
169 the case that either r100 is assigned to between the first and
170 second insn and/or the second insn is a load of part of the value
171 stored by the first insn.
173 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
174 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
175 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
176 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
178 2) The cleaning up of spill code is quite profitable. It currently
179 depends on reading tea leaves and chicken entrails left by reload.
180 This pass depends on reload creating a singleton alias set for each
181 spill slot and telling the next dse pass which of these alias sets
182 are the singletons. Rather than analyze the addresses of the
183 spills, dse's spill processing just does analysis of the loads and
184 stores that use those alias sets. There are three cases where this
approach falls short:
187 a) Reload sometimes creates the slot for one mode of access, and
188 then inserts loads and/or stores for a smaller mode. In this
189 case, the current code just punts on the slot. The proper thing
190 to do is to back out and use one bit vector position for each
191 byte of the entity associated with the slot. This depends on
192 KNOWING that reload always generates the accesses for each of the
193 bytes in some canonical (read that easy to understand several
194 passes after reload happens) way.
196 b) Reload sometimes decides that the spill slot it allocated was not
197 large enough for the mode and goes back and allocates more slots
198 with the same mode and alias set. The backout in this case is a
199 little more graceful than (a). In this case the slot is unmarked
200 as being a spill slot and if the final address comes out to be based
201 off the frame pointer, the global algorithm handles this slot.
203 c) For any pass that may prespill, there is currently no
204 mechanism to tell the dse pass that the slot being used has the
205 special properties that reload uses. It may be that all that is
206 required is to have those passes make the same calls that reload
207 does, assuming that the alias sets can be manipulated in the same
way. */
210 /* There are limits to the size of constant offsets we model for the
211 global problem. There are certainly test cases that exceed this
212 limit; however, it is unlikely that there are important programs
213 that really have constant offsets this size. */
214 #define MAX_OFFSET (64 * 1024)
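/* For example (my reading of set_usage_bits below, added for
   illustration): a store to (plus (symbol_ref "a") (const_int 100000))
   lies outside the modeled range, so no position bits are allocated
   for it and the global problem cannot prove it dead; only the local
   technique can still act on it. */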
216 /* Obstack for the DSE dataflow bitmaps. We don't want to put these
217 on the default obstack because these bitmaps can grow quite large
218 (~2GB for the small (!) test case of PR54146) and we'll hold on to
219 all that memory until the end of the compiler run.
220 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
221 releasing the whole obstack. */
222 static bitmap_obstack dse_bitmap_obstack;
224 /* Obstack for other data. As for above: Kinda nice to be able to
225 throw it all away at the end in one big sweep. */
226 static struct obstack dse_obstack;
228 /* Scratch bitmap for cselib's cselib_expand_value_rtx. */
229 static bitmap scratch = NULL;
231 struct insn_info_type;
233 /* This structure holds information about a candidate store. */
struct store_info
{
237 /* False means this is a clobber. */
bool is_set;
240 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
bool is_large;
243 /* The id of the mem group of the base address. If rtx_varies_p is
244 true, this is -1. Otherwise, it is the index into the group
table. */
int group_id;
248 /* This is the cselib value. */
249 cselib_val *cse_base;
251 /* The canonized mem. */
rtx mem;
254 /* Canonized MEM address for use by canon_true_dependence. */
rtx mem_addr;
257 /* If this is non-zero, it is the alias set of a spill location. */
258 alias_set_type alias_set;
260 /* The offset of the first byte and the offset of the byte after
261 the last byte associated with the operation. */
262 HOST_WIDE_INT begin, end;
266 /* A bitmask as wide as the number of bytes in the word that
267 contains a 1 if the byte may be needed. The store is unused if
268 all of the bits are 0. This is used if IS_LARGE is false. */
269 unsigned HOST_WIDE_INT small_bitmask;
273 /* A bitmap with one bit per byte. Cleared bit means the position
274 is needed. Used if IS_LARGE is true. */
277 /* Number of set bits (i.e. unneeded bytes) in BITMAP. If it is
278 equal to END - BEGIN, the whole store is unused. */
283 /* The next store info for this insn. */
284 struct store_info *next;
286 /* The right hand side of the store. This is used if there is a
287 subsequent reload of the mem's address somewhere later in the
basic block. */
rtx rhs;
291 /* If rhs is or holds a constant, this contains that constant,
otherwise NULL. */
rtx const_rhs;
295 /* Set if this store stores the same constant value as REDUNDANT_REASON
296 insn stored. These aren't eliminated early, because doing that
297 might prevent the earlier larger store from being eliminated. */
298 struct insn_info_type *redundant_reason;
301 /* Return a bitmask with the first N low bits set. */
303 static unsigned HOST_WIDE_INT
304 lowpart_bitmask (int n)
{
306 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
307 return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
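/* Usage example (added for illustration): lowpart_bitmask (3) yields
   0x7, the mask covering the three lowest byte positions of a
   small_bitmask. Note the expression assumes 1 <= N <=
   HOST_BITS_PER_WIDE_INT; N == 0 would shift by the full word width,
   which is undefined behavior in C. */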
310 static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool",
313 static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool",
316 /* This structure holds information about a load. These are only
317 built for rtx bases. */
318 struct read_info_type
{
320 /* The id of the mem group of the base address. */
int group_id;
323 /* If this is non-zero, it is the alias set of a spill location. */
324 alias_set_type alias_set;
326 /* The offset of the first byte and the offset of the byte after
327 the last byte associated with the operation. If begin == end == 0,
328 the read did not have a constant offset. */
int begin, end;
331 /* The mem being read. */
rtx mem;
334 /* The next read_info for this insn. */
335 struct read_info_type *next;
337 typedef struct read_info_type *read_info_t;
339 static object_allocator<read_info_type> read_info_type_pool
340 ("read_info_pool", 100);
342 /* One of these records is created for each insn. */
344 struct insn_info_type
346 /* Set true if the insn contains a store but the insn itself cannot
347 be deleted. This is set if the insn is a parallel and there is
348 more than one non dead output or if the insn is in some way
volatile. */
bool cannot_delete;
352 /* This field is only used by the global algorithm. It is set true
353 if the insn contains any read of mem except for a (1). This is
354 also set if the insn is a call or has a clobber mem. If the insn
355 contains a wild read, the use_rec will be null. */
bool wild_read;
358 /* This is true only for CALL instructions which could potentially read
359 any non-frame memory location. This field is used by the global
algorithm. */
361 bool non_frame_wild_read;
363 /* This field is only used for the processing of const functions.
364 These functions cannot read memory, but they can read the stack
365 because that is where they may get their parms. We need to be
366 this conservative because, like the store motion pass, we don't
367 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
368 Moreover, we need to distinguish two cases:
369 1. Before reload (register elimination), the stores related to
370 outgoing arguments are stack pointer based and thus deemed
371 of non-constant base in this pass. This requires special
372 handling but also means that the frame pointer based stores
373 need not be killed upon encountering a const function call.
374 2. After reload, the stores related to outgoing arguments can be
375 either stack pointer or hard frame pointer based. This means
376 that we have no other choice than also killing all the frame
377 pointer based stores upon encountering a const function call.
378 This field is set after reload for const function calls and before
379 reload for const tail function calls on targets where arg pointer
380 is the frame pointer. Having this set is less severe than a wild
381 read, it just means that all the frame related stores are killed
382 rather than all the stores. */
bool frame_read;
385 /* This field is only used for the processing of const functions.
386 It is set if the insn may contain a stack pointer based store. */
387 bool stack_pointer_based;
389 /* This is true if any of the sets within the store contains a
390 cselib base. Such stores can only be deleted by the local
algorithm. */
392 bool contains_cselib_groups;

/* The insn. */
rtx_insn *insn;
397 /* The list of mem sets or mem clobbers that are contained in this
398 insn. If the insn is deletable, it contains only one mem set.
399 But it could also contain clobbers. Insns that contain more than
400 one mem set are not deletable, but each of those mems are here in
401 order to provide info to delete other insns. */
402 store_info *store_rec;
404 /* The linked list of mem uses in this insn. Only the reads from
405 rtx bases are listed here. The reads to cselib bases are
406 completely processed during the first scan and so are never
created. */
408 read_info_t read_rec;
410 /* The live fixed registers. We assume only fixed registers can
411 cause trouble by being clobbered from an expanded pattern;
412 storing only the live fixed registers (rather than all registers)
413 means less memory needs to be allocated / copied for the individual
stores. */
415 regset fixed_regs_live;
417 /* The prev insn in the basic block. */
418 struct insn_info_type * prev_insn;
420 /* The linked list of insns that are in consideration for removal in
421 the forwards pass through the basic block. This pointer may be
422 trash as it is not cleared when a wild read occurs. The only
423 time it is guaranteed to be correct is when the traversal starts
424 at active_local_stores. */
425 struct insn_info_type * next_local_store;
427 typedef struct insn_info_type *insn_info_t;
429 static object_allocator<insn_info_type> insn_info_type_pool
430 ("insn_info_pool", 100);
432 /* The linked list of stores that are under consideration in this
basic block. */
434 static insn_info_t active_local_stores;
435 static int active_local_stores_len;
437 struct dse_bb_info_type
{
439 /* Pointer to the insn info for the last insn in the block. These
440 are linked so this is how all of the insns are reached. During
441 scanning this is the current insn being scanned. */
442 insn_info_t last_insn;
444 /* The info for the global dataflow problem. */
447 /* This is set if the transfer function should AND in the wild_read
448 bitmap before applying the kill and gen sets. That vector knocks
449 out most of the bits in the bitmap and thus speeds up the
operations. */
451 bool apply_wild_read;
453 /* The following 4 bitvectors hold information about which positions
454 of which stores are live or dead. They are indexed by
457 /* The set of store positions that exist in this block before a wild read. */
bitmap gen;
460 /* The set of load positions that exist in this block above the
461 same position of a store. */
bitmap kill;
464 /* The set of stores that reach the top of the block without being
killed by a read.
467 Do not represent the in if it is all ones. Note that this is
468 what the bitvector should logically be initialized to for a set
469 intersection problem. However, like the kill set, this is too
470 expensive. So initially, the in set will only be created for the
471 exit block and any block that contains a wild read. */
bitmap in;
474 /* The set of stores that reach the bottom of the block from its
successors.
477 Do not represent the in if it is all ones. Note that this is
478 what the bitvector should logically be initialized to for a set
479 intersection problem. However, like the kill and in set, this is
480 too expensive. So what is done is that the confluence operator
481 just initializes the vector from one of the out sets of the
482 successors of the block. */
bitmap out;
485 /* The following bitvector is indexed by the reg number. It
486 contains the set of regs that are live at the current instruction
487 being processed. While it contains info for all of the
488 registers, only the hard registers are actually examined. It is used
489 to assure that shift and/or add sequences that are inserted do not
490 accidentally clobber live hard regs. */
bitmap regs_live;
494 typedef struct dse_bb_info_type *bb_info_t;
496 static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
497 ("bb_info_pool", 100);
499 /* Table to hold all bb_infos. */
500 static bb_info_t *bb_table;
502 /* There is a group_info for each rtx base that is used to reference
503 memory. There are also not many of the rtx bases because they are
504 very limited in scope. */
struct group_info
{
508 /* The actual base of the address. */
rtx rtx_base;
511 /* The sequential id of the base. This allows us to have a
512 canonical ordering of these that is not based on addresses. */
int id;
515 /* True if there are any positions that are to be processed
globally. */
517 bool process_globally;
519 /* True if the base of this group is either the frame_pointer or
520 hard_frame_pointer. */
bool frame_related;
523 /* A mem wrapped around the base pointer for the group in order to do
524 read dependency. It must be given BLKmode in order to encompass all
525 the possible offsets from the base. */
rtx base_mem;
528 /* Canonized version of base_mem's address. */
rtx canon_base_addr;
531 /* These two sets of two bitmaps are used to keep track of how many
532 stores are actually referencing that position from this base. We
533 only do this for rtx bases as this will be used to assign
534 positions in the bitmaps for the global problem. Bit N is set in
535 store1 on the first store for offset N. Bit N is set in store2
536 for the second store to offset N. This is all we need since we
537 only care about offsets that have two or more stores for them.
539 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
540 for 0 and greater offsets.
542 There is one special case here, for stores into the stack frame,
543 we will OR store1 into store2 before deciding which stores to look
544 at globally. This is because stores to the stack frame that have
545 no other reads before the end of the function can also be
deleted. */
547 bitmap store1_n, store1_p, store2_n, store2_p;
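/* Worked example (added, not part of the original comment): the first
   store this group sees at offset 5 sets bit 5 of store1_p; a second
   store to offset 5 then also sets bit 5 of store2_p, which is what
   later marks that offset as worth a position in the global bitmaps.
   A store at offset -4 would use bit 4 of store1_n/store2_n instead. */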
549 /* These bitmaps keep track of which offsets in this group escape this function.
550 An offset escapes if it corresponds to a named variable whose
551 addressable flag is set. */
552 bitmap escaped_n, escaped_p;
554 /* The positions in this bitmap have the same assignments as the in,
555 out, gen and kill bitmaps. This bitmap is all zeros except for
556 the positions that are occupied by stores for this group. */
bitmap group_kill;
559 /* The offset_map is used to map the offsets from this base into
560 positions in the global bitmaps. It is only created after all of
561 the stores have been scanned and we know which ones we
care about. */
563 int *offset_map_n, *offset_map_p;
564 int offset_map_size_n, offset_map_size_p;
567 static object_allocator<group_info> group_info_pool
568 ("rtx_group_info_pool", 100);
570 /* Index into the rtx_group_vec. */
571 static int rtx_group_next_id;
574 static vec<group_info *> rtx_group_vec;
577 /* This structure holds the set of changes that are being deferred
578 when removing a read operation. See replace_read. */
579 struct deferred_change
{
582 /* The mem that is being replaced. */
rtx *loc;
585 /* The reg it is being replaced with. */
rtx reg;
588 struct deferred_change *next;
};
591 static object_allocator<deferred_change> deferred_change_pool
592 ("deferred_change_pool", 10);
594 static deferred_change *deferred_change_list = NULL;
596 /* The group that holds all of the clear_alias_sets. */
597 static group_info *clear_alias_group;
599 /* The modes of the clear_alias_sets. */
600 static htab_t clear_alias_mode_table;
602 /* Hash table element to look up the mode for an alias set. */
603 struct clear_alias_mode_holder
{
605 alias_set_type alias_set;
machine_mode mode;
};
609 /* This is true except if cfun->stdarg -- i.e. we cannot do
610 this for vararg functions because they play games with the frame. */
611 static bool stores_off_frame_dead_at_return;
613 /* Counter for stats. */
614 static int globally_deleted;
615 static int locally_deleted;
616 static int spill_deleted;
618 static bitmap all_blocks;
620 /* Locations that are killed by calls in the global phase. */
621 static bitmap kill_on_calls;
623 /* The number of bits used in the global bitmaps. */
624 static unsigned int current_position;
626 /*----------------------------------------------------------------------------
630 ----------------------------------------------------------------------------*/
633 /* Find the entry associated with ALIAS_SET. */
635 static struct clear_alias_mode_holder *
636 clear_alias_set_lookup (alias_set_type alias_set)
638 struct clear_alias_mode_holder tmp_holder;
641 tmp_holder.alias_set = alias_set;
642 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
645 return (struct clear_alias_mode_holder *) *slot;
649 /* Hashtable callbacks for maintaining the "bases" field of
650 store_group_info, given that the addresses are function invariants. */
652 struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
654 static inline hashval_t hash (const group_info *);
655 static inline bool equal (const group_info *, const group_info *);
659 invariant_group_base_hasher::equal (const group_info *gi1,
660 const group_info *gi2)
662 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
666 invariant_group_base_hasher::hash (const group_info *gi)
669 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
672 /* Tables of group_info structures, hashed by base value. */
673 static hash_table<invariant_group_base_hasher> *rtx_group_table;
676 /* Get the GROUP for BASE. Add a new group if it is not there. */
679 get_group_info (rtx base)
681 struct group_info tmp_gi;
687 /* Find the store_base_info structure for BASE, creating a new one
if necessary. */
689 tmp_gi.rtx_base = base;
690 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
695 if (!clear_alias_group)
697 clear_alias_group = gi = group_info_pool.allocate ();
698 memset (gi, 0, sizeof (struct group_info));
699 gi->id = rtx_group_next_id++;
700 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
701 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
702 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
703 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
704 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
705 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
706 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
707 gi->process_globally = false;
708 gi->offset_map_size_n = 0;
709 gi->offset_map_size_p = 0;
710 gi->offset_map_n = NULL;
711 gi->offset_map_p = NULL;
712 rtx_group_vec.safe_push (gi);
714 return clear_alias_group;
719 *slot = gi = group_info_pool.allocate ();
721 gi->id = rtx_group_next_id++;
722 gi->base_mem = gen_rtx_MEM (BLKmode, base);
723 gi->canon_base_addr = canon_rtx (base);
724 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
725 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
726 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
727 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
728 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
729 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
730 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
731 gi->process_globally = false;
gi->frame_related =
733 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
734 gi->offset_map_size_n = 0;
735 gi->offset_map_size_p = 0;
736 gi->offset_map_n = NULL;
737 gi->offset_map_p = NULL;
738 rtx_group_vec.safe_push (gi);
745 /* Initialization of data structures. */
751 globally_deleted = 0;
754 bitmap_obstack_initialize (&dse_bitmap_obstack);
755 gcc_obstack_init (&dse_obstack);
757 scratch = BITMAP_ALLOC (®_obstack);
758 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
761 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
763 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
764 rtx_group_next_id = 0;
766 stores_off_frame_dead_at_return = !cfun->stdarg;
768 init_alias_analysis ();
770 clear_alias_group = NULL;
775 /*----------------------------------------------------------------------------
778 Scan all of the insns. Any random ordering of the blocks is fine.
779 Each block is scanned in forward order to accommodate cselib which
780 is used to remove stores with non-constant bases.
781 ----------------------------------------------------------------------------*/
783 /* Delete all of the store_info recs from INSN_INFO. */
786 free_store_info (insn_info_t insn_info)
788 store_info *cur = insn_info->store_rec;
791 store_info *next = cur->next;
793 BITMAP_FREE (cur->positions_needed.large.bmap);
795 cse_store_info_pool.remove (cur);
797 rtx_store_info_pool.remove (cur);
801 insn_info->cannot_delete = true;
802 insn_info->contains_cselib_groups = false;
803 insn_info->store_rec = NULL;
806 struct note_add_store_info
808 rtx_insn *first, *current;
809 regset fixed_regs_live;
813 /* Callback for emit_inc_dec_insn_before via note_stores.
814 Check if a register is clobbered which is live afterwards. */
817 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
820 note_add_store_info *info = (note_add_store_info *) data;
825 /* If this register is referenced by the current or an earlier insn,
826 that's OK. E.g. this applies to the register that is being incremented
827 with this addition. */
828 for (insn = info->first;
829 insn != NEXT_INSN (info->current);
830 insn = NEXT_INSN (insn))
831 if (reg_referenced_p (loc, PATTERN (insn)))
834 /* If we come here, we have a clobber of a register that's only OK
835 if that register is not live. If we don't have liveness information
836 available, fail now. */
837 if (!info->fixed_regs_live)
839 info->failure = true;
842 /* Now check if this is a live fixed register. */
843 unsigned int end_regno = END_REGNO (loc);
844 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
845 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
846 info->failure = true;
849 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
850 SRC + SRCOFF before insn ARG. */
853 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
854 rtx op ATTRIBUTE_UNUSED,
855 rtx dest, rtx src, rtx srcoff, void *arg)
857 insn_info_t insn_info = (insn_info_t) arg;
858 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
859 note_add_store_info info;
861 /* We can reuse all operands without copying, because we are about
862 to delete the insn that contained it. */
866 emit_insn (gen_add3_insn (dest, src, srcoff));
867 new_insn = get_insns ();
871 new_insn = gen_move_insn (dest, src);
872 info.first = new_insn;
873 info.fixed_regs_live = insn_info->fixed_regs_live;
874 info.failure = false;
875 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
878 note_stores (PATTERN (cur), note_add_store, &info);
881 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
882 return it immediately, communicating the failure to its caller. */
886 emit_insn_before (new_insn, insn);
891 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
892 is there, is split into a separate insn.
893 Return true on success (or if there was nothing to do), false on failure. */
896 check_for_inc_dec_1 (insn_info_t insn_info)
898 rtx_insn *insn = insn_info->insn;
899 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
901 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
907 /* Entry point for postreload. If you work on reload_cse, or you need this
908 anywhere else, consider if you can provide register liveness information
909 and add a parameter to this function so that it can be passed down in
910 insn_info.fixed_regs_live. */
912 check_for_inc_dec (rtx_insn *insn)
914 insn_info_type insn_info;
917 insn_info.insn = insn;
918 insn_info.fixed_regs_live = NULL;
919 note = find_reg_note (insn, REG_INC, NULL_RTX);
921 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
926 /* Delete the insn and free all of the fields inside INSN_INFO. */
929 delete_dead_store_insn (insn_info_t insn_info)
931 read_info_t read_info;
936 if (!check_for_inc_dec_1 (insn_info))
938 if (dump_file && (dump_flags & TDF_DETAILS))
940 fprintf (dump_file, "Locally deleting insn %d ",
941 INSN_UID (insn_info->insn));
942 if (insn_info->store_rec->alias_set)
943 fprintf (dump_file, "alias set %d\n",
944 (int) insn_info->store_rec->alias_set);
946 fprintf (dump_file, "\n");
949 free_store_info (insn_info);
950 read_info = insn_info->read_rec;
954 read_info_t next = read_info->next;
955 read_info_type_pool.remove (read_info);
958 insn_info->read_rec = NULL;
960 delete_insn (insn_info->insn);
962 insn_info->insn = NULL;
964 insn_info->wild_read = false;
967 /* Return whether DECL, a local variable, can possibly escape the current
function scope. */

static bool
971 local_variable_can_escape (tree decl)
973 if (TREE_ADDRESSABLE (decl))
976 /* If this is a partitioned variable, we need to consider all the variables
977 in the partition. This is necessary because a store into one of them can
978 be replaced with a store into another and this may not change the outcome
979 of the escape analysis. */
980 if (cfun->gimple_df->decls_to_pointers != NULL)
982 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
984 return TREE_ADDRESSABLE (*namep);
990 /* Return whether EXPR can possibly escape the current function scope. */
993 can_escape (tree expr)
998 base = get_base_address (expr);
1000 && !may_be_aliased (base)
1001 && !(TREE_CODE (base) == VAR_DECL
1002 && !DECL_EXTERNAL (base)
1003 && !TREE_STATIC (base)
1004 && local_variable_can_escape (base)))
1009 /* Set the store* bitmaps and the offset_map_size* fields in GROUP based on
1010 OFFSET and WIDTH. */
1013 set_usage_bits (group_info *group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1017 bool expr_escapes = can_escape (expr);
1018 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
1019 for (i=offset; i<offset+width; i++)
1027 store1 = group->store1_n;
1028 store2 = group->store2_n;
1029 escaped = group->escaped_n;
1034 store1 = group->store1_p;
1035 store2 = group->store2_p;
1036 escaped = group->escaped_p;
1040 if (!bitmap_set_bit (store1, ai))
1041 bitmap_set_bit (store2, ai);
1046 if (group->offset_map_size_n < ai)
1047 group->offset_map_size_n = ai;
1051 if (group->offset_map_size_p < ai)
1052 group->offset_map_size_p = ai;
1056 bitmap_set_bit (escaped, ai);
1061 reset_active_stores (void)
1063 active_local_stores = NULL;
1064 active_local_stores_len = 0;
1067 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1070 free_read_records (bb_info_t bb_info)
1072 insn_info_t insn_info = bb_info->last_insn;
1073 read_info_t *ptr = &insn_info->read_rec;
1076 read_info_t next = (*ptr)->next;
1077 if ((*ptr)->alias_set == 0)
1079 read_info_type_pool.remove (*ptr);
1083 ptr = &(*ptr)->next;
1087 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1090 add_wild_read (bb_info_t bb_info)
1092 insn_info_t insn_info = bb_info->last_insn;
1093 insn_info->wild_read = true;
1094 free_read_records (bb_info);
1095 reset_active_stores ();
1098 /* Set the BB_INFO so that the last insn is marked as a wild read of
1099 non-frame locations. */
1102 add_non_frame_wild_read (bb_info_t bb_info)
1104 insn_info_t insn_info = bb_info->last_insn;
1105 insn_info->non_frame_wild_read = true;
1106 free_read_records (bb_info);
1107 reset_active_stores ();
1110 /* Return true if X is a constant or one of the registers that behave
1111 as a constant over the life of a function. This is equivalent to
1112 !rtx_varies_p for memory addresses. */
1115 const_or_frame_p (rtx x)
1120 if (GET_CODE (x) == REG)
1122 /* Note that we have to test for the actual rtx used for the frame
1123 and arg pointers and not just the register number in case we have
1124 eliminated the frame and/or arg pointer and are using it
for pseudos. */
1126 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1127 /* The arg pointer varies if it is not a fixed register. */
1128 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1129 || x == pic_offset_table_rtx)
1137 /* Take all reasonable action to put the address of MEM into the form
1138 that we can do analysis on.
1140 The gold standard is to get the address into the form: address +
1141 OFFSET where address is something that rtx_varies_p considers a
1142 constant. When we can get the address in this form, we can do
1143 global analysis on it. Note that for constant bases, address is
1144 not actually returned, only the group_id. The address can be
1147 If that fails, we try cselib to get a value we can at least use
1148 locally. If that fails we return false.
1150 The GROUP_ID is set to -1 for cselib bases and the index of the
1151 group for non_varying bases.
1153 FOR_READ is true if this is a mem read and false if not. */
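/* Hypothetical examples of the two outcomes (added for illustration):
   for (mem (plus (symbol_ref "x") (const_int 4))) the base is function
   invariant, so a non-negative *GROUP_ID and *OFFSET = 4 are returned;
   for (mem (plus (reg 100) (const_int 4))) the base varies, so
   *GROUP_ID is -1 and *BASE is set to the cselib value of (reg 100),
   again with *OFFSET = 4. */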
1156 canon_address (rtx mem,
1157 alias_set_type *alias_set_out,
1159 HOST_WIDE_INT *offset,
1162 machine_mode address_mode = get_address_mode (mem);
1163 rtx mem_address = XEXP (mem, 0);
1164 rtx expanded_address, address;
1169 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1171 if (dump_file && (dump_flags & TDF_DETAILS))
1173 fprintf (dump_file, " mem: ");
1174 print_inline_rtx (dump_file, mem_address, 0);
1175 fprintf (dump_file, "\n");
1178 /* First see if just canon_rtx (mem_address) is const or frame,
1179 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1181 for (expanded = 0; expanded < 2; expanded++)
1185 /* Use cselib to replace all of the reg references with the full
1186 expression. This will take care of the case where we have
1188 r_x = base + offset;
val = *r_x;

by making it into

1193 val = *(base + offset); */
1195 expanded_address = cselib_expand_value_rtx (mem_address,
1198 /* If this fails, just go with the address from first
1200 if (!expanded_address)
1204 expanded_address = mem_address;
1206 /* Split the address into canonical BASE + OFFSET terms. */
1207 address = canon_rtx (expanded_address);
1211 if (dump_file && (dump_flags & TDF_DETAILS))
1215 fprintf (dump_file, "\n after cselib_expand address: ");
1216 print_inline_rtx (dump_file, expanded_address, 0);
1217 fprintf (dump_file, "\n");
1220 fprintf (dump_file, "\n after canon_rtx address: ");
1221 print_inline_rtx (dump_file, address, 0);
1222 fprintf (dump_file, "\n");
1225 if (GET_CODE (address) == CONST)
1226 address = XEXP (address, 0);
1228 if (GET_CODE (address) == PLUS
1229 && CONST_INT_P (XEXP (address, 1)))
1231 *offset = INTVAL (XEXP (address, 1));
1232 address = XEXP (address, 0);
1235 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1236 && const_or_frame_p (address))
1238 group_info *group = get_group_info (address);
1240 if (dump_file && (dump_flags & TDF_DETAILS))
1241 fprintf (dump_file, " gid=%d offset=%d \n",
1242 group->id, (int)*offset);
1244 *group_id = group->id;
1249 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1254 if (dump_file && (dump_flags & TDF_DETAILS))
1255 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1258 if (dump_file && (dump_flags & TDF_DETAILS))
1259 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1260 (*base)->uid, (*base)->hash, (int)*offset);
1265 /* Clear the rhs field from the active_local_stores array. */
1268 clear_rhs_from_active_local_stores (void)
1270 insn_info_t ptr = active_local_stores;
1274 store_info *store_info = ptr->store_rec;
1275 /* Skip the clobbers. */
1276 while (!store_info->is_set)
1277 store_info = store_info->next;
1279 store_info->rhs = NULL;
1280 store_info->const_rhs = NULL;
1282 ptr = ptr->next_local_store;
1287 /* Mark byte POS bytes from the beginning of store S_INFO as unneeded. */
1290 set_position_unneeded (store_info *s_info, int pos)
1292 if (__builtin_expect (s_info->is_large, false))
1294 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1295 s_info->positions_needed.large.count++;
1298 s_info->positions_needed.small_bitmask
1299 &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1302 /* Mark the whole store S_INFO as unneeded. */
1305 set_all_positions_unneeded (store_info *s_info)
1307 if (__builtin_expect (s_info->is_large, false))
1309 int pos, end = s_info->end - s_info->begin;
1310 for (pos = 0; pos < end; pos++)
1311 bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
1312 s_info->positions_needed.large.count = end;
1315 s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1318 /* Return TRUE if any bytes from S_INFO store are needed. */
1321 any_positions_needed_p (store_info *s_info)
1323 if (__builtin_expect (s_info->is_large, false))
1324 return (s_info->positions_needed.large.count
1325 < s_info->end - s_info->begin);
1327 return (s_info->positions_needed.small_bitmask
1328 != (unsigned HOST_WIDE_INT) 0);
1331 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1332 store are needed. */
1335 all_positions_needed_p (store_info *s_info, int start, int width)
1337 if (__builtin_expect (s_info->is_large, false))
1339 int end = start + width;
while (start < end)
1341 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
return false;
return true;
1347 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1348 return (s_info->positions_needed.small_bitmask & mask) == mask;
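/* Worked example of the small-bitmask scheme (added for illustration):
   a 4-byte store with begin == 0 starts out with small_bitmask == 0xf.
   If a later 2-byte store covers bytes 1 and 2, set_position_unneeded
   clears those bits leaving 0x9, so any_positions_needed_p is still
   true while all_positions_needed_p (s_info, 1, 2) now returns false. */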
1353 static rtx get_stored_val (store_info *, machine_mode, HOST_WIDE_INT,
1354 HOST_WIDE_INT, basic_block, bool);
1357 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1358 there is a candidate store, after adding it to the appropriate
1359 local store group if so. */
1362 record_store (rtx body, bb_info_t bb_info)
1364 rtx mem, rhs, const_rhs, mem_addr;
1365 HOST_WIDE_INT offset = 0;
1366 HOST_WIDE_INT width = 0;
1367 alias_set_type spill_alias_set;
1368 insn_info_t insn_info = bb_info->last_insn;
1369 store_info *store_info = NULL;
1371 cselib_val *base = NULL;
1372 insn_info_t ptr, last, redundant_reason;
1373 bool store_is_unused;
1375 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1378 mem = SET_DEST (body);
1380 /* If this is not used, then this cannot be used to keep the insn
1381 from being deleted. On the other hand, it does provide something
1382 that can be used to prove that another store is dead. */
1384 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1386 /* Check whether that value is a suitable memory location. */
1389 /* If the set or clobber is unused, then it does not affect our
1390 ability to get rid of the entire insn. */
1391 if (!store_is_unused)
1392 insn_info->cannot_delete = true;
1396 /* At this point we know mem is a mem. */
1397 if (GET_MODE (mem) == BLKmode)
1399 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1401 if (dump_file && (dump_flags & TDF_DETAILS))
1402 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1403 add_wild_read (bb_info);
1404 insn_info->cannot_delete = true;
1407 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1408 as memset (addr, 0, 36); */
1409 else if (!MEM_SIZE_KNOWN_P (mem)
1410 || MEM_SIZE (mem) <= 0
1411 || MEM_SIZE (mem) > MAX_OFFSET
1412 || GET_CODE (body) != SET
1413 || !CONST_INT_P (SET_SRC (body)))
1415 if (!store_is_unused)
1417 /* If the set or clobber is unused, then it does not affect our
1418 ability to get rid of the entire insn. */
1419 insn_info->cannot_delete = true;
1420 clear_rhs_from_active_local_stores ();
1426 /* We can still process a volatile mem, we just cannot delete it. */
1427 if (MEM_VOLATILE_P (mem))
1428 insn_info->cannot_delete = true;
1430 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1432 clear_rhs_from_active_local_stores ();
1436 if (GET_MODE (mem) == BLKmode)
1437 width = MEM_SIZE (mem);
1439 width = GET_MODE_SIZE (GET_MODE (mem));
1441 if (spill_alias_set)
1443 bitmap store1 = clear_alias_group->store1_p;
1444 bitmap store2 = clear_alias_group->store2_p;
1446 gcc_assert (GET_MODE (mem) != BLKmode);
1448 if (!bitmap_set_bit (store1, spill_alias_set))
1449 bitmap_set_bit (store2, spill_alias_set);
1451 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1452 clear_alias_group->offset_map_size_p = spill_alias_set;
1454 store_info = rtx_store_info_pool.allocate ();
1456 if (dump_file && (dump_flags & TDF_DETAILS))
1457 fprintf (dump_file, " processing spill store %d(%s)\n",
1458 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1460 else if (group_id >= 0)
1462 /* In the restrictive case where the base is a constant or the
1463 frame pointer we can do global analysis. */
1466 = rtx_group_vec[group_id];
1467 tree expr = MEM_EXPR (mem);
1469 store_info = rtx_store_info_pool.allocate ();
1470 set_usage_bits (group, offset, width, expr);
1472 if (dump_file && (dump_flags & TDF_DETAILS))
1473 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1474 group_id, (int)offset, (int)(offset+width));
1478 if (may_be_sp_based_p (XEXP (mem, 0)))
1479 insn_info->stack_pointer_based = true;
1480 insn_info->contains_cselib_groups = true;
1482 store_info = cse_store_info_pool.allocate ();
1485 if (dump_file && (dump_flags & TDF_DETAILS))
1486 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1487 (int)offset, (int)(offset+width));
1490 const_rhs = rhs = NULL_RTX;
1491 if (GET_CODE (body) == SET
1492 /* No place to keep the value after ra. */
1493 && !reload_completed
1494 && (REG_P (SET_SRC (body))
1495 || GET_CODE (SET_SRC (body)) == SUBREG
1496 || CONSTANT_P (SET_SRC (body)))
1497 && !MEM_VOLATILE_P (mem)
1498 /* Sometimes the store and reload is used for truncation and
rounding. */
1500 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1502 rhs = SET_SRC (body);
1503 if (CONSTANT_P (rhs))
1505 else if (body == PATTERN (insn_info->insn))
1507 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1508 if (tem && CONSTANT_P (XEXP (tem, 0)))
1509 const_rhs = XEXP (tem, 0);
1511 if (const_rhs == NULL_RTX && REG_P (rhs))
1513 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1515 if (tem && CONSTANT_P (tem))
1520 /* Check to see if this store causes some other stores to be
dead. */
1522 ptr = active_local_stores;
1524 redundant_reason = NULL;
1525 mem = canon_rtx (mem);
1526 /* For alias_set != 0 canon_true_dependence should never be called. */
1527 if (spill_alias_set)
1528 mem_addr = NULL_RTX;
1532 mem_addr = base->val_rtx;
1536 = rtx_group_vec[group_id];
1537 mem_addr = group->canon_base_addr;
1539 /* get_addr can only handle VALUE but cannot handle expr like:
1540 VALUE + OFFSET, so call get_addr to get original addr for
1541 mem_addr before plus_constant. */
1542 mem_addr = get_addr (mem_addr);
1544 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1549 insn_info_t next = ptr->next_local_store;
1550 struct store_info *s_info = ptr->store_rec;
1553 /* Skip the clobbers. We delete the active insn if this insn
1554 shadows the set. To have been put on the active list, it
1555 has exactly one set. */
1556 while (!s_info->is_set)
1557 s_info = s_info->next;
1559 if (s_info->alias_set != spill_alias_set)
1561 else if (s_info->alias_set)
1563 struct clear_alias_mode_holder *entry
1564 = clear_alias_set_lookup (s_info->alias_set);
1565 /* Generally, spills cannot be processed if any of the
1566 references to the slot have a different mode. But if
1567 we are in the same block and mode is exactly the same
1568 between this store and one before in the same block,
1569 we can still delete it. */
1570 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1571 && (GET_MODE (mem) == entry->mode))
1574 set_all_positions_unneeded (s_info);
1576 if (dump_file && (dump_flags & TDF_DETAILS))
1577 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
1578 INSN_UID (ptr->insn), (int) s_info->alias_set);
1580 else if ((s_info->group_id == group_id)
1581 && (s_info->cse_base == base))
1584 if (dump_file && (dump_flags & TDF_DETAILS))
1585 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
1586 INSN_UID (ptr->insn), s_info->group_id,
1587 (int)s_info->begin, (int)s_info->end);
1589 /* Even if PTR won't be eliminated as unneeded, if both
1590 PTR and this insn store the same constant value, we might
1591 eliminate this insn instead. */
1592 if (s_info->const_rhs
1594 && offset >= s_info->begin
1595 && offset + width <= s_info->end
1596 && all_positions_needed_p (s_info, offset - s_info->begin,
1599 if (GET_MODE (mem) == BLKmode)
1601 if (GET_MODE (s_info->mem) == BLKmode
1602 && s_info->const_rhs == const_rhs)
1603 redundant_reason = ptr;
1605 else if (s_info->const_rhs == const0_rtx
1606 && const_rhs == const0_rtx)
1607 redundant_reason = ptr;
1612 val = get_stored_val (s_info, GET_MODE (mem),
1613 offset, offset + width,
1614 BLOCK_FOR_INSN (insn_info->insn),
1616 if (get_insns () != NULL)
1619 if (val && rtx_equal_p (val, const_rhs))
1620 redundant_reason = ptr;
1624 for (i = MAX (offset, s_info->begin);
1625 i < offset + width && i < s_info->end;
1627 set_position_unneeded (s_info, i - s_info->begin);
1629 else if (s_info->rhs)
1630 /* Need to see if it is possible for this store to overwrite
1631 the value of store_info. If it is, set the rhs to NULL to
1632 keep it from being used to remove a load. */
1634 if (canon_true_dependence (s_info->mem,
1635 GET_MODE (s_info->mem),
1640 s_info->const_rhs = NULL;
1644 /* An insn can be deleted if every position of every one of
1645 its s_infos is zero. */
1646 if (any_positions_needed_p (s_info))
1651 insn_info_t insn_to_delete = ptr;
1653 active_local_stores_len--;
1655 last->next_local_store = ptr->next_local_store;
1657 active_local_stores = ptr->next_local_store;
1659 if (!insn_to_delete->cannot_delete)
1660 delete_dead_store_insn (insn_to_delete);
1668 /* Finish filling in the store_info. */
1669 store_info->next = insn_info->store_rec;
1670 insn_info->store_rec = store_info;
1671 store_info->mem = mem;
1672 store_info->alias_set = spill_alias_set;
1673 store_info->mem_addr = mem_addr;
1674 store_info->cse_base = base;
1675 if (width > HOST_BITS_PER_WIDE_INT)
1677 store_info->is_large = true;
1678 store_info->positions_needed.large.count = 0;
1679 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
1683 store_info->is_large = false;
1684 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1686 store_info->group_id = group_id;
1687 store_info->begin = offset;
1688 store_info->end = offset + width;
1689 store_info->is_set = GET_CODE (body) == SET;
1690 store_info->rhs = rhs;
1691 store_info->const_rhs = const_rhs;
1692 store_info->redundant_reason = redundant_reason;
1694 /* If this is a clobber, we return 0. We will only be able to
1695 delete this insn if there is only one USED store, but we
1696 can use the clobber to delete other stores earlier. */
1697 return store_info->is_set ? 1 : 0;
1702 dump_insn_info (const char * start, insn_info_t insn_info)
1704 fprintf (dump_file, "%s insn=%d %s\n", start,
1705 INSN_UID (insn_info->insn),
1706 insn_info->store_rec ? "has store" : "naked");
1710 /* If the modes are different and the value's source and target do not
1711 line up, we need to extract the value from lower part of the rhs of
1712 the store, shift it, and then put it into a form that can be shoved
1713 into the read_insn. This function generates a right SHIFT of a
1714 value that is at least ACCESS_SIZE bytes wide of READ_MODE. The
1715 shift sequence is returned or NULL if we failed to find a
shift. */
1719 find_shift_sequence (int access_size,
1720 store_info *store_info,
1721 machine_mode read_mode,
1722 int shift, bool speed, bool require_cst)
1724 machine_mode store_mode = GET_MODE (store_info->mem);
1725 machine_mode new_mode;
1726 rtx read_reg = NULL;
1728 /* Some machines like the x86 have shift insns for each size of
1729 operand. Other machines like the ppc or the ia-64 may only have
1730 shift insns that shift values within 32 or 64 bit registers.
1731 This loop tries to find the smallest shift insn that will right
1732 justify the value we want to read but is available in one insn on
the machine. */
1735 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1737 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1738 new_mode = GET_MODE_WIDER_MODE (new_mode))
1740 rtx target, new_reg, new_lhs;
1741 rtx_insn *shift_seq, *insn;
1744 /* If a constant was stored into memory, try to simplify it here,
1745 otherwise the cost of the shift might preclude this optimization
1746 e.g. at -Os, even when no actual shift will be needed. */
1747 if (store_info->const_rhs)
1749 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1750 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1752 if (ret && CONSTANT_P (ret))
1754 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1755 ret, GEN_INT (shift));
1756 if (ret && CONSTANT_P (ret))
1758 byte = subreg_lowpart_offset (read_mode, new_mode);
1759 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1760 if (ret && CONSTANT_P (ret)
1761 && (set_src_cost (ret, read_mode, speed)
1762 <= COSTS_N_INSNS (1)))
1771 /* Try a wider mode if truncating the store mode to NEW_MODE
1772 requires a real instruction. */
1773 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1774 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1777 /* Also try a wider mode if the necessary punning is either not
1778 desirable or not possible. */
1779 if (!CONSTANT_P (store_info->rhs)
1780 && !MODES_TIEABLE_P (new_mode, store_mode))
1783 new_reg = gen_reg_rtx (new_mode);
1787 /* In theory we could also check for an ashr. Ian Taylor knows
1788 of one dsp where the cost of these two was not the same. But
1789 this really is a rare case anyway. */
1790 target = expand_binop (new_mode, lshr_optab, new_reg,
1791 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1793 shift_seq = get_insns ();
1796 if (target != new_reg || shift_seq == NULL)
1800 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1802 cost += insn_rtx_cost (PATTERN (insn), speed);
1804 /* The computation up to here is essentially independent
1805 of the arguments and could be precomputed. It may
1806 not be worth doing so. We could precompute if
1807 worthwhile or at least cache the results. The result
1808 technically depends on both SHIFT and ACCESS_SIZE,
1809 but in practice the answer will depend only on ACCESS_SIZE. */
1811 if (cost > COSTS_N_INSNS (1))
1814 new_lhs = extract_low_bits (new_mode, store_mode,
1815 copy_rtx (store_info->rhs));
1816 if (new_lhs == NULL_RTX)
1819 /* We found an acceptable shift. Generate a move to
1820 take the value from the store and put it into the
1821 shift pseudo, then shift it, then generate another
1822 move to put it into the target of the read. */
1823 emit_move_insn (new_reg, new_lhs);
1824 emit_insn (shift_seq);
1825 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1833 /* Call back for note_stores to find the hard regs set or clobbered by
1834 insn. Data is a bitmap of the hardregs set so far. */
1837 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1839 bitmap regs_set = (bitmap) data;
1842 && HARD_REGISTER_P (x))
1843 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
1846 /* Helper function for replace_read and record_store.
1847 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1848 to one before READ_END bytes read in READ_MODE. Return NULL
1849 if not successful. If REQUIRE_CST is true, only a constant is returned. */
1852 get_stored_val (store_info *store_info, machine_mode read_mode,
1853 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1854 basic_block bb, bool require_cst)
1856 machine_mode store_mode = GET_MODE (store_info->mem);
1858 int access_size; /* In bytes. */
1861 /* To get here the read is within the boundaries of the write so
1862 shift will never be negative. Start out with the shift being in
bytes. */
1864 if (store_mode == BLKmode)
shift = 0;
1866 else if (BYTES_BIG_ENDIAN)
1867 shift = store_info->end - read_end;
else
1869 shift = read_begin - store_info->begin;
1871 access_size = shift + GET_MODE_SIZE (read_mode);
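/* Worked example (added comment, not in the original): for a store
   covering bytes [0, 8) and a 4-byte read of bytes [4, 8), a
   little-endian target gets shift = 4 and access_size = 8, while a
   big-endian target gets shift = 0 because the read lines up with the
   most significant end of the stored value. */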
1873 /* From now on it is bits. */
1874 shift *= BITS_PER_UNIT;

if (shift)
1877 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1878 optimize_bb_for_speed_p (bb),
1880 else if (store_mode == BLKmode)
1882 /* The store is a memset (addr, const_val, const_size). */
1883 gcc_assert (CONST_INT_P (store_info->rhs));
1884 store_mode = int_mode_for_mode (read_mode);
1885 if (store_mode == BLKmode)
1886 read_reg = NULL_RTX;
1887 else if (store_info->rhs == const0_rtx)
1888 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1889 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1890 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1891 read_reg = NULL_RTX;
1894 unsigned HOST_WIDE_INT c
1895 = INTVAL (store_info->rhs)
1896 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1897 int shift = BITS_PER_UNIT;
1898 while (shift < HOST_BITS_PER_WIDE_INT)
{
c |= (c << shift);
shift <<= 1;
}
1903 read_reg = gen_int_mode (c, store_mode);
1904 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1907 else if (store_info->const_rhs
&& (require_cst
1909 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1910 read_reg = extract_low_bits (read_mode, store_mode,
1911 copy_rtx (store_info->const_rhs));
else
1913 read_reg = extract_low_bits (read_mode, store_mode,
1914 copy_rtx (store_info->rhs));
1915 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1916 read_reg = NULL_RTX;
1920 /* Take a sequence of:

A <- r1
...
... <- A

and change it into
r2 <- r1
A <- r1
...
... <- r2

or

r3 <- extract (r1)
r3 <- r3 >> shift
r2 <- extract (r3)
... <- r2

or

r2 <- extract (r1)
... <- r2

1943 Depending on the alignment and the mode of the store and
subsequent load.
1947 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1948 and READ_INSN are for the read. Return true if the replacement
went ok. */
1952 replace_read (store_info *store_info, insn_info_t store_insn,
1953 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1956 machine_mode store_mode = GET_MODE (store_info->mem);
1957 machine_mode read_mode = GET_MODE (read_info->mem);
1958 rtx_insn *insns, *this_insn;
1965 /* Create a sequence of instructions to set up the read register.
1966 This sequence goes immediately before the store and its result
1967 is read by the load.
1969 We need to keep this in perspective. We are replacing a read
1970 with a sequence of insns, but the read will almost certainly be
1971 in cache, so it is not going to be an expensive one. Thus, we
1972 are not willing to do a multi insn shift or worse a subroutine
1973 call to get rid of the read. */
1974 if (dump_file && (dump_flags & TDF_DETAILS))
1975 fprintf (dump_file, "trying to replace %smode load in insn %d"
1976 " from %smode store in insn %d\n",
1977 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
1978 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
1980 bb = BLOCK_FOR_INSN (read_insn->insn);
1981 read_reg = get_stored_val (store_info,
1982 read_mode, read_info->begin, read_info->end,
1984 if (read_reg == NULL_RTX)
1987 if (dump_file && (dump_flags & TDF_DETAILS))
1988 fprintf (dump_file, " -- could not extract bits of stored value\n");
1991 /* Force the value into a new register so that it won't be clobbered
1992 between the store and the load. */
1993 read_reg = copy_to_mode_reg (read_mode, read_reg);
1994 insns = get_insns ();
1997 if (insns != NULL_RTX)
1999 /* Now we have to scan the set of new instructions to see if the
2000 sequence contains any sets of hardregs that happened to be
2001 live at this point. For instance, this can happen if one of
2002 the insns sets the CC and the CC happened to be live at that
2003 point. This does occasionally happen, see PR 37922. */
2004 bitmap regs_set = BITMAP_ALLOC (®_obstack);
2006 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2007 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2009 bitmap_and_into (regs_set, regs_live);
2010 if (!bitmap_empty_p (regs_set))
2012 if (dump_file && (dump_flags & TDF_DETAILS))
2015 "abandoning replacement because sequence clobbers live hardregs:");
2016 df_print_regset (dump_file, regs_set);
2019 BITMAP_FREE (regs_set);
2022 BITMAP_FREE (regs_set);
2025 if (validate_change (read_insn->insn, loc, read_reg, 0))
2027 deferred_change *change = deferred_change_pool.allocate ();
2029 /* Insert this right before the store insn where it will be safe
2030 from later insns that might change it before the read. */
2031 emit_insn_before (insns, store_insn->insn);
2033 /* And now for the kludge part: cselib croaks if you just
2034 return at this point. There are two reasons for this:
2036 1) Cselib has an idea of how many pseudos there are and
2037 that does not include the new ones we just added.
2039 2) Cselib does not know about the move insn we added
2040 above the store_info, and there is no way to tell it
2041 about it, because it has "moved on".
2043 Problem (1) is fixable with a certain amount of engineering.
2044 Problem (2) requires starting the bb from scratch. This
2047 So we are just going to have to lie. The move/extraction
2048 insns are not really an issue, cselib did not see them. But
2049 the use of the new pseudo read_insn is a real problem because
2050 cselib has not scanned this insn. The way that we solve this
2051 problem is that we are just going to put the mem back for now
2052 and when we are finished with the block, we undo this. We
2053 keep a table of mems to get rid of. At the end of the basic
2054 block we can put them back. */
2056 *loc = read_info->mem;
2057 change->next = deferred_change_list;
2058 deferred_change_list = change;
2060 change->reg = read_reg;
2062 /* Get rid of the read_info, from the point of view of the
2063 rest of dse, play like this read never happened. */
2064 read_insn->read_rec = read_info->next;
2065 read_info_type_pool.remove (read_info);
2066 if (dump_file && (dump_flags & TDF_DETAILS))
2068 fprintf (dump_file, " -- replaced the loaded MEM with ");
2069 print_simple_rtl (dump_file, read_reg);
2070 fprintf (dump_file, "\n");
2076 if (dump_file && (dump_flags & TDF_DETAILS))
2078 fprintf (dump_file, " -- replacing the loaded MEM with ");
2079 print_simple_rtl (dump_file, read_reg);
2080 fprintf (dump_file, " led to an invalid instruction\n");
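/* Editorial sketch (not part of the pass): the deferred_change list
   used above records "replace *LOC with REG" requests that are only
   applied once cselib is finished with the block.  A minimal
   standalone version of that record-now, apply-later pattern; the
   type and function names are hypothetical and the payload is just an
   int location and value.  */

struct deferred_change_sketch
{
  int *loc;				/* Place to patch later.  */
  int reg;				/* Value to patch in.  */
  struct deferred_change_sketch *next;
};

static void
apply_deferred_changes_sketch (struct deferred_change_sketch *list)
{
  /* At "end of block" time, perform every recorded replacement.
     (The real pass also returns each node to its allocation pool.)  */
  for (; list; list = list->next)
    *list->loc = list->reg;
}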
2086 /* Check the address of MEM *LOC and kill any appropriate stores that may
2090 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2092 rtx mem = *loc, mem_addr;
2093 insn_info_t insn_info;
2094 HOST_WIDE_INT offset = 0;
2095 HOST_WIDE_INT width = 0;
2096 alias_set_type spill_alias_set = 0;
2097 cselib_val *base = NULL;
2099 read_info_t read_info;
2101 insn_info = bb_info->last_insn;
2103 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2104 || (MEM_VOLATILE_P (mem)))
2106 if (dump_file && (dump_flags & TDF_DETAILS))
2107 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2108 add_wild_read (bb_info);
2109 insn_info->cannot_delete = true;
2113 /* If it is reading readonly mem, then there can be no conflict with
2115 if (MEM_READONLY_P (mem))
2118 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2120 if (dump_file && (dump_flags & TDF_DETAILS))
2121 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2122 add_wild_read (bb_info);
2126 if (GET_MODE (mem) == BLKmode)
2129 width = GET_MODE_SIZE (GET_MODE (mem));
2131 read_info = read_info_type_pool.allocate ();
2132 read_info->group_id = group_id;
2133 read_info->mem = mem;
2134 read_info->alias_set = spill_alias_set;
2135 read_info->begin = offset;
2136 read_info->end = offset + width;
2137 read_info->next = insn_info->read_rec;
2138 insn_info->read_rec = read_info;
2139 /* For alias_set != 0 canon_true_dependence should never be called. */
2140 if (spill_alias_set)
2141 mem_addr = NULL_RTX;
2145 mem_addr = base->val_rtx;
2149 = rtx_group_vec[group_id];
2150 mem_addr = group->canon_base_addr;
2152 /* get_addr can only handle VALUE but cannot handle expr like:
2153 VALUE + OFFSET, so call get_addr to get original addr for
2154 mem_addr before plus_constant. */
2155 mem_addr = get_addr (mem_addr);
2157 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2160 /* We ignore the clobbers in store_info. This is mildly aggressive,
2161 but there really should not be a clobber followed by a read. */
2163 if (spill_alias_set)
2165 insn_info_t i_ptr = active_local_stores;
2166 insn_info_t last = NULL;
2168 if (dump_file && (dump_flags & TDF_DETAILS))
2169 fprintf (dump_file, " processing spill load %d\n",
2170 (int) spill_alias_set);
2174 store_info *store_info = i_ptr->store_rec;
2176 /* Skip the clobbers. */
2177 while (!store_info->is_set)
2178 store_info = store_info->next;
2180 if (store_info->alias_set == spill_alias_set)
2182 if (dump_file && (dump_flags & TDF_DETAILS))
2183 dump_insn_info ("removing from active", i_ptr);
2185 active_local_stores_len--;
2187 last->next_local_store = i_ptr->next_local_store;
2189 active_local_stores = i_ptr->next_local_store;
2193 i_ptr = i_ptr->next_local_store;
2196 else if (group_id >= 0)
2198 /* This is the restricted case where the base is a constant or
2199 the frame pointer and offset is a constant. */
2200 insn_info_t i_ptr = active_local_stores;
2201 insn_info_t last = NULL;
2203 if (dump_file && (dump_flags & TDF_DETAILS))
2206 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2209 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2210 group_id, (int)offset, (int)(offset+width));
2215 bool remove = false;
2216 store_info *store_info = i_ptr->store_rec;
2218 /* Skip the clobbers. */
2219 while (!store_info->is_set)
2220 store_info = store_info->next;
2222 /* There are three cases here. */
2223 if (store_info->group_id < 0)
2224 /* We have a cselib store followed by a read from a
2227 = canon_true_dependence (store_info->mem,
2228 GET_MODE (store_info->mem),
2229 store_info->mem_addr,
2232 else if (group_id == store_info->group_id)
2234 /* This is a block mode load. We may get lucky and
2235 canon_true_dependence may save the day. */
2238 = canon_true_dependence (store_info->mem,
2239 GET_MODE (store_info->mem),
2240 store_info->mem_addr,
2243 /* If this read is just reading back something that we just
2244 stored, rewrite the read. */
2248 && offset >= store_info->begin
2249 && offset + width <= store_info->end
2250 && all_positions_needed_p (store_info,
2251 offset - store_info->begin,
2253 && replace_read (store_info, i_ptr, read_info,
2254 insn_info, loc, bb_info->regs_live))
2257 /* The bases are the same, just see if the offsets
2259 if ((offset < store_info->end)
2260 && (offset + width > store_info->begin))
2266 The else case that is missing here is that the
2267 bases are constant but different. There is nothing
2268 to do here because there is no overlap. */
2272 if (dump_file && (dump_flags & TDF_DETAILS))
2273 dump_insn_info ("removing from active", i_ptr);
2275 active_local_stores_len--;
2277 last->next_local_store = i_ptr->next_local_store;
2279 active_local_stores = i_ptr->next_local_store;
2283 i_ptr = i_ptr->next_local_store;
2288 insn_info_t i_ptr = active_local_stores;
2289 insn_info_t last = NULL;
2290 if (dump_file && (dump_flags & TDF_DETAILS))
2292 fprintf (dump_file, " processing cselib load mem:");
2293 print_inline_rtx (dump_file, mem, 0);
2294 fprintf (dump_file, "\n");
2299 bool remove = false;
2300 store_info *store_info = i_ptr->store_rec;
2302 if (dump_file && (dump_flags & TDF_DETAILS))
2303 fprintf (dump_file, " processing cselib load against insn %d\n",
2304 INSN_UID (i_ptr->insn));
2306 /* Skip the clobbers. */
2307 while (!store_info->is_set)
2308 store_info = store_info->next;
2310 /* If this read is just reading back something that we just
2311 stored, rewrite the read. */
2313 && store_info->group_id == -1
2314 && store_info->cse_base == base
2316 && offset >= store_info->begin
2317 && offset + width <= store_info->end
2318 && all_positions_needed_p (store_info,
2319 offset - store_info->begin, width)
2320 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2321 bb_info->regs_live))
2324 if (!store_info->alias_set)
2325 remove = canon_true_dependence (store_info->mem,
2326 GET_MODE (store_info->mem),
2327 store_info->mem_addr,
2332 if (dump_file && (dump_flags & TDF_DETAILS))
2333 dump_insn_info ("removing from active", i_ptr);
2335 active_local_stores_len--;
2337 last->next_local_store = i_ptr->next_local_store;
2339 active_local_stores = i_ptr->next_local_store;
2343 i_ptr = i_ptr->next_local_store;
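/* Editorial sketch (not part of the pass): both loops above decide how
   a read at [OFFSET, OFFSET + WIDTH) interacts with a store at
   [BEGIN, END) using half-open byte ranges: full containment (plus
   all_positions_needed_p) gates replace_read, while mere overlap kills
   the store.  A standalone restatement of those two predicates, with
   hypothetical helper names.  */

static int
ranges_overlap_sketch (long begin, long end, long offset, long width)
{
  return offset < end && offset + width > begin;
}

static int
range_contained_sketch (long begin, long end, long offset, long width)
{
  return offset >= begin && offset + width <= end;
}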
2348 /* A note_uses callback in which DATA points to the INSN_INFO passed
2349 to check_mem_read_rtx. Nullify the pointer if i_m_r_m_r returns
2350 true for any part of *LOC. */
2353 check_mem_read_use (rtx *loc, void *data)
2355 subrtx_ptr_iterator::array_type array;
2356 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2360 check_mem_read_rtx (loc, (bb_info_t) data);
2365 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2366 So far it only handles arguments passed in registers. */
2369 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2371 CUMULATIVE_ARGS args_so_far_v;
2372 cumulative_args_t args_so_far;
2376 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2377 args_so_far = pack_cumulative_args (&args_so_far_v);
2379 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2381 arg != void_list_node && idx < nargs;
2382 arg = TREE_CHAIN (arg), idx++)
2384 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2386 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2387 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2388 || GET_MODE_CLASS (mode) != MODE_INT)
2391 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2393 link = XEXP (link, 1))
2394 if (GET_CODE (XEXP (link, 0)) == USE)
2396 args[idx] = XEXP (XEXP (link, 0), 0);
2397 if (REG_P (args[idx])
2398 && REGNO (args[idx]) == REGNO (reg)
2399 && (GET_MODE (args[idx]) == mode
2400 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2401 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2403 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2404 > GET_MODE_SIZE (mode)))))
2410 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2411 if (GET_MODE (args[idx]) != mode)
2413 if (!tmp || !CONST_INT_P (tmp))
2415 tmp = gen_int_mode (INTVAL (tmp), mode);
2420 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2422 if (arg != void_list_node || idx != nargs)
2427 /* Return a bitmap of the fixed registers contained in IN. */
2430 copy_fixed_regs (const_bitmap in)
2434 ret = ALLOC_REG_SET (NULL);
2435 bitmap_and (ret, in, fixed_reg_set_regset);
2439 /* Apply record_store to all candidate stores in INSN. Mark INSN
2440 if some part of it is not a candidate store and assigns to a
2441 non-register target. */
2444 scan_insn (bb_info_t bb_info, rtx_insn *insn)
2447 insn_info_type *insn_info = insn_info_type_pool.allocate ();
2449 memset (insn_info, 0, sizeof (struct insn_info_type));
2451 if (dump_file && (dump_flags & TDF_DETAILS))
2452 fprintf (dump_file, "\n**scanning insn=%d\n",
2455 insn_info->prev_insn = bb_info->last_insn;
2456 insn_info->insn = insn;
2457 bb_info->last_insn = insn_info;
2459 if (DEBUG_INSN_P (insn))
2461 insn_info->cannot_delete = true;
2465 /* Look at all of the uses in the insn. */
2466 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2471 tree memset_call = NULL_TREE;
2473 insn_info->cannot_delete = true;
2475 /* Const functions cannot do anything bad i.e. read memory,
2476 however, they can read their parameters which may have
2477 been pushed onto the stack.
2478 memset and bzero don't read memory either. */
2479 const_call = RTL_CONST_CALL_P (insn);
2482 rtx call = get_call_rtx_from (insn);
2483 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2485 rtx symbol = XEXP (XEXP (call, 0), 0);
2486 if (SYMBOL_REF_DECL (symbol)
2487 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2489 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2491 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2492 == BUILT_IN_MEMSET))
2493 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2494 memset_call = SYMBOL_REF_DECL (symbol);
2498 if (const_call || memset_call)
2500 insn_info_t i_ptr = active_local_stores;
2501 insn_info_t last = NULL;
2503 if (dump_file && (dump_flags & TDF_DETAILS))
2504 fprintf (dump_file, "%s call %d\n",
2505 const_call ? "const" : "memset", INSN_UID (insn));
2507 /* See the head comment of the frame_read field. */
2508 if (reload_completed
2509 /* Tail calls are storing their arguments using
2510 arg pointer. If it is a frame pointer on the target,
2511 even before reload we need to kill frame pointer based
2513 || (SIBLING_CALL_P (insn)
2514 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2515 insn_info->frame_read = true;
2517 /* Loop over the active stores and remove those which are
2518 killed by the const function call. */
2521 bool remove_store = false;
2523 /* The stack pointer based stores are always killed. */
2524 if (i_ptr->stack_pointer_based)
2525 remove_store = true;
2527 /* If the frame is read, the frame related stores are killed. */
2528 else if (insn_info->frame_read)
2530 store_info *store_info = i_ptr->store_rec;
2532 /* Skip the clobbers. */
2533 while (!store_info->is_set)
2534 store_info = store_info->next;
2536 if (store_info->group_id >= 0
2537 && rtx_group_vec[store_info->group_id]->frame_related)
2538 remove_store = true;
2543 if (dump_file && (dump_flags & TDF_DETAILS))
2544 dump_insn_info ("removing from active", i_ptr);
2546 active_local_stores_len--;
2548 last->next_local_store = i_ptr->next_local_store;
2550 active_local_stores = i_ptr->next_local_store;
2555 i_ptr = i_ptr->next_local_store;
2561 if (get_call_args (insn, memset_call, args, 3)
2562 && CONST_INT_P (args[1])
2563 && CONST_INT_P (args[2])
2564 && INTVAL (args[2]) > 0)
2566 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2567 set_mem_size (mem, INTVAL (args[2]));
2568 body = gen_rtx_SET (mem, args[1]);
2569 mems_found += record_store (body, bb_info);
2570 if (dump_file && (dump_flags & TDF_DETAILS))
2571 fprintf (dump_file, "handling memset as BLKmode store\n");
2572 if (mems_found == 1)
2574 if (active_local_stores_len++
2575 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2577 active_local_stores_len = 1;
2578 active_local_stores = NULL;
2580 insn_info->fixed_regs_live
2581 = copy_fixed_regs (bb_info->regs_live);
2582 insn_info->next_local_store = active_local_stores;
2583 active_local_stores = insn_info;
2588 else if (SIBLING_CALL_P (insn) && reload_completed)
2589 /* Arguments for a sibling call that are pushed to memory are passed
2590 using the incoming argument pointer of the current function. After
2591 reload that might be (and likely is) frame pointer based. */
2592 add_wild_read (bb_info);
2594 /* Every other call, including pure functions, may read any memory
2595 that is not relative to the frame. */
2596 add_non_frame_wild_read (bb_info);
2601 /* Assuming that there are sets in these insns, we cannot delete
2603 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2604 || volatile_refs_p (PATTERN (insn))
2605 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2606 || (RTX_FRAME_RELATED_P (insn))
2607 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2608 insn_info->cannot_delete = true;
2610 body = PATTERN (insn);
2611 if (GET_CODE (body) == PARALLEL)
2614 for (i = 0; i < XVECLEN (body, 0); i++)
2615 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2618 mems_found += record_store (body, bb_info);
2620 if (dump_file && (dump_flags & TDF_DETAILS))
2621 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2622 mems_found, insn_info->cannot_delete ? "true" : "false");
2624 /* If we found exactly one set of a mem, add it to active_local_stores so
2625 that it can be locally deleted if found dead, or used for
2626 replace_read and redundant constant store elimination. Otherwise mark
2627 it as cannot delete. This simplifies the processing later. */
2628 if (mems_found == 1)
2630 if (active_local_stores_len++
2631 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2633 active_local_stores_len = 1;
2634 active_local_stores = NULL;
2636 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2637 insn_info->next_local_store = active_local_stores;
2638 active_local_stores = insn_info;
2641 insn_info->cannot_delete = true;
2645 /* Remove BASE from the set of active_local_stores. This is a
2646 callback from cselib that is used to get rid of the stores in
2647 active_local_stores. */
2650 remove_useless_values (cselib_val *base)
2652 insn_info_t insn_info = active_local_stores;
2653 insn_info_t last = NULL;
2657 store_info *store_info = insn_info->store_rec;
2660 /* If ANY of the store_infos match the cselib group that is
2661 being deleted, then the insn cannot be deleted. */
2664 if ((store_info->group_id == -1)
2665 && (store_info->cse_base == base))
2670 store_info = store_info->next;
2675 active_local_stores_len--;
2677 last->next_local_store = insn_info->next_local_store;
2679 active_local_stores = insn_info->next_local_store;
2680 free_store_info (insn_info);
2685 insn_info = insn_info->next_local_store;
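/* Editorial sketch (not part of the pass): active_local_stores is
   pruned in several places above with the same "walk with a LAST
   pointer" idiom: when the current node must go, either its
   predecessor or the list head is redirected past it.  A standalone
   version over a plain int list; the names are hypothetical and nodes
   are not freed here.  */

struct store_node_sketch
{
  int id;
  struct store_node_sketch *next;
};

static struct store_node_sketch *
remove_matching_sketch (struct store_node_sketch *head, int id)
{
  struct store_node_sketch *node = head, *last = 0;
  while (node)
    {
      struct store_node_sketch *next = node->next;
      if (node->id == id)
	{
	  if (last)
	    last->next = next;		/* Unlink an interior node.  */
	  else
	    head = next;		/* Unlink the current head.  */
	}
      else
	last = node;			/* Keep it and remember it as LAST.  */
      node = next;
    }
  return head;
}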
2690 /* Do all of step 1. */
2696 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2699 all_blocks = BITMAP_ALLOC (NULL);
2700 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2701 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2703 FOR_ALL_BB_FN (bb, cfun)
2706 bb_info_t bb_info = dse_bb_info_type_pool.allocate ();
2708 memset (bb_info, 0, sizeof (dse_bb_info_type));
2709 bitmap_set_bit (all_blocks, bb->index);
2710 bb_info->regs_live = regs_live;
2712 bitmap_copy (regs_live, DF_LR_IN (bb));
2713 df_simulate_initialize_forwards (bb, regs_live);
2715 bb_table[bb->index] = bb_info;
2716 cselib_discard_hook = remove_useless_values;
2718 if (bb->index >= NUM_FIXED_BLOCKS)
2722 active_local_stores = NULL;
2723 active_local_stores_len = 0;
2724 cselib_clear_table ();
2726 /* Scan the insns. */
2727 FOR_BB_INSNS (bb, insn)
2730 scan_insn (bb_info, insn);
2731 cselib_process_insn (insn);
2733 df_simulate_one_insn_forwards (bb, insn, regs_live);
2736 /* This is something of a hack, because the global algorithm
2737 is supposed to take care of the case where stores go dead
2738 at the end of the function. However, the global
2739 algorithm must take a more conservative view of block
2740 mode reads than the local alg does. So to get the case
2741 where you have a store to the frame followed by a non-
2742 overlapping block mode read, we look at the active local
2743 stores at the end of the function and delete all of the
2744 frame and spill based ones. */
2745 if (stores_off_frame_dead_at_return
2746 && (EDGE_COUNT (bb->succs) == 0
2747 || (single_succ_p (bb)
2748 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2749 && ! crtl->calls_eh_return)))
2751 insn_info_t i_ptr = active_local_stores;
2754 store_info *store_info = i_ptr->store_rec;
2756 /* Skip the clobbers. */
2757 while (!store_info->is_set)
2758 store_info = store_info->next;
2759 if (store_info->alias_set && !i_ptr->cannot_delete)
2760 delete_dead_store_insn (i_ptr);
2762 if (store_info->group_id >= 0)
2765 = rtx_group_vec[store_info->group_id];
2766 if (group->frame_related && !i_ptr->cannot_delete)
2767 delete_dead_store_insn (i_ptr);
2770 i_ptr = i_ptr->next_local_store;
2774 /* Get rid of the loads that were discovered in
2775 replace_read. Cselib is finished with this block. */
2776 while (deferred_change_list)
2778 deferred_change *next = deferred_change_list->next;
2780 /* There is no reason to validate this change. That was
2782 *deferred_change_list->loc = deferred_change_list->reg;
2783 deferred_change_pool.remove (deferred_change_list);
2784 deferred_change_list = next;
2787 /* Get rid of all of the cselib based store_infos in this
2788 block and mark the containing insns as not being
2790 ptr = bb_info->last_insn;
2793 if (ptr->contains_cselib_groups)
2795 store_info *s_info = ptr->store_rec;
2796 while (s_info && !s_info->is_set)
2797 s_info = s_info->next;
2799 && s_info->redundant_reason
2800 && s_info->redundant_reason->insn
2801 && !ptr->cannot_delete)
2803 if (dump_file && (dump_flags & TDF_DETAILS))
2804 fprintf (dump_file, "Locally deleting insn %d "
2805 "because insn %d stores the "
2806 "same value and couldn't be "
2808 INSN_UID (ptr->insn),
2809 INSN_UID (s_info->redundant_reason->insn));
2810 delete_dead_store_insn (ptr);
2812 free_store_info (ptr);
2818 /* Free at least positions_needed bitmaps. */
2819 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2820 if (s_info->is_large)
2822 BITMAP_FREE (s_info->positions_needed.large.bmap);
2823 s_info->is_large = false;
2826 ptr = ptr->prev_insn;
2829 cse_store_info_pool.release ();
2831 bb_info->regs_live = NULL;
2834 BITMAP_FREE (regs_live);
2836 rtx_group_table->empty ();
2840 /*----------------------------------------------------------------------------
2843 Assign each byte position in the stores that we are going to
2844 analyze globally to a position in the bitmaps. Returns true if
2845 there are any bit positions assigned.
2846 ----------------------------------------------------------------------------*/
2849 dse_step2_init (void)
2854 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2856 /* For all non stack related bases, we only consider a store to
2857 be deletable if there are two or more stores for that
2858 position. This is because it takes one store to make the
2859 other store redundant. However, for the stores that are
2860 stack related, we consider them if there is only one store
2861 for the position. We do this because the stack related
2862 stores can be deleted if there is no read between them and
2863 the end of the function.
2865 To make this work in the current framework, we take the stack
2866 related bases and add all of the bits from store1 into store2.
2867 This has the effect of making them eligible even if there is
2870 if (stores_off_frame_dead_at_return && group->frame_related)
2872 bitmap_ior_into (group->store2_n, group->store1_n);
2873 bitmap_ior_into (group->store2_p, group->store1_p);
2874 if (dump_file && (dump_flags & TDF_DETAILS))
2875 fprintf (dump_file, "group %d is frame related ", i);
2878 group->offset_map_size_n++;
2879 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2880 group->offset_map_size_n);
2881 group->offset_map_size_p++;
2882 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2883 group->offset_map_size_p);
2884 group->process_globally = false;
2885 if (dump_file && (dump_flags & TDF_DETAILS))
2887 fprintf (dump_file, "group %d(%d+%d): ", i,
2888 (int)bitmap_count_bits (group->store2_n),
2889 (int)bitmap_count_bits (group->store2_p));
2890 bitmap_print (dump_file, group->store2_n, "n ", " ");
2891 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2897 /* Init the offset tables for the normal case. */
2900 dse_step2_nospill (void)
2904 /* Position 0 is unused because 0 is used in the maps to mean
2906 current_position = 1;
2907 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2912 if (group == clear_alias_group)
2915 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2916 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
2917 bitmap_clear (group->group_kill);
2919 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2921 bitmap_set_bit (group->group_kill, current_position);
2922 if (bitmap_bit_p (group->escaped_n, j))
2923 bitmap_set_bit (kill_on_calls, current_position);
2924 group->offset_map_n[j] = current_position++;
2925 group->process_globally = true;
2927 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2929 bitmap_set_bit (group->group_kill, current_position);
2930 if (bitmap_bit_p (group->escaped_p, j))
2931 bitmap_set_bit (kill_on_calls, current_position);
2932 group->offset_map_p[j] = current_position++;
2933 group->process_globally = true;
2936 return current_position != 1;
2941 /*----------------------------------------------------------------------------
2944 Build the bit vectors for the transfer functions.
2945 ----------------------------------------------------------------------------*/
2948 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2952 get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
2956 HOST_WIDE_INT offset_p = -offset;
2957 if (offset_p >= group_info->offset_map_size_n)
2959 return group_info->offset_map_n[offset_p];
2963 if (offset >= group_info->offset_map_size_p)
2965 return group_info->offset_map_p[offset];
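/* Editorial sketch (not part of the pass): get_bitmap_index above keeps
   two dense arrays per group, one indexed by -OFFSET for negative
   offsets and one by OFFSET for non-negative ones, with 0 meaning
   "this byte was never assigned a dataflow position" (positions start
   at 1 in step 2).  A standalone restatement with hypothetical names.  */

static int
bitmap_index_sketch (const int *map_n, int size_n,
		     const int *map_p, int size_p, long offset)
{
  if (offset < 0)
    {
      long off_n = -offset;
      return off_n < size_n ? map_n[off_n] : 0;
    }
  return offset < size_p ? map_p[offset] : 0;
}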
2970 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2974 scan_stores_nospill (store_info *store_info, bitmap gen, bitmap kill)
2979 group_info *group_info
2980 = rtx_group_vec[store_info->group_id];
2981 if (group_info->process_globally)
2982 for (i = store_info->begin; i < store_info->end; i++)
2984 int index = get_bitmap_index (group_info, i);
2987 bitmap_set_bit (gen, index);
2989 bitmap_clear_bit (kill, index);
2992 store_info = store_info->next;
2997 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
3001 scan_stores_spill (store_info *store_info, bitmap gen, bitmap kill)
3005 if (store_info->alias_set)
3007 int index = get_bitmap_index (clear_alias_group,
3008 store_info->alias_set);
3011 bitmap_set_bit (gen, index);
3013 bitmap_clear_bit (kill, index);
3016 store_info = store_info->next;
3021 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3025 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3027 read_info_t read_info = insn_info->read_rec;
3031 /* If this insn reads the frame, kill all the frame related stores. */
3032 if (insn_info->frame_read)
3034 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3035 if (group->process_globally && group->frame_related)
3038 bitmap_ior_into (kill, group->group_kill);
3039 bitmap_and_compl_into (gen, group->group_kill);
3042 if (insn_info->non_frame_wild_read)
3044 /* Kill all non-frame related stores. Kill all stores of variables that
3047 bitmap_ior_into (kill, kill_on_calls);
3048 bitmap_and_compl_into (gen, kill_on_calls);
3049 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3050 if (group->process_globally && !group->frame_related)
3053 bitmap_ior_into (kill, group->group_kill);
3054 bitmap_and_compl_into (gen, group->group_kill);
3059 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3061 if (group->process_globally)
3063 if (i == read_info->group_id)
3065 if (read_info->begin > read_info->end)
3067 /* Begin > end for block mode reads. */
3069 bitmap_ior_into (kill, group->group_kill);
3070 bitmap_and_compl_into (gen, group->group_kill);
3074 /* The groups are the same, just process the
3077 for (j = read_info->begin; j < read_info->end; j++)
3079 int index = get_bitmap_index (group, j);
3083 bitmap_set_bit (kill, index);
3084 bitmap_clear_bit (gen, index);
3091 /* The groups are different; if the alias sets
3092 conflict, clear the entire group. We only need
3093 to apply this test if the read_info is a cselib
3094 read. Anything with a constant base cannot alias
3095 something else with a different constant
3097 if ((read_info->group_id < 0)
3098 && canon_true_dependence (group->base_mem,
3099 GET_MODE (group->base_mem),
3100 group->canon_base_addr,
3101 read_info->mem, NULL_RTX))
3104 bitmap_ior_into (kill, group->group_kill);
3105 bitmap_and_compl_into (gen, group->group_kill);
3111 read_info = read_info->next;
3115 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3119 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3123 if (read_info->alias_set)
3125 int index = get_bitmap_index (clear_alias_group,
3126 read_info->alias_set);
3130 bitmap_set_bit (kill, index);
3131 bitmap_clear_bit (gen, index);
3135 read_info = read_info->next;
3140 /* Return the insn in BB_INFO before the first wild read or if there
3141 are no wild reads in the block, return the last insn. */
3144 find_insn_before_first_wild_read (bb_info_t bb_info)
3146 insn_info_t insn_info = bb_info->last_insn;
3147 insn_info_t last_wild_read = NULL;
3151 if (insn_info->wild_read)
3153 last_wild_read = insn_info->prev_insn;
3154 /* Block starts with wild read. */
3155 if (!last_wild_read)
3159 insn_info = insn_info->prev_insn;
3163 return last_wild_read;
3165 return bb_info->last_insn;
3169 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3170 the block in order to build the gen and kill sets for the block.
3171 We start at ptr which may be the last insn in the block or may be
3172 the first insn with a wild read. In the latter case we are able to
3173 skip the rest of the block because it just does not matter:
3174 anything that happens is hidden by the wild read. */
3177 dse_step3_scan (bool for_spills, basic_block bb)
3179 bb_info_t bb_info = bb_table[bb->index];
3180 insn_info_t insn_info;
3183 /* There are no wild reads in the spill case. */
3184 insn_info = bb_info->last_insn;
3186 insn_info = find_insn_before_first_wild_read (bb_info);
3188 /* In the spill case or in the no_spill case if there is no wild
3189 read in the block, we will need a kill set. */
3190 if (insn_info == bb_info->last_insn)
3193 bitmap_clear (bb_info->kill);
3195 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3199 BITMAP_FREE (bb_info->kill);
3203 /* There may have been code deleted by the dce pass run before
3205 if (insn_info->insn && INSN_P (insn_info->insn))
3207 /* Process the read(s) last. */
3210 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3211 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3215 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3216 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3220 insn_info = insn_info->prev_insn;
3225 /* Set the gen set of the exit block, and also any block with no
3226 successors that does not have a wild read. */
3229 dse_step3_exit_block_scan (bb_info_t bb_info)
3231 /* The gen set is all 0's for the exit block except for the
3232 frame_pointer_group. */
3234 if (stores_off_frame_dead_at_return)
3239 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3241 if (group->process_globally && group->frame_related)
3242 bitmap_ior_into (bb_info->gen, group->group_kill);
3248 /* Find all of the blocks that are not backwards reachable from the
3249 exit block or any block with no successors (BB). These are the
3250 infinite loops or infinite self loops. These blocks will still
3251 have their bits set in UNREACHABLE_BLOCKS. */
3254 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3259 if (bitmap_bit_p (unreachable_blocks, bb->index))
3261 bitmap_clear_bit (unreachable_blocks, bb->index);
3262 FOR_EACH_EDGE (e, ei, bb->preds)
3264 mark_reachable_blocks (unreachable_blocks, e->src);
3269 /* Build the transfer functions for the function. */
3272 dse_step3 (bool for_spills)
3275 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3276 sbitmap_iterator sbi;
3277 bitmap all_ones = NULL;
3280 bitmap_ones (unreachable_blocks);
3282 FOR_ALL_BB_FN (bb, cfun)
3284 bb_info_t bb_info = bb_table[bb->index];
3286 bitmap_clear (bb_info->gen);
3288 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3290 if (bb->index == ENTRY_BLOCK)
3292 else if (bb->index == EXIT_BLOCK)
3293 dse_step3_exit_block_scan (bb_info);
3295 dse_step3_scan (for_spills, bb);
3296 if (EDGE_COUNT (bb->succs) == 0)
3297 mark_reachable_blocks (unreachable_blocks, bb);
3299 /* If this is the second time dataflow is run, delete the old
3302 BITMAP_FREE (bb_info->in);
3304 BITMAP_FREE (bb_info->out);
3307 /* For any block in an infinite loop, we must initialize the out set
3308 to all ones. This could be expensive, but almost never occurs in
3309 practice. However, it is common in regression tests. */
3310 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3312 if (bitmap_bit_p (all_blocks, i))
3314 bb_info_t bb_info = bb_table[i];
3320 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3321 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3322 bitmap_ior_into (all_ones, group->group_kill);
3326 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3327 bitmap_copy (bb_info->out, all_ones);
3333 BITMAP_FREE (all_ones);
3334 sbitmap_free (unreachable_blocks);
3339 /*----------------------------------------------------------------------------
3342 Solve the bitvector equations.
3343 ----------------------------------------------------------------------------*/
3346 /* Confluence function for blocks with no successors. Create an out
3347 set from the gen set of the exit block. This block logically has
3348 the exit block as a successor. */
3353 dse_confluence_0 (basic_block bb)
3355 bb_info_t bb_info = bb_table[bb->index];
3357 if (bb->index == EXIT_BLOCK)
3362 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3363 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3367 /* Propagate the information from the in set of the dest of E to the
3368 out set of the src of E. If the various in or out sets are not
3369 there, that means they are all ones. */
3372 dse_confluence_n (edge e)
3374 bb_info_t src_info = bb_table[e->src->index];
3375 bb_info_t dest_info = bb_table[e->dest->index];
3380 bitmap_and_into (src_info->out, dest_info->in);
3383 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3384 bitmap_copy (src_info->out, dest_info->in);
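/* Editorial sketch (not part of the pass): the confluence step above
   ands the successor's IN set into the predecessor's OUT set, where a
   set that has not been allocated yet stands for "all ones" (the top
   of the lattice).  A standalone restatement over single 64-bit words,
   using validity flags instead of missing bitmaps; the name is
   hypothetical.  */

static unsigned long long
confluence_sketch (unsigned long long out, int out_valid,
		   unsigned long long in, int in_valid)
{
  if (!in_valid)
    return out_valid ? out : ~0ULL;	/* Successor IN still "all ones".  */
  return out_valid ? (out & in) : in;	/* Meet, or first copy of IN.  */
}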
3391 /* Propagate the info from the out to the in set of BB_INDEX's basic
3392 block. There are three cases:
3394 1) The block has no kill set. In this case the kill set is all
3395 ones. It does not matter what the out set of the block is, none of
3396 the info can reach the top. The only thing that reaches the top is
3397 the gen set and we just copy the set.
3399 2) There is a kill set but no out set and bb has successors. In
3400 this case we just return. Eventually an out set will be created and
3401 it is better to wait than to create a set of ones.
3403 3) There is both a kill and out set. We apply the obvious transfer
3408 dse_transfer_function (int bb_index)
3410 bb_info_t bb_info = bb_table[bb_index];
3418 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3419 bb_info->out, bb_info->kill);
3422 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3423 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3424 bb_info->out, bb_info->kill);
3434 /* Case 1 above. If there is already an in set, nothing
3440 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3441 bitmap_copy (bb_info->in, bb_info->gen);
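/* Editorial sketch (not part of the pass): the transfer function above
   computes IN = GEN | (OUT & ~KILL) when both KILL and OUT exist,
   copies GEN when there is no KILL set (KILL is then implicitly all
   ones), and leaves IN alone until OUT has been computed.  A
   standalone restatement over single 64-bit words; the name is
   hypothetical and the "OUT missing" case just returns GEN as a
   stand-in for waiting.  */

static unsigned long long
transfer_sketch (unsigned long long gen, unsigned long long out,
		 unsigned long long kill, int have_kill, int have_out)
{
  if (!have_kill)
    return gen;				/* KILL == all ones: only GEN survives.  */
  if (!have_out)
    return gen;				/* Real pass waits for OUT instead.  */
  return gen | (out & ~kill);		/* The usual backward DSE transfer.  */
}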
3447 /* Solve the dataflow equations. */
3452 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3453 dse_confluence_n, dse_transfer_function,
3454 all_blocks, df_get_postorder (DF_BACKWARD),
3455 df_get_n_blocks (DF_BACKWARD));
3456 if (dump_file && (dump_flags & TDF_DETAILS))
3460 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3461 FOR_ALL_BB_FN (bb, cfun)
3463 bb_info_t bb_info = bb_table[bb->index];
3465 df_print_bb_index (bb, dump_file);
3467 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3469 fprintf (dump_file, " in: *MISSING*\n");
3471 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3473 fprintf (dump_file, " gen: *MISSING*\n");
3475 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3477 fprintf (dump_file, " kill: *MISSING*\n");
3479 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3481 fprintf (dump_file, " out: *MISSING*\n\n");
3488 /*----------------------------------------------------------------------------
3491 Delete the stores that can only be deleted using the global information.
3492 ----------------------------------------------------------------------------*/
3496 dse_step5_nospill (void)
3499 FOR_EACH_BB_FN (bb, cfun)
3501 bb_info_t bb_info = bb_table[bb->index];
3502 insn_info_t insn_info = bb_info->last_insn;
3503 bitmap v = bb_info->out;
3507 bool deleted = false;
3508 if (dump_file && insn_info->insn)
3510 fprintf (dump_file, "starting to process insn %d\n",
3511 INSN_UID (insn_info->insn));
3512 bitmap_print (dump_file, v, " v: ", "\n");
3515 /* There may have been code deleted by the dce pass run before
3518 && INSN_P (insn_info->insn)
3519 && (!insn_info->cannot_delete)
3520 && (!bitmap_empty_p (v)))
3522 store_info *store_info = insn_info->store_rec;
3524 /* Try to delete the current insn. */
3527 /* Skip the clobbers. */
3528 while (!store_info->is_set)
3529 store_info = store_info->next;
3531 if (store_info->alias_set)
3536 group_info *group_info
3537 = rtx_group_vec[store_info->group_id];
3539 for (i = store_info->begin; i < store_info->end; i++)
3541 int index = get_bitmap_index (group_info, i);
3543 if (dump_file && (dump_flags & TDF_DETAILS))
3544 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3545 if (index == 0 || !bitmap_bit_p (v, index))
3547 if (dump_file && (dump_flags & TDF_DETAILS))
3548 fprintf (dump_file, "failing at i = %d\n", (int)i);
3557 && check_for_inc_dec_1 (insn_info))
3559 delete_insn (insn_info->insn);
3560 insn_info->insn = NULL;
3565 /* We do want to process the local info if the insn was
3566 deleted. For instance, if the insn did a wild read, we
3567 no longer need to trash the info. */
3569 && INSN_P (insn_info->insn)
3572 scan_stores_nospill (insn_info->store_rec, v, NULL);
3573 if (insn_info->wild_read)
3575 if (dump_file && (dump_flags & TDF_DETAILS))
3576 fprintf (dump_file, "wild read\n");
3579 else if (insn_info->read_rec
3580 || insn_info->non_frame_wild_read)
3582 if (dump_file && !insn_info->non_frame_wild_read)
3583 fprintf (dump_file, "regular read\n");
3584 else if (dump_file && (dump_flags & TDF_DETAILS))
3585 fprintf (dump_file, "non-frame wild read\n");
3586 scan_reads_nospill (insn_info, v, NULL);
3590 insn_info = insn_info->prev_insn;
3597 /*----------------------------------------------------------------------------
3600 Delete stores made redundant by earlier stores (which store the same
3601 value) that couldn't be eliminated.
3602 ----------------------------------------------------------------------------*/
3609 FOR_ALL_BB_FN (bb, cfun)
3611 bb_info_t bb_info = bb_table[bb->index];
3612 insn_info_t insn_info = bb_info->last_insn;
3616 /* There may have been code deleted by the dce pass run before
3619 && INSN_P (insn_info->insn)
3620 && !insn_info->cannot_delete)
3622 store_info *s_info = insn_info->store_rec;
3624 while (s_info && !s_info->is_set)
3625 s_info = s_info->next;
3627 && s_info->redundant_reason
3628 && s_info->redundant_reason->insn
3629 && INSN_P (s_info->redundant_reason->insn))
3631 rtx_insn *rinsn = s_info->redundant_reason->insn;
3632 if (dump_file && (dump_flags & TDF_DETAILS))
3633 fprintf (dump_file, "Locally deleting insn %d "
3634 "because insn %d stores the "
3635 "same value and couldn't be "
3637 INSN_UID (insn_info->insn),
3639 delete_dead_store_insn (insn_info);
3642 insn_info = insn_info->prev_insn;
3647 /*----------------------------------------------------------------------------
3650 Destroy everything left standing.
3651 ----------------------------------------------------------------------------*/
3656 bitmap_obstack_release (&dse_bitmap_obstack);
3657 obstack_free (&dse_obstack, NULL);
3659 end_alias_analysis ();
3661 delete rtx_group_table;
3662 rtx_group_table = NULL;
3663 rtx_group_vec.release ();
3664 BITMAP_FREE (all_blocks);
3665 BITMAP_FREE (scratch);
3667 rtx_store_info_pool.release ();
3668 read_info_type_pool.release ();
3669 insn_info_type_pool.release ();
3670 dse_bb_info_type_pool.release ();
3671 group_info_pool.release ();
3672 deferred_change_pool.release ();
3676 /* -------------------------------------------------------------------------
3678 ------------------------------------------------------------------------- */
3680 /* Callback for running pass_rtl_dse. */
3683 rest_of_handle_dse (void)
3685 df_set_flags (DF_DEFER_INSN_RESCAN);
3687 /* Need the notes since we must track live hardregs in the forwards
3689 df_note_add_problem ();
3695 if (dse_step2_nospill ())
3697 df_set_flags (DF_LR_RUN_DCE);
3699 if (dump_file && (dump_flags & TDF_DETAILS))
3700 fprintf (dump_file, "doing global processing\n");
3703 dse_step5_nospill ();
3710 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3711 locally_deleted, globally_deleted, spill_deleted);
3713 /* DSE can eliminate potentially-trapping MEMs.
3714 Remove any EH edges associated with them. */
3715 if ((locally_deleted || globally_deleted)
3716 && cfun->can_throw_non_call_exceptions
3717 && purge_all_dead_edges ())
3725 const pass_data pass_data_rtl_dse1 =
3727 RTL_PASS, /* type */
3729 OPTGROUP_NONE, /* optinfo_flags */
3730 TV_DSE1, /* tv_id */
3731 0, /* properties_required */
3732 0, /* properties_provided */
3733 0, /* properties_destroyed */
3734 0, /* todo_flags_start */
3735 TODO_df_finish, /* todo_flags_finish */
3738 class pass_rtl_dse1 : public rtl_opt_pass
3741 pass_rtl_dse1 (gcc::context *ctxt)
3742 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
3745 /* opt_pass methods: */
3746 virtual bool gate (function *)
3748 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3751 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3753 }; // class pass_rtl_dse1
3758 make_pass_rtl_dse1 (gcc::context *ctxt)
3760 return new pass_rtl_dse1 (ctxt);
3765 const pass_data pass_data_rtl_dse2 =
3767 RTL_PASS, /* type */
3769 OPTGROUP_NONE, /* optinfo_flags */
3770 TV_DSE2, /* tv_id */
3771 0, /* properties_required */
3772 0, /* properties_provided */
3773 0, /* properties_destroyed */
3774 0, /* todo_flags_start */
3775 TODO_df_finish, /* todo_flags_finish */
3778 class pass_rtl_dse2 : public rtl_opt_pass
3781 pass_rtl_dse2 (gcc::context *ctxt)
3782 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
3785 /* opt_pass methods: */
3786 virtual bool gate (function *)
3788 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3791 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3793 }; // class pass_rtl_dse2
3798 make_pass_rtl_dse2 (gcc::context *ctxt)
3800 return new pass_rtl_dse2 (ctxt);