1 /* Dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 This file provides some dataflow routines for computing reaching defs,
27 upward exposed uses, live variables, def-use chains, and use-def
28 chains. The global dataflow is performed using simple iterative
29 methods with a worklist and could be sped up by ordering the blocks
30 with a depth first search order.
32 A `struct ref' data structure (ref) is allocated for every register
33 reference (def or use) and this records the insn and bb the ref is
34 found within. The refs are linked together in chains of uses and defs
35 for each insn and for each register. Each ref also has a chain field
36 that links all the use refs for a def or all the def refs for a use.
37 This is used to create use-def or def-use chains.
42 Here's an example of using the dataflow routines.
48 df_analyse (df, 0, DF_ALL);
50 df_dump (df, DF_ALL, stderr);
55 df_init simply creates a poor man's object (df) that needs to be
56 passed to all the dataflow routines. df_finish destroys this
57 object and frees up any allocated memory.
59 df_analyse performs the following:
61 1. Records defs and uses by scanning the insns in each basic block
62 or by scanning the insns queued by df_insn_modify.
63 2. Links defs and uses into insn-def and insn-use chains.
64 3. Links defs and uses into reg-def and reg-use chains.
65 4. Assigns LUIDs to each insn (for modified blocks).
66 5. Calculates local reaching definitions.
67 6. Calculates global reaching definitions.
68 7. Creates use-def chains.
69 8. Calculates local reaching uses (upwards exposed uses).
70 9. Calculates global reaching uses.
71 10. Creates def-use chains.
72 11. Calculates local live registers.
73 12. Calculates global live registers.
74 13. Calculates register lifetimes and determines local registers.
79 Note that the dataflow information is not updated for every newly
80 deleted or created insn. If the dataflow information requires
81 updating then all the changed, new, or deleted insns needs to be
82 marked with df_insn_modify (or df_insns_modify) either directly or
83 indirectly (say through calling df_insn_delete). df_insn_modify
84 marks all the modified insns to get processed the next time df_analyse is called.
87 Beware that tinkering with insns may invalidate the dataflow information.
88 The philosophy behind these routines is that once the dataflow
89 information has been gathered, the user should store what they require
90 before they tinker with any insn. Once a reg is replaced, for example,
91 then the reg-def/reg-use chains will point to the wrong place. Once a
92 whole lot of changes have been made, df_analyse can be called again
93 to update the dataflow information. Currently, this is not very smart
94 with regard to propagating changes to the dataflow so it should not
100 The basic object is a REF (reference) and this may either be a DEF
101 (definition) or a USE of a register.
103 These are linked into a variety of lists; namely reg-def, reg-use,
104 insn-def, insn-use, def-use, and use-def lists. For example,
105 the reg-def lists contain all the refs that define a given register
106 while the insn-use lists contain all the refs used by an insn.
108 Note that the reg-def and reg-use chains are generally short (except for the
109 hard registers) and thus it is much faster to search these chains
110 rather than searching the def or use bitmaps.
112 If the insns are in SSA form then the reg-def and use-def lists
113 should only contain the single defining ref.
117 1) Incremental dataflow analysis.
119 Note that if a loop invariant insn is hoisted (or sunk), we do not
120 need to change the def-use or use-def chains. All we have to do is to
121 change the bb field for all the associated defs and uses and to
122 renumber the LUIDs for the original and new basic blocks of the insn.
124 When shadowing loop mems we create new uses and defs for new pseudos
125 so we do not affect the existing dataflow information.
127 My current strategy is to queue up all modified, created, or deleted
128 insns so when df_analyse is called we can easily determine all the new
129 or deleted refs. Currently the global dataflow information is
130 recomputed from scratch but this could be propagated more efficiently.
132 2) Improved global data flow computation using depth first search.
134 3) Reduced memory requirements.
136 We could operate a pool of ref structures. When a ref is deleted it
137 gets returned to the pool (say by linking on to a chain of free refs).
138 This will require a pair of bitmaps for defs and uses so that we can
139 tell which ones have been changed. Alternatively, we could
140 periodically squeeze the def and use tables and associated bitmaps and
141 renumber the def and use ids.
143 4) Ordering of reg-def and reg-use lists.
145 Should the first entry in the def list be the first def (within a BB)?
146 Similarly, should the first entry in the use list be the last use (within a BB)?
149 5) Working with a sub-CFG.
151 Often the whole CFG does not need to be analyzed, for example,
152 when optimising a loop, only certain registers are of interest.
153 Perhaps there should be a bitmap argument to df_analyse to specify
154 which registers should be analyzed? */
158 #include "coretypes.h"
162 #include "insn-config.h"
164 #include "function.h"
167 #include "hard-reg-set.h"
168 #include "basic-block.h"
174 #define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
177 unsigned int node_; \
178 EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, \
179 {(BB) = BASIC_BLOCK (node_); CODE;}); \
183 static struct obstack df_ref_obstack;
184 static struct df *ddf;
186 static void df_reg_table_realloc PARAMS((struct df *, int));
188 static void df_def_table_realloc PARAMS((struct df *, int));
190 static void df_insn_table_realloc PARAMS((struct df *, unsigned int));
191 static void df_bitmaps_alloc PARAMS((struct df *, int));
192 static void df_bitmaps_free PARAMS((struct df *, int));
193 static void df_free PARAMS((struct df *));
194 static void df_alloc PARAMS((struct df *, int));
196 static rtx df_reg_clobber_gen PARAMS((unsigned int));
197 static rtx df_reg_use_gen PARAMS((unsigned int));
199 static inline struct df_link *df_link_create PARAMS((struct ref *,
201 static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
202 static void df_def_unlink PARAMS((struct df *, struct ref *));
203 static void df_use_unlink PARAMS((struct df *, struct ref *));
204 static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
206 static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
207 static void df_refs_unlink PARAMS ((struct df *, bitmap));
210 static struct ref *df_ref_create PARAMS((struct df *,
212 enum df_ref_type, enum df_ref_flags));
213 static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
214 rtx, enum df_ref_type,
216 static void df_ref_record PARAMS((struct df *, rtx, rtx *,
217 rtx, enum df_ref_type,
219 static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
220 static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
221 static void df_uses_record PARAMS((struct df *, rtx *,
222 enum df_ref_type, basic_block, rtx,
224 static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
225 static void df_bb_refs_record PARAMS((struct df *, basic_block));
226 static void df_refs_record PARAMS((struct df *, bitmap));
228 static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
229 static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
230 static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
231 static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
232 static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
233 static void df_du_chain_create PARAMS((struct df *, bitmap));
234 static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
235 static void df_ud_chain_create PARAMS((struct df *, bitmap));
236 static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
237 static void df_rd_local_compute PARAMS((struct df *, bitmap));
238 static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
239 static void df_ru_local_compute PARAMS((struct df *, bitmap));
240 static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
241 static void df_lr_local_compute PARAMS((struct df *, bitmap));
242 static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
243 static void df_reg_info_compute PARAMS((struct df *, bitmap));
245 static int df_bb_luids_set PARAMS((struct df *df, basic_block));
246 static int df_luids_set PARAMS((struct df *df, bitmap));
248 static int df_modified_p PARAMS ((struct df *, bitmap));
249 static int df_refs_queue PARAMS ((struct df *));
250 static int df_refs_process PARAMS ((struct df *));
251 static int df_bb_refs_update PARAMS ((struct df *, basic_block));
252 static int df_refs_update PARAMS ((struct df *));
253 static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));
255 static void df_insns_modify PARAMS((struct df *, basic_block,
257 static int df_rtx_mem_replace PARAMS ((rtx *, void *));
258 static int df_rtx_reg_replace PARAMS ((rtx *, void *));
259 void df_refs_reg_replace PARAMS ((struct df *, bitmap,
260 struct df_link *, rtx, rtx));
262 static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
263 static int df_def_dominates_uses_p PARAMS((struct df *,
264 struct ref *def, bitmap));
265 static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
267 static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
269 static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
272 static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
276 static void df_chain_dump PARAMS((struct df_link *, FILE *file));
277 static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
278 static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
279 static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));
280 static void df_rd_transfer_function PARAMS ((int, int *, bitmap, bitmap,
281 bitmap, bitmap, void *));
282 static void df_ru_transfer_function PARAMS ((int, int *, bitmap, bitmap,
283 bitmap, bitmap, void *));
284 static void df_lr_transfer_function PARAMS ((int, int *, bitmap, bitmap,
285 bitmap, bitmap, void *));
286 static void hybrid_search_bitmap PARAMS ((basic_block, bitmap *, bitmap *,
287 bitmap *, bitmap *, enum df_flow_dir,
288 enum df_confluence_op,
289 transfer_function_bitmap,
290 sbitmap, sbitmap, void *));
291 static void hybrid_search_sbitmap PARAMS ((basic_block, sbitmap *, sbitmap *,
292 sbitmap *, sbitmap *, enum df_flow_dir,
293 enum df_confluence_op,
294 transfer_function_sbitmap,
295 sbitmap, sbitmap, void *));
296 static inline bool read_modify_subreg_p PARAMS ((rtx));
299 /* Local memory allocation/deallocation routines. */
302 /* Increase the insn info table to have space for at least SIZE + 1
305 df_insn_table_realloc (df, size)
310 if (size <= df->insn_size)
313 /* Make the table a little larger than requested, so we don't need
314 to enlarge it so often. */
315 size += df->insn_size / 4;
317 df->insns = (struct insn_info *)
318 xrealloc (df->insns, size * sizeof (struct insn_info));
320 memset (df->insns + df->insn_size, 0,
321 (size - df->insn_size) * sizeof (struct insn_info));
323 df->insn_size = size;
325 if (! df->insns_modified)
327 df->insns_modified = BITMAP_XMALLOC ();
328 bitmap_zero (df->insns_modified);
333 /* Increase the reg info table by SIZE more elements. */
335 df_reg_table_realloc (df, size)
339 /* Make table 25 percent larger by default. */
341 size = df->reg_size / 4;
343 size += df->reg_size;
344 if (size < max_reg_num ())
345 size = max_reg_num ();
347 df->regs = (struct reg_info *)
348 xrealloc (df->regs, size * sizeof (struct reg_info));
350 /* Zero the new entries. */
351 memset (df->regs + df->reg_size, 0,
352 (size - df->reg_size) * sizeof (struct reg_info));
359 /* Not currently used. */
361 df_def_table_realloc (df, size)
368 /* Make table 25 percent larger by default. */
370 size = df->def_size / 4;
372 df->def_size += size;
373 df->defs = xrealloc (df->defs,
374 df->def_size * sizeof (*df->defs));
376 /* Allocate a new block of memory and link into list of blocks
377 that will need to be freed later. */
379 refs = xmalloc (size * sizeof (*refs));
381 /* Link all the new refs together, overloading the chain field. */
382 for (i = 0; i < size - 1; i++)
383 refs[i].chain = (struct df_link *) (refs + i + 1);
384 refs[size - 1].chain = 0;
390 /* Allocate bitmaps for each basic block. */
392 df_bitmaps_alloc (df, flags)
399 /* Free the bitmaps if they need resizing. */
400 if ((flags & DF_LR) && df->n_regs < (unsigned int) max_reg_num ())
401 dflags |= DF_LR | DF_RU;
402 if ((flags & DF_RU) && df->n_uses < df->use_id)
404 if ((flags & DF_RD) && df->n_defs < df->def_id)
408 df_bitmaps_free (df, dflags);
410 df->n_defs = df->def_id;
411 df->n_uses = df->use_id;
415 struct bb_info *bb_info = DF_BB_INFO (df, bb);
417 if (flags & DF_RD && ! bb_info->rd_in)
419 /* Allocate bitmaps for reaching definitions. */
420 bb_info->rd_kill = BITMAP_XMALLOC ();
421 bitmap_zero (bb_info->rd_kill);
422 bb_info->rd_gen = BITMAP_XMALLOC ();
423 bitmap_zero (bb_info->rd_gen);
424 bb_info->rd_in = BITMAP_XMALLOC ();
425 bb_info->rd_out = BITMAP_XMALLOC ();
426 bb_info->rd_valid = 0;
429 if (flags & DF_RU && ! bb_info->ru_in)
431 /* Allocate bitmaps for upward exposed uses. */
432 bb_info->ru_kill = BITMAP_XMALLOC ();
433 bitmap_zero (bb_info->ru_kill);
434 /* Note the lack of symmetry. */
435 bb_info->ru_gen = BITMAP_XMALLOC ();
436 bitmap_zero (bb_info->ru_gen);
437 bb_info->ru_in = BITMAP_XMALLOC ();
438 bb_info->ru_out = BITMAP_XMALLOC ();
439 bb_info->ru_valid = 0;
442 if (flags & DF_LR && ! bb_info->lr_in)
444 /* Allocate bitmaps for live variables. */
445 bb_info->lr_def = BITMAP_XMALLOC ();
446 bitmap_zero (bb_info->lr_def);
447 bb_info->lr_use = BITMAP_XMALLOC ();
448 bitmap_zero (bb_info->lr_use);
449 bb_info->lr_in = BITMAP_XMALLOC ();
450 bb_info->lr_out = BITMAP_XMALLOC ();
451 bb_info->lr_valid = 0;
457 /* Free bitmaps for each basic block. */
459 df_bitmaps_free (df, flags)
460 struct df *df ATTRIBUTE_UNUSED;
467 struct bb_info *bb_info = DF_BB_INFO (df, bb);
472 if ((flags & DF_RD) && bb_info->rd_in)
474 /* Free bitmaps for reaching definitions. */
475 BITMAP_XFREE (bb_info->rd_kill);
476 bb_info->rd_kill = NULL;
477 BITMAP_XFREE (bb_info->rd_gen);
478 bb_info->rd_gen = NULL;
479 BITMAP_XFREE (bb_info->rd_in);
480 bb_info->rd_in = NULL;
481 BITMAP_XFREE (bb_info->rd_out);
482 bb_info->rd_out = NULL;
485 if ((flags & DF_RU) && bb_info->ru_in)
487 /* Free bitmaps for upward exposed uses. */
488 BITMAP_XFREE (bb_info->ru_kill);
489 bb_info->ru_kill = NULL;
490 BITMAP_XFREE (bb_info->ru_gen);
491 bb_info->ru_gen = NULL;
492 BITMAP_XFREE (bb_info->ru_in);
493 bb_info->ru_in = NULL;
494 BITMAP_XFREE (bb_info->ru_out);
495 bb_info->ru_out = NULL;
498 if ((flags & DF_LR) && bb_info->lr_in)
500 /* Free bitmaps for live variables. */
501 BITMAP_XFREE (bb_info->lr_def);
502 bb_info->lr_def = NULL;
503 BITMAP_XFREE (bb_info->lr_use);
504 bb_info->lr_use = NULL;
505 BITMAP_XFREE (bb_info->lr_in);
506 bb_info->lr_in = NULL;
507 BITMAP_XFREE (bb_info->lr_out);
508 bb_info->lr_out = NULL;
511 df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
515 /* Allocate and initialize dataflow memory. */
517 df_alloc (df, n_regs)
524 gcc_obstack_init (&df_ref_obstack);
526 /* Perhaps we should use LUIDs to save memory for the insn_refs
527 table. This is only a small saving; a few pointers. */
528 n_insns = get_max_uid () + 1;
532 /* Approximate number of defs by number of insns. */
533 df->def_size = n_insns;
534 df->defs = xmalloc (df->def_size * sizeof (*df->defs));
538 /* Approximate number of uses by twice number of insns. */
539 df->use_size = n_insns * 2;
540 df->uses = xmalloc (df->use_size * sizeof (*df->uses));
543 df->n_bbs = last_basic_block;
545 /* Allocate temporary working array used during local dataflow analysis. */
546 df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
548 df_insn_table_realloc (df, n_insns);
550 df_reg_table_realloc (df, df->n_regs);
552 df->bbs_modified = BITMAP_XMALLOC ();
553 bitmap_zero (df->bbs_modified);
557 df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
559 df->all_blocks = BITMAP_XMALLOC ();
561 bitmap_set_bit (df->all_blocks, bb->index);
565 /* Free all the dataflow info. */
570 df_bitmaps_free (df, DF_ALL);
598 if (df->bbs_modified)
599 BITMAP_XFREE (df->bbs_modified);
600 df->bbs_modified = 0;
602 if (df->insns_modified)
603 BITMAP_XFREE (df->insns_modified);
604 df->insns_modified = 0;
606 BITMAP_XFREE (df->all_blocks);
609 obstack_free (&df_ref_obstack, NULL);
612 /* Local miscellaneous routines. */
614 /* Return a USE for register REGNO. */
615 static rtx df_reg_use_gen (regno)
621 reg = regno_reg_rtx[regno];
623 use = gen_rtx_USE (GET_MODE (reg), reg);
628 /* Return a CLOBBER for register REGNO. */
629 static rtx df_reg_clobber_gen (regno)
635 reg = regno_reg_rtx[regno];
637 use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
641 /* Local chain manipulation routines. */
643 /* Create a link in a def-use or use-def chain. */
644 static inline struct df_link *
645 df_link_create (ref, next)
647 struct df_link *next;
649 struct df_link *link;
651 link = (struct df_link *) obstack_alloc (&df_ref_obstack,
659 /* Add REF to chain head pointed to by PHEAD. */
660 static struct df_link *
661 df_ref_unlink (phead, ref)
662 struct df_link **phead;
665 struct df_link *link = *phead;
671 /* Only a single ref. It must be the one we want.
672 If not, the def-use and use-def chains are likely to
674 if (link->ref != ref)
676 /* Now have an empty chain. */
681 /* Multiple refs. One of them must be us. */
682 if (link->ref == ref)
687 for (; link->next; link = link->next)
689 if (link->next->ref == ref)
691 /* Unlink from list. */
692 link->next = link->next->next;
703 /* Unlink REF from all def-use/use-def chains, etc. */
705 df_ref_remove (df, ref)
709 if (DF_REF_REG_DEF_P (ref))
711 df_def_unlink (df, ref);
712 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
716 df_use_unlink (df, ref);
717 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
723 /* Unlink DEF from use-def and reg-def chains. */
725 df_def_unlink (df, def)
726 struct df *df ATTRIBUTE_UNUSED;
729 struct df_link *du_link;
730 unsigned int dregno = DF_REF_REGNO (def);
732 /* Follow def-use chain to find all the uses of this def. */
733 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
735 struct ref *use = du_link->ref;
737 /* Unlink this def from the use-def chain. */
738 df_ref_unlink (&DF_REF_CHAIN (use), def);
740 DF_REF_CHAIN (def) = 0;
742 /* Unlink def from reg-def chain. */
743 df_ref_unlink (&df->regs[dregno].defs, def);
745 df->defs[DF_REF_ID (def)] = 0;
749 /* Unlink use from def-use and reg-use chains. */
751 df_use_unlink (df, use)
752 struct df *df ATTRIBUTE_UNUSED;
755 struct df_link *ud_link;
756 unsigned int uregno = DF_REF_REGNO (use);
758 /* Follow use-def chain to find all the defs of this use. */
759 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
761 struct ref *def = ud_link->ref;
763 /* Unlink this use from the def-use chain. */
764 df_ref_unlink (&DF_REF_CHAIN (def), use);
766 DF_REF_CHAIN (use) = 0;
768 /* Unlink use from reg-use chain. */
769 df_ref_unlink (&df->regs[uregno].uses, use);
771 df->uses[DF_REF_ID (use)] = 0;
774 /* Local routines for recording refs. */
777 /* Create a new ref of type DF_REF_TYPE for register REG at address
778 LOC within INSN of BB. */
780 df_ref_create (df, reg, loc, insn, ref_type, ref_flags)
785 enum df_ref_type ref_type;
786 enum df_ref_flags ref_flags;
788 struct ref *this_ref;
790 this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
792 DF_REF_REG (this_ref) = reg;
793 DF_REF_LOC (this_ref) = loc;
794 DF_REF_INSN (this_ref) = insn;
795 DF_REF_CHAIN (this_ref) = 0;
796 DF_REF_TYPE (this_ref) = ref_type;
797 DF_REF_FLAGS (this_ref) = ref_flags;
799 if (ref_type == DF_REF_REG_DEF)
801 if (df->def_id >= df->def_size)
803 /* Make table 25 percent larger. */
804 df->def_size += (df->def_size / 4);
805 df->defs = xrealloc (df->defs,
806 df->def_size * sizeof (*df->defs));
808 DF_REF_ID (this_ref) = df->def_id;
809 df->defs[df->def_id++] = this_ref;
813 if (df->use_id >= df->use_size)
815 /* Make table 25 percent larger. */
816 df->use_size += (df->use_size / 4);
817 df->uses = xrealloc (df->uses,
818 df->use_size * sizeof (*df->uses));
820 DF_REF_ID (this_ref) = df->use_id;
821 df->uses[df->use_id++] = this_ref;
827 /* Create a new reference of type DF_REF_TYPE for a single register REG,
828 used inside the LOC rtx of INSN. */
830 df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags)
835 enum df_ref_type ref_type;
836 enum df_ref_flags ref_flags;
838 df_ref_create (df, reg, loc, insn, ref_type, ref_flags);
842 /* Create new references of type DF_REF_TYPE for each part of register REG
843 at address LOC within INSN of BB. */
845 df_ref_record (df, reg, loc, insn, ref_type, ref_flags)
850 enum df_ref_type ref_type;
851 enum df_ref_flags ref_flags;
855 if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
858 /* For the reg allocator we are interested in some SUBREG rtx's, but not
859 all. Notably only those representing a word extraction from a multi-word
860 reg. As written in the docu those should have the form
861 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
862 XXX Is that true? We could also use the global word_mode variable. */
863 if (GET_CODE (reg) == SUBREG
864 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
865 || GET_MODE_SIZE (GET_MODE (reg))
866 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
868 loc = &SUBREG_REG (reg);
872 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
873 if (regno < FIRST_PSEUDO_REGISTER)
878 if (! (df->flags & DF_HARD_REGS))
881 /* GET_MODE (reg) is correct here. We don't want to go into a SUBREG
882 for the mode, because we only want to add references to regs, which
883 are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
884 reference the whole reg 0 in DI mode (which would also include
885 reg 1, at least, if 0 and 1 are SImode registers). */
886 endregno = HARD_REGNO_NREGS (regno, GET_MODE (reg));
887 if (GET_CODE (reg) == SUBREG)
888 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
889 SUBREG_BYTE (reg), GET_MODE (reg));
892 for (i = regno; i < endregno; i++)
893 df_ref_record_1 (df, regno_reg_rtx[i],
894 loc, insn, ref_type, ref_flags);
898 df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags);
902 /* Writes to paradoxical subregs, or subregs which are too narrow
903 are read-modify-write. */
906 read_modify_subreg_p (x)
909 unsigned int isize, osize;
910 if (GET_CODE (x) != SUBREG)
912 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
913 osize = GET_MODE_SIZE (GET_MODE (x));
916 if (isize <= UNITS_PER_WORD)
918 if (osize > UNITS_PER_WORD)
923 /* Process all the registers defined in the rtx, X. */
925 df_def_record_1 (df, x, bb, insn)
931 rtx *loc = &SET_DEST (x);
933 enum df_ref_flags flags = 0;
935 /* Some targets place small structures in registers for
936 return values of functions. */
937 if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
941 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
942 df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
946 #ifdef CLASS_CANNOT_CHANGE_MODE
947 if (GET_CODE (dst) == SUBREG
948 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
949 GET_MODE (SUBREG_REG (dst))))
950 flags |= DF_REF_MODE_CHANGE;
953 /* May be, we should flag the use of strict_low_part somehow. Might be
954 handy for the reg allocator. */
955 while (GET_CODE (dst) == STRICT_LOW_PART
956 || GET_CODE (dst) == ZERO_EXTRACT
957 || GET_CODE (dst) == SIGN_EXTRACT
958 || read_modify_subreg_p (dst))
960 /* Strict low part always contains SUBREG, but we don't want to make
961 it appear outside, as whole register is always considered. */
962 if (GET_CODE (dst) == STRICT_LOW_PART)
964 loc = &XEXP (dst, 0);
967 #ifdef CLASS_CANNOT_CHANGE_MODE
968 if (GET_CODE (dst) == SUBREG
969 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
970 GET_MODE (SUBREG_REG (dst))))
971 flags |= DF_REF_MODE_CHANGE;
973 loc = &XEXP (dst, 0);
975 flags |= DF_REF_READ_WRITE;
978 if (GET_CODE (dst) == REG
979 || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
980 df_ref_record (df, dst, loc, insn, DF_REF_REG_DEF, flags);
984 /* Process all the registers defined in the pattern rtx, X. */
986 df_defs_record (df, x, bb, insn)
992 RTX_CODE code = GET_CODE (x);
994 if (code == SET || code == CLOBBER)
996 /* Mark the single def within the pattern. */
997 df_def_record_1 (df, x, bb, insn);
999 else if (code == PARALLEL)
1003 /* Mark the multiple defs within the pattern. */
1004 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1006 code = GET_CODE (XVECEXP (x, 0, i));
1007 if (code == SET || code == CLOBBER)
1008 df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
1014 /* Process all the registers used in the rtx at address LOC. */
1016 df_uses_record (df, loc, ref_type, bb, insn, flags)
1019 enum df_ref_type ref_type;
1022 enum df_ref_flags flags;
1030 code = GET_CODE (x);
1046 /* If we are clobbering a MEM, mark any registers inside the address
1048 if (GET_CODE (XEXP (x, 0)) == MEM)
1049 df_uses_record (df, &XEXP (XEXP (x, 0), 0),
1050 DF_REF_REG_MEM_STORE, bb, insn, flags);
1052 /* If we're clobbering a REG then we have a def so ignore. */
1056 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, flags);
1060 /* While we're here, optimize this case. */
1062 /* In case the SUBREG is not of a register, don't optimize. */
1063 if (GET_CODE (SUBREG_REG (x)) != REG)
1065 loc = &SUBREG_REG (x);
1066 df_uses_record (df, loc, ref_type, bb, insn, flags);
1069 #ifdef CLASS_CANNOT_CHANGE_MODE
1070 if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
1071 GET_MODE (SUBREG_REG (x))))
1072 flags |= DF_REF_MODE_CHANGE;
1075 /* ... Fall through ... */
1078 /* See a register (or subreg) other than being set. */
1079 df_ref_record (df, x, loc, insn, ref_type, flags);
1084 rtx dst = SET_DEST (x);
1086 df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
1088 switch (GET_CODE (dst))
1090 enum df_ref_flags use_flags;
1092 if (read_modify_subreg_p (dst))
1094 use_flags = DF_REF_READ_WRITE;
1095 #ifdef CLASS_CANNOT_CHANGE_MODE
1096 if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
1097 GET_MODE (SUBREG_REG (dst))))
1098 use_flags |= DF_REF_MODE_CHANGE;
1100 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1104 /* ... FALLTHRU ... */
1111 df_uses_record (df, &XEXP (dst, 0),
1112 DF_REF_REG_MEM_STORE,
1115 case STRICT_LOW_PART:
1116 /* A strict_low_part uses the whole reg not only the subreg. */
1117 dst = XEXP (dst, 0);
1118 if (GET_CODE (dst) != SUBREG)
1120 use_flags = DF_REF_READ_WRITE;
1121 #ifdef CLASS_CANNOT_CHANGE_MODE
1122 if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
1123 GET_MODE (SUBREG_REG (dst))))
1124 use_flags |= DF_REF_MODE_CHANGE;
1126 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1131 df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
1133 df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
1134 df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
1135 dst = XEXP (dst, 0);
1147 case UNSPEC_VOLATILE:
1151 /* Traditional and volatile asm instructions must be considered to use
1152 and clobber all hard registers, all pseudo-registers and all of
1153 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1155 Consider for instance a volatile asm that changes the fpu rounding
1156 mode. An insn should not be moved across this even if it only uses
1157 pseudo-regs because it might give an incorrectly rounded result.
1159 For now, just mark any regs we can find in ASM_OPERANDS as
1162 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1163 We can not just fall through here since then we would be confused
1164 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
1165 traditional asms unlike their normal usage. */
1166 if (code == ASM_OPERANDS)
1170 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1171 df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1172 DF_REF_REG_USE, bb, insn, 0);
1184 /* Catch the def of the register being modified. */
1185 df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);
1187 /* ... Fall through to handle uses ... */
1193 /* Recursively scan the operands of this expression. */
1195 const char *fmt = GET_RTX_FORMAT (code);
1198 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1202 /* Tail recursive case: save a function call level. */
1208 df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
1210 else if (fmt[i] == 'E')
1213 for (j = 0; j < XVECLEN (x, i); j++)
1214 df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1222 /* Record all the df within INSN of basic block BB. */
1224 df_insn_refs_record (df, bb, insn)
1235 /* Record register defs */
1236 df_defs_record (df, PATTERN (insn), bb, insn);
1238 if (df->flags & DF_EQUIV_NOTES)
1239 for (note = REG_NOTES (insn); note;
1240 note = XEXP (note, 1))
1242 switch (REG_NOTE_KIND (note))
1246 df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1253 if (GET_CODE (insn) == CALL_INSN)
1258 /* Record the registers used to pass arguments. */
1259 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1260 note = XEXP (note, 1))
1262 if (GET_CODE (XEXP (note, 0)) == USE)
1263 df_uses_record (df, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE,
1267 /* The stack ptr is used (honorarily) by a CALL insn. */
1268 x = df_reg_use_gen (STACK_POINTER_REGNUM);
1269 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_USE, bb, insn, 0);
1271 if (df->flags & DF_HARD_REGS)
1273 /* Calls may also reference any of the global registers,
1274 so they are recorded as used. */
1275 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1278 x = df_reg_use_gen (i);
1279 df_uses_record (df, &SET_DEST (x),
1280 DF_REF_REG_USE, bb, insn, 0);
1285 /* Record the register uses. */
1286 df_uses_record (df, &PATTERN (insn),
1287 DF_REF_REG_USE, bb, insn, 0);
1290 if (GET_CODE (insn) == CALL_INSN)
1294 if (df->flags & DF_HARD_REGS)
1296 /* Kill all registers invalidated by a call. */
1297 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1298 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1300 rtx reg_clob = df_reg_clobber_gen (i);
1301 df_defs_record (df, reg_clob, bb, insn);
1305 /* There may be extra registers to be clobbered. */
1306 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1308 note = XEXP (note, 1))
1309 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1310 df_defs_record (df, XEXP (note, 0), bb, insn);
1316 /* Record all the refs within the basic block BB. */
/* Iterates every insn from bb->head to bb->end inclusive and delegates
   the per-insn def/use recording to df_insn_refs_record.  NOTE(review):
   interior lines (braces, locals) are elided in this view.  */
1318 df_bb_refs_record (df, bb)
1324 /* Scan the block an insn at a time from beginning to end. */
1325 for (insn = bb->head; ; insn = NEXT_INSN (insn))
1329 /* Record defs within INSN. */
1330 df_insn_refs_record (df, bb, insn);
/* bb->end terminates the walk; the loop condition itself is empty.  */
1332 if (insn == bb->end)
1338 /* Record all the refs in the basic blocks specified by BLOCKS. */
/* Thin driver: applies df_bb_refs_record to each block in the BLOCKS
   bitmap via the FOR_EACH_BB_IN_BITMAP iterator macro.  */
1340 df_refs_record (df, blocks)
1346 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1348 df_bb_refs_record (df, bb);
1352 /* Dataflow analysis routines. */
1355 /* Create reg-def chains for basic block BB. These are a list of
1356 definitions for each register. */
/* Walks the insns of BB backwards (end to head) so that, after pushing
   each def onto the head of its register's chain, earlier defs end up
   at the front of df->regs[regno].defs.  Refs recorded before the last
   save point (DF_REF_ID < df->def_id_save) are skipped to avoid adding
   the same ref twice.  */
1358 df_bb_reg_def_chain_create (df, bb)
1364 /* Perhaps the defs should be sorted using a depth first search
1365 of the CFG (or possibly a breadth first search). We currently
1366 scan the basic blocks in reverse order so that the first defs
1367 appear at the start of the chain. */
1369 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1370 insn = PREV_INSN (insn))
1372 struct df_link *link;
1373 unsigned int uid = INSN_UID (insn);
/* Notes, barriers, etc. carry no refs -- skip them.  */
1375 if (! INSN_P (insn))
1378 for (link = df->insns[uid].defs; link; link = link->next)
1380 struct ref *def = link->ref;
1381 unsigned int dregno = DF_REF_REGNO (def);
1382 /* Don't add ref's to the chain two times. I.e. only add
1383 new refs. XXX the same could be done by testing if the current
1384 insn is a modified (or a new) one. This would be faster. */
1385 if (DF_REF_ID (def) < df->def_id_save)
1388 df->regs[dregno].defs
1389 = df_link_create (def, df->regs[dregno].defs);
1395 /* Create reg-def chains for each basic block within BLOCKS. These
1396 are a list of definitions for each register. */
/* Driver over the BLOCKS bitmap.  The commented-out _REV suffix hints
   that a reverse iteration order was once considered here.  */
1398 df_reg_def_chain_create (df, blocks)
1404 FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
1406 df_bb_reg_def_chain_create (df, bb);
1411 /* Create reg-use chains for basic block BB. These are a list of uses
1412 for each register. */
/* Mirror image of df_bb_reg_def_chain_create: scans forward and pushes
   each use onto the head of df->regs[regno].uses, so the LAST use sits
   at the head.  Uses older than use_id_save are skipped (already
   chained).  */
1414 df_bb_reg_use_chain_create (df, bb)
1420 /* Scan in forward order so that the last uses appear at the
1421 start of the chain. */
1423 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1424 insn = NEXT_INSN (insn))
1426 struct df_link *link;
1427 unsigned int uid = INSN_UID (insn);
1429 if (! INSN_P (insn))
1432 for (link = df->insns[uid].uses; link; link = link->next)
1434 struct ref *use = link->ref;
1435 unsigned int uregno = DF_REF_REGNO (use);
1436 /* Don't add ref's to the chain two times. I.e. only add
1437 new refs. XXX the same could be done by testing if the current
1438 insn is a modified (or a new) one. This would be faster. */
1439 if (DF_REF_ID (use) < df->use_id_save)
1442 df->regs[uregno].uses
1443 = df_link_create (use, df->regs[uregno].uses);
1449 /* Create reg-use chains for each basic block within BLOCKS. These
1450 are a list of uses for each register. */
/* Driver: applies df_bb_reg_use_chain_create to every block in BLOCKS.  */
1452 df_reg_use_chain_create (df, blocks)
1458 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1460 df_bb_reg_use_chain_create (df, bb);
1465 /* Create def-use chains from reaching use bitmaps for basic block BB. */
/* RU is a scratch bitmap (caller-allocated) seeded with the uses live
   out of BB.  Walking backwards, each def captures the currently
   reachable uses of its regno into DF_REF_CHAIN (def), clears them
   from RU, and each use encountered re-arms its bit in RU.  */
1467 df_bb_du_chain_create (df, bb, ru)
1472 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1475 bitmap_copy (ru, bb_info->ru_out);
1477 /* For each def in BB create a linked list (chain) of uses
1478 reached from the def. */
1479 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1480 insn = PREV_INSN (insn))
1482 struct df_link *def_link;
1483 struct df_link *use_link;
1484 unsigned int uid = INSN_UID (insn);
1486 if (! INSN_P (insn))
1489 /* For each def in insn... */
1490 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1492 struct ref *def = def_link->ref;
1493 unsigned int dregno = DF_REF_REGNO (def);
/* Start this def's chain from scratch.  */
1495 DF_REF_CHAIN (def) = 0;
1497 /* While the reg-use chains are not essential, it
1498 is _much_ faster to search these short lists rather
1499 than all the reaching uses, especially for large functions. */
1500 for (use_link = df->regs[dregno].uses; use_link;
1501 use_link = use_link->next)
1503 struct ref *use = use_link->ref;
1505 if (bitmap_bit_p (ru, DF_REF_ID (use)))
1508 = df_link_create (use, DF_REF_CHAIN (def));
/* The use is now reached by this def; later (earlier in insn
   order) defs of the same regno must not also claim it.  */
1510 bitmap_clear_bit (ru, DF_REF_ID (use));
1515 /* For each use in insn... */
1516 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1518 struct ref *use = use_link->ref;
1519 bitmap_set_bit (ru, DF_REF_ID (use));
1525 /* Create def-use chains from reaching use bitmaps for basic blocks
/* Allocates one scratch bitmap and reuses it for every block; freed
   after the loop (free call elided from this view).  */
1528 df_du_chain_create (df, blocks)
1535 ru = BITMAP_XMALLOC ();
1537 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1539 df_bb_du_chain_create (df, bb, ru);
1546 /* Create use-def chains from reaching def bitmaps for basic block BB. */
/* Forward walk.  reg_def_last[] tracks the most recent def of each
   regno seen so far within BB: if one exists, the use-def chain is that
   single local def; otherwise the chain is built from all defs of the
   regno that reach BB's entry (rd_in).  */
1548 df_bb_ud_chain_create (df, bb)
1552 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1553 struct ref **reg_def_last = df->reg_def_last;
/* Reset the per-regno "last def in this BB" table.  */
1556 memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1558 /* For each use in BB create a linked list (chain) of defs
1559 that reach the use. */
1560 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1561 insn = NEXT_INSN (insn))
1563 unsigned int uid = INSN_UID (insn);
1564 struct df_link *use_link;
1565 struct df_link *def_link;
1567 if (! INSN_P (insn))
1570 /* For each use in insn... */
1571 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1573 struct ref *use = use_link->ref;
1574 unsigned int regno = DF_REF_REGNO (use);
1576 DF_REF_CHAIN (use) = 0;
1578 /* Has regno been defined in this BB yet? If so, use
1579 the last def as the single entry for the use-def
1580 chain for this use. Otherwise, we need to add all
1581 the defs using this regno that reach the start of
1583 if (reg_def_last[regno])
1586 = df_link_create (reg_def_last[regno], 0);
1590 /* While the reg-def chains are not essential, it is
1591 _much_ faster to search these short lists rather than
1592 all the reaching defs, especially for large
1594 for (def_link = df->regs[regno].defs; def_link;
1595 def_link = def_link->next)
1597 struct ref *def = def_link->ref;
1599 if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1602 = df_link_create (def, DF_REF_CHAIN (use));
1609 /* For each def in insn...record the last def of each reg. */
1610 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1612 struct ref *def = def_link->ref;
1613 int dregno = DF_REF_REGNO (def);
1615 reg_def_last[dregno] = def;
1621 /* Create use-def chains from reaching def bitmaps for basic blocks
/* Driver: applies df_bb_ud_chain_create to every block in BLOCKS.  */
1624 df_ud_chain_create (df, blocks)
1630 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1632 df_bb_ud_chain_create (df, bb);
/* Transfer function for reaching defs (forward problem):
   out = gen U (in - kill); *CHANGED is set if OUT changed.  */
1639 df_rd_transfer_function (bb, changed, in, out, gen, kill, data)
1640 int bb ATTRIBUTE_UNUSED;
1642 bitmap in, out, gen, kill;
1643 void *data ATTRIBUTE_UNUSED;
1645 *changed = bitmap_union_of_diff (out, gen, in, kill);
/* Transfer function for reaching (upward exposed) uses (backward
   problem): in = gen U (out - kill); *CHANGED set if IN changed.  */
1648 df_ru_transfer_function (bb, changed, in, out, gen, kill, data)
1649 int bb ATTRIBUTE_UNUSED;
1651 bitmap in, out, gen, kill;
1652 void *data ATTRIBUTE_UNUSED;
1654 *changed = bitmap_union_of_diff (in, gen, out, kill);
/* Transfer function for live registers (backward problem):
   in = use U (out - def); *CHANGED set if IN changed.  */
1658 df_lr_transfer_function (bb, changed, in, out, use, def, data)
1659 int bb ATTRIBUTE_UNUSED;
1661 bitmap in, out, use, def;
1662 void *data ATTRIBUTE_UNUSED;
1664 *changed = bitmap_union_of_diff (in, use, out, def);
1668 /* Compute local reaching def info for basic block BB. */
/* Builds rd_gen/rd_kill for BB.  For every def D of regno R in BB, all
   defs of R anywhere in the function are added to rd_kill (greedy but
   cheap -- see comment below) and cleared from rd_gen, then D itself is
   set in rd_gen.  Marks the info valid on completion.  */
1670 df_bb_rd_local_compute (df, bb)
1674 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1677 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1678 insn = NEXT_INSN (insn))
1680 unsigned int uid = INSN_UID (insn);
1681 struct df_link *def_link;
1683 if (! INSN_P (insn))
1686 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1688 struct ref *def = def_link->ref;
1689 unsigned int regno = DF_REF_REGNO (def);
1690 struct df_link *def2_link;
1692 for (def2_link = df->regs[regno].defs; def2_link;
1693 def2_link = def2_link->next)
1695 struct ref *def2 = def2_link->ref;
1697 /* Add all defs of this reg to the set of kills. This
1698 is greedy since many of these defs will not actually
1699 be killed by this BB but it keeps things a lot
1701 bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1703 /* Zap from the set of gens for this BB. */
1704 bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
1707 bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1711 bb_info->rd_valid = 1;
1715 /* Compute local reaching def info for each basic block within BLOCKS. */
/* Driver over the BLOCKS bitmap.  */
1717 df_rd_local_compute (df, blocks)
1723 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1725 df_bb_rd_local_compute (df, bb);
1730 /* Compute local reaching use (upward exposed use) info for basic
/* Builds ru_gen/ru_kill for BB by a BACKWARD walk: a def of regno R
   kills (greedily) every use of R in the function, and each use seen
   afterwards (i.e. earlier in the block) re-enters ru_gen.  */
1733 df_bb_ru_local_compute (df, bb)
1737 /* This is much more tricky than computing reaching defs. With
1738 reaching defs, defs get killed by other defs. With upwards
1739 exposed uses, these get killed by defs with the same regno. */
1741 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1745 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1746 insn = PREV_INSN (insn))
1748 unsigned int uid = INSN_UID (insn);
1749 struct df_link *def_link;
1750 struct df_link *use_link;
1752 if (! INSN_P (insn))
1755 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1757 struct ref *def = def_link->ref;
1758 unsigned int dregno = DF_REF_REGNO (def);
1760 for (use_link = df->regs[dregno].uses; use_link;
1761 use_link = use_link->next)
1763 struct ref *use = use_link->ref;
1765 /* Add all uses of this reg to the set of kills. This
1766 is greedy since many of these uses will not actually
1767 be killed by this BB but it keeps things a lot
1769 bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1771 /* Zap from the set of gens for this BB. */
1772 bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1776 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1778 struct ref *use = use_link->ref;
1779 /* Add use to set of gens in this BB. */
1780 bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1783 bb_info->ru_valid = 1;
1787 /* Compute local reaching use (upward exposed use) info for each basic
1788 block within BLOCKS. */
/* Driver over the BLOCKS bitmap.  */
1790 df_ru_local_compute (df, blocks)
1796 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1798 df_bb_ru_local_compute (df, bb);
1803 /* Compute local live variable info for basic block BB. */
/* Builds lr_def/lr_use for BB by a backward walk.  Note these bitmaps
   are indexed by REGNO (not by ref id, unlike rd/ru): a def adds the
   regno to lr_def and removes it from lr_use; a use adds it to lr_use.  */
1805 df_bb_lr_local_compute (df, bb)
1809 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1812 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1813 insn = PREV_INSN (insn))
1815 unsigned int uid = INSN_UID (insn);
1816 struct df_link *link;
1818 if (! INSN_P (insn))
1821 for (link = df->insns[uid].defs; link; link = link->next)
1823 struct ref *def = link->ref;
1824 unsigned int dregno = DF_REF_REGNO (def);
1826 /* Add def to set of defs in this BB. */
1827 bitmap_set_bit (bb_info->lr_def, dregno);
1829 bitmap_clear_bit (bb_info->lr_use, dregno);
1832 for (link = df->insns[uid].uses; link; link = link->next)
1834 struct ref *use = link->ref;
1835 /* Add use to set of uses in this BB. */
1836 bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1839 bb_info->lr_valid = 1;
1843 /* Compute local live variable info for each basic block within BLOCKS. */
/* Driver over the BLOCKS bitmap.  */
1845 df_lr_local_compute (df, blocks)
1851 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1853 df_bb_lr_local_compute (df, bb);
1858 /* Compute register info: lifetime, bb, and number of defs and uses
1859 for basic block BB. */
/* LIVE is a caller-supplied scratch bitmap, seeded from lr_out and
   updated while walking backwards: defs kill, uses revive.  Every insn
   step increments the lifetime counter of each currently-live regno,
   and n_defs/n_uses are accumulated per register.  */
1861 df_bb_reg_info_compute (df, bb, live)
1866 struct reg_info *reg_info = df->regs;
1867 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1870 bitmap_copy (live, bb_info->lr_out);
1872 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1873 insn = PREV_INSN (insn))
1875 unsigned int uid = INSN_UID (insn);
1877 struct df_link *link;
1879 if (! INSN_P (insn))
1882 for (link = df->insns[uid].defs; link; link = link->next)
1884 struct ref *def = link->ref;
1885 unsigned int dregno = DF_REF_REGNO (def);
1887 /* Kill this register. */
1888 bitmap_clear_bit (live, dregno);
1889 reg_info[dregno].n_defs++;
1892 for (link = df->insns[uid].uses; link; link = link->next)
1894 struct ref *use = link->ref;
1895 unsigned int uregno = DF_REF_REGNO (use);
1897 /* This register is now live. */
1898 bitmap_set_bit (live, uregno);
1899 reg_info[uregno].n_uses++;
1902 /* Increment lifetimes of all live registers. */
1903 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
1905 reg_info[regno].lifetime++;
1911 /* Compute register info: lifetime, bb, and number of defs and uses. */
/* Allocates one scratch LIVE bitmap, shares it across all blocks, and
   frees it afterwards.  */
1913 df_reg_info_compute (df, blocks)
1920 live = BITMAP_XMALLOC ();
1922 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1924 df_bb_reg_info_compute (df, bb, live);
1927 BITMAP_XFREE (live);
1931 /* Assign LUIDs for BB. */
/* Assigns monotonically increasing logical UIDs to the insns of BB.
   NOTE(review): the two consecutive assignments below look redundant in
   this elided view -- presumably a conditional (e.g. on INSN_P) sits
   between them in the full source so that only real insns bump LUID;
   confirm against the unelided file.  */
1933 df_bb_luids_set (df, bb)
1940 /* The LUIDs are monotonically increasing for each basic block. */
1942 for (insn = bb->head; ; insn = NEXT_INSN (insn))
1945 DF_INSN_LUID (df, insn) = luid++;
1946 DF_INSN_LUID (df, insn) = luid;
1948 if (insn == bb->end)
1955 /* Assign LUIDs for each basic block within BLOCKS. */
/* Accumulates the per-block counts returned by df_bb_luids_set.  */
1957 df_luids_set (df, blocks)
1964 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1966 total += df_bb_luids_set (df, bb);
1971 /* Perform dataflow analysis using existing DF structure for blocks
1972 within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
/* Workhorse behind df_analyse.  Phases visible in this view:
   1. widen FLAGS into AFLAGS (chains imply the underlying bitmaps);
   2. (re)scan refs -- incrementally via df_refs_update when UPDATE,
      otherwise from scratch;
   3. allocate bitmaps, assign LUIDs, rebuild reg-def/reg-use chains;
   4. compute block orderings (DFS, reverse-completion, reverse
      topological) used to drive the iterative solvers;
   5. solve RD (forward), RU (backward), LR (backward) as requested and
      derive use-def / def-use chains and register info from them;
   6. free the ordering arrays.  */
1974 df_analyse_1 (df, blocks, flags, update)
/* Chain computations need the underlying reaching sets.  */
1987 if (flags & DF_UD_CHAIN)
1988 aflags |= DF_RD | DF_RD_CHAIN;
1990 if (flags & DF_DU_CHAIN)
1994 aflags |= DF_RU_CHAIN;
1996 if (flags & DF_REG_INFO)
2000 blocks = df->all_blocks;
2005 df_refs_update (df);
2006 /* More fine grained incremental dataflow analysis would be
2007 nice. For now recompute the whole shebang for the
2010 df_refs_unlink (df, blocks);
2012 /* All the def-use, use-def chains can be potentially
2013 modified by changes in one block. The size of the
2014 bitmaps can also change. */
2018 /* Scan the function for all register defs and uses. */
2020 df_refs_record (df, blocks);
2022 /* Link all the new defs and uses to the insns. */
2023 df_refs_process (df);
2026 /* Allocate the bitmaps now the total number of defs and uses are
2027 known. If the number of defs or uses have changed, then
2028 these bitmaps need to be reallocated. */
2029 df_bitmaps_alloc (df, aflags);
2031 /* Set the LUIDs for each specified basic block. */
2032 df_luids_set (df, blocks);
2034 /* Recreate reg-def and reg-use chains from scratch so that first
2035 def is at the head of the reg-def chain and the last use is at
2036 the head of the reg-use chain. This is only important for
2037 regs local to a basic block as it speeds up searching. */
2038 if (aflags & DF_RD_CHAIN)
2040 df_reg_def_chain_create (df, blocks);
2043 if (aflags & DF_RU_CHAIN)
2045 df_reg_use_chain_create (df, blocks);
/* Block-ordering arrays; note the order arrays are sized by
   n_basic_blocks while the inverse maps use last_basic_block.  */
2048 df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
2049 df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
2050 df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2051 df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
2052 df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
2053 df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
2055 flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2056 flow_reverse_top_sort_order_compute (df->rts_order);
2057 for (i = 0; i < n_basic_blocks; i++)
2059 df->inverse_dfs_map[df->dfs_order[i]] = i;
2060 df->inverse_rc_map[df->rc_order[i]] = i;
2061 df->inverse_rts_map[df->rts_order[i]] = i;
2065 /* Compute the sets of gens and kills for the defs of each bb. */
2066 df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2068 bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2069 bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2070 bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2071 bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2074 in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2075 out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2076 gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2077 kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
/* Reaching defs: forward problem, solved in reverse-completion order.  */
2079 iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2080 FORWARD, UNION, df_rd_transfer_function,
2081 df->inverse_rc_map, NULL);
2089 if (aflags & DF_UD_CHAIN)
2091 /* Create use-def chains. */
2092 df_ud_chain_create (df, df->all_blocks);
2094 if (! (flags & DF_RD))
2100 /* Compute the sets of gens and kills for the upwards exposed
2102 df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2104 bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2105 bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2106 bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2107 bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2110 in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2111 out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2112 gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2113 kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
/* Reaching uses: backward problem, reverse-topological order.  */
2115 iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2116 BACKWARD, UNION, df_ru_transfer_function,
2117 df->inverse_rts_map, NULL);
2125 if (aflags & DF_DU_CHAIN)
2127 /* Create def-use chains. */
2128 df_du_chain_create (df, df->all_blocks);
2130 if (! (flags & DF_RU))
2134 /* Free up bitmaps that are no longer required. */
2136 df_bitmaps_free (df, dflags);
2140 /* Compute the sets of defs and uses of live variables. */
2141 df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2143 bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2144 bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2145 bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
2146 bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
2149 in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2150 out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2151 use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2152 def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
/* Live registers: backward problem, reverse-topological order.  */
2154 iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
2155 BACKWARD, UNION, df_lr_transfer_function,
2156 df->inverse_rts_map, NULL);
2164 if (aflags & DF_REG_INFO)
2166 df_reg_info_compute (df, df->all_blocks);
2168 free (df->dfs_order);
2169 free (df->rc_order);
2170 free (df->rts_order);
2171 free (df->inverse_rc_map);
2172 free (df->inverse_dfs_map);
2173 free (df->inverse_rts_map);
2177 /* Initialize dataflow analysis. */
/* NOTE(review): this span holds fragments of two routines whose
   signatures are elided -- the df allocator (xcalloc of struct df) and
   the ref-queueing routine that snapshots def_id/use_id so later chain
   rebuilds can tell old refs from new ones.  */
2183 df = xcalloc (1, sizeof (struct df));
2185 /* Squirrel away a global for debugging. */
2192 /* Start queuing refs. */
2197 df->def_id_save = df->def_id;
2198 df->use_id_save = df->use_id;
2199 /* ???? Perhaps we should save current obstack state so that we can
2205 /* Process queued refs. */
/* Attaches every ref created since the last save point to the head of
   its insn's def or use list (df->insns[uid]).  */
2207 df_refs_process (df)
2212 /* Build new insn-def chains. */
2213 for (i = df->def_id_save; i != df->def_id; i++)
2215 struct ref *def = df->defs[i];
2216 unsigned int uid = DF_REF_INSN_UID (def);
2218 /* Add def to head of def list for INSN. */
2220 = df_link_create (def, df->insns[uid].defs);
2223 /* Build new insn-use chains. */
2224 for (i = df->use_id_save; i != df->use_id; i++)
2226 struct ref *use = df->uses[i];
2227 unsigned int uid = DF_REF_INSN_UID (use);
2229 /* Add use to head of use list for INSN. */
2231 = df_link_create (use, df->insns[uid].uses);
2237 /* Update refs for basic block BB. */
/* Rescans only the insns of BB flagged in df->insns_modified:
   old refs are unlinked, then the insn is rescanned from scratch.  */
2239 df_bb_refs_update (df, bb)
2246 /* While we have to scan the chain of insns for this BB, we don't
2247 need to allocate and queue a long chain of BB/INSN pairs. Using
2248 a bitmap for insns_modified saves memory and avoids queuing
2251 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2255 uid = INSN_UID (insn);
2257 if (bitmap_bit_p (df->insns_modified, uid))
2259 /* Delete any allocated refs of this insn. MPH, FIXME. */
2260 df_insn_refs_unlink (df, bb, insn);
2262 /* Scan the insn for refs. */
2263 df_insn_refs_record (df, bb, insn);
2267 if (insn == bb->end)
2274 /* Process all the modified/deleted insns that were queued. */
/* NOTE(review): signature elided in this view.  Grows the register
   table if new pseudos appeared, rescans each modified block, then
   links the freshly created refs onto their insns.  */
2282 if ((unsigned int) max_reg_num () >= df->reg_size)
2283 df_reg_table_realloc (df, 0);
2287 FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2289 count += df_bb_refs_update (df, bb);
2292 df_refs_process (df);
2297 /* Return nonzero if any of the requested blocks in the bitmap
2298 BLOCKS have been modified. */
/* BLOCKS may be a real bitmap, zero (any block counts), or the
   sentinel (bitmap) -1 meaning "only the modified blocks".  */
2300 df_modified_p (df, blocks)
2311 if (bitmap_bit_p (df->bbs_modified, bb->index)
2312 && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
2322 /* Analyze dataflow info for the basic blocks specified by the bitmap
2323 BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2324 modified blocks if BLOCKS is -1. */
/* Public entry point.  On first call (or when the block count changed,
   forcing a full restart) it allocates the df structure and analyses
   everything; on later calls it re-analyses incrementally only when
   something was modified or the requested FLAGS changed, then clears
   the modification bitmaps.  */
2326 df_analyse (df, blocks, flags)
2333 /* We could deal with additional basic blocks being created by
2334 rescanning everything again. */
2335 if (df->n_bbs && df->n_bbs != (unsigned int) last_basic_block)
2338 update = df_modified_p (df, blocks);
2339 if (update || (flags != df->flags))
2345 /* Recompute everything from scratch. */
2348 /* Allocate and initialize data structures. */
2349 df_alloc (df, max_reg_num ());
2350 df_analyse_1 (df, 0, flags, 0);
/* The -1 sentinel resolves to the set of modified blocks.  */
2355 if (blocks == (bitmap) -1)
2356 blocks = df->bbs_modified;
2361 df_analyse_1 (df, blocks, flags, 1);
2362 bitmap_zero (df->bbs_modified);
2363 bitmap_zero (df->insns_modified);
2370 /* Free all the dataflow info and the DF structure. */
2380 /* Unlink INSN from its reference information. */
/* Unlinks every def and use recorded for INSN from the global ref
   structures, then empties the insn's own def/use lists.  */
2382 df_insn_refs_unlink (df, bb, insn)
2384 basic_block bb ATTRIBUTE_UNUSED;
2387 struct df_link *link;
2390 uid = INSN_UID (insn);
2392 /* Unlink all refs defined by this insn. */
2393 for (link = df->insns[uid].defs; link; link = link->next)
2394 df_def_unlink (df, link->ref);
2396 /* Unlink all refs used by this insn. */
2397 for (link = df->insns[uid].uses; link; link = link->next)
2398 df_use_unlink (df, link->ref);
2400 df->insns[uid].defs = 0;
2401 df->insns[uid].uses = 0;
2406 /* Unlink all the insns within BB from their reference information. */
/* Walks bb->head..bb->end inclusive, unlinking each insn's refs.  */
2408 df_bb_refs_unlink (df, bb)
2414 /* Scan the block an insn at a time from beginning to end. */
2415 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2419 /* Unlink refs for INSN. */
2420 df_insn_refs_unlink (df, bb, insn);
2422 if (insn == bb->end)
2428 /* Unlink all the refs in the basic blocks specified by BLOCKS.
2429 Not currently used. */
/* The second df_bb_refs_unlink call below is the all-blocks fallback
   branch taken when BLOCKS is null (the else arm is elided here).  */
2431 df_refs_unlink (df, blocks)
2439 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2441 df_bb_refs_unlink (df, bb);
2447 df_bb_refs_unlink (df, bb);
2452 /* Functions to modify insns. */
2455 /* Delete INSN and all its reference information. */
/* Refuses to delete the block head (the NOTE_INSN_BASIC_BLOCK or
   label), marks the insn modified so a later df_analyse rescans it,
   and returns the insn following the deleted one.  */
2457 df_insn_delete (df, bb, insn)
2459 basic_block bb ATTRIBUTE_UNUSED;
2462 /* If the insn is a jump, we should perhaps call delete_insn to
2463 handle the JUMP_LABEL? */
2465 /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2466 if (insn == bb->head)
2469 /* Delete the insn. */
2472 df_insn_modify (df, bb, insn);
2474 return NEXT_INSN (insn);
2478 /* Mark that INSN within BB may have changed (created/modified/deleted).
2479 This may be called multiple times for the same insn. There is no
2480 harm calling this function if the insn wasn't changed; it will just
2481 slow down the rescanning of refs. */
/* Grows the insn table if needed, then flags both the block and the
   insn as modified so that the next df_analyse rescans them.  */
2483 df_insn_modify (df, bb, insn)
2490 uid = INSN_UID (insn);
2491 if (uid >= df->insn_size)
2492 df_insn_table_realloc (df, uid);
2494 bitmap_set_bit (df->bbs_modified, bb->index);
2495 bitmap_set_bit (df->insns_modified, uid);
2497 /* For incremental updating on the fly, perhaps we could make a copy
2498 of all the refs of the original insn and turn them into
2499 anti-refs. When df_refs_update finds these anti-refs, it annihilates
2500 the original refs. If validate_change fails then these anti-refs
2501 will just get ignored. */
/* Argument bundle threaded through for_each_rtx by the replacement
   walkers below (fields elided in this view: presumably the insn being
   edited, the rtx to match, its replacement, and a modified flag --
   confirm against the full source).  */
2505 typedef struct replace_args {
2513 /* Replace mem pointed to by PX with its associated pseudo register.
2514 DATA is actually a pointer to a structure describing the
2515 instruction currently being scanned and the MEM we are currently
/* for_each_rtx callback.  Skips non-MEM rtxen (including the MEM
   hanging off a CONST_DOUBLE), and queues a validated change for each
   MEM equal to args->match.  */
2518 df_rtx_mem_replace (px, data)
2522 replace_args *args = (replace_args *) data;
2525 if (mem == NULL_RTX)
2528 switch (GET_CODE (mem))
2534 /* We're not interested in the MEM associated with a
2535 CONST_DOUBLE, so there's no need to traverse into one. */
2539 /* This is not a MEM. */
2543 if (!rtx_equal_p (args->match, mem))
2544 /* This is not the MEM we are currently replacing. */
2547 /* Actually replace the MEM. */
/* Final 1 => change is queued for apply_change_group, not applied now.  */
2548 validate_change (args->insn, px, args->replacement, 1);
/* Replace every occurrence of MEM in INSN with REG, mark INSN
   modified, and return whether anything was changed.  */
2556 df_insn_mem_replace (df, bb, insn, mem, reg)
2567 args.replacement = reg;
2570 /* Search and replace all matching mems within insn. */
2571 for_each_rtx (&insn, df_rtx_mem_replace, &args);
2574 df_insn_modify (df, bb, insn);
2576 /* ???? FIXME. We may have a new def or one or more new uses of REG
2577 in INSN. REG should be a new pseudo so it won't affect the
2578 dataflow information that we currently have. We should add
2579 the new uses and defs to INSN and then recreate the chains
2580 when df_analyse is called. */
2581 return args.modified;
2585 /* Replace one register with another. Called through for_each_rtx; PX
2586 points to the rtx being scanned. DATA is actually a pointer to a
2587 structure of arguments. */
/* Pointer identity (x == args->match) suffices here because registers
   are shared rtxen.  The change is queued, not applied immediately.  */
2589 df_rtx_reg_replace (px, data)
2594 replace_args *args = (replace_args *) data;
2599 if (x == args->match)
2601 validate_change (args->insn, px, args->replacement, 1);
2609 /* Replace the reg within every ref on CHAIN that is within the set
2610 BLOCKS of basic blocks with NEWREG. Also update the regs within
/* For each ref on CHAIN whose block is in BLOCKS, substitute NEWREG,
   and -- once per insn (detected by comparing with the next ref's
   insn) -- also rewrite occurrences inside the insn's REG_NOTES.  */
2613 df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
2616 struct df_link *chain;
2620 struct df_link *link;
/* NULL BLOCKS means the whole function.  */
2624 blocks = df->all_blocks;
2626 args.match = oldreg;
2627 args.replacement = newreg;
2630 for (link = chain; link; link = link->next)
2632 struct ref *ref = link->ref;
2633 rtx insn = DF_REF_INSN (ref);
2635 if (! INSN_P (insn))
2638 if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
2640 df_ref_reg_replace (df, ref, oldreg, newreg);
2642 /* Replace occurrences of the reg within the REG_NOTES. */
2643 if ((! link->next || DF_REF_INSN (ref)
2644 != DF_REF_INSN (link->next->ref))
2645 && REG_NOTES (insn))
2648 for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2653 /* Temporary check to ensure that we have a grip on which
2654 regs should be replaced. */
2661 /* Replace all occurrences of register OLDREG with register NEWREG in
2662 blocks defined by bitmap BLOCKS. This also replaces occurrences of
2663 OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2664 routine expects the reg-use and reg-def chains to be valid. */
/* Performs the substitution over both the def chain and the use chain
   of OLDREG's regno.  */
2666 df_reg_replace (df, blocks, oldreg, newreg)
2672 unsigned int oldregno = REGNO (oldreg);
2674 df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2675 df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2680 /* Try replacing the reg within REF with NEWREG. Do not modify
2681 def-use/use-def chains. */
/* Skips refs in deleted (note-ified) insns and refs whose register no
   longer matches OLDREG; otherwise queues the change and marks the
   insn modified.  */
2683 df_ref_reg_replace (df, ref, oldreg, newreg)
2689 /* Check that insn was deleted by being converted into a NOTE. If
2690 so ignore this insn. */
2691 if (! INSN_P (DF_REF_INSN (ref)))
2694 if (oldreg && oldreg != DF_REF_REG (ref))
2697 if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2700 df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
/* Swap bookkeeping so that the def of REGNO made by DEF_INSN is moved
   to USE_INSN and the corresponding use in USE_INSN is removed: the
   use is unlinked entirely, the def's link node is spliced out of
   DEF_INSN's list and pushed onto USE_INSN's, the reg-def chain for
   REGNO is re-headed with the moved def, and the def's insn field is
   repointed at USE_INSN.  (Interior lines elided in this view.)  */
2706 df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
2717 struct df_link *link;
2719 def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2723 use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2727 /* The USE no longer exists. */
2728 use_uid = INSN_UID (use_insn);
2729 df_use_unlink (df, use);
2730 df_ref_unlink (&df->insns[use_uid].uses, use);
2732 /* The DEF requires shifting so remove it from DEF_INSN
2733 and add it to USE_INSN by reusing LINK. */
2734 def_uid = INSN_UID (def_insn);
2735 link = df_ref_unlink (&df->insns[def_uid].defs, def);
2737 link->next = df->insns[use_uid].defs;
2738 df->insns[use_uid].defs = link;
2741 link = df_ref_unlink (&df->regs[regno].defs, def);
2743 link->next = df->regs[regno].defs;
/* BUGFIX: the recycled link must be pushed back onto the reg-def
   chain it was unlinked from (df->regs, indexed by REGNO), not onto
   df->insns indexed by a register number.  */
2744 df->regs[regno].defs = link;
2747 DF_REF_INSN (def) = use_insn;
2752 /* Record df between FIRST_INSN and LAST_INSN inclusive. All new
2753 insns must be processed by this routine. */
/* Marks every insn in the range modified.  A non-const CALL_INSN or a
   CODE_LABEL in the range would require splitting the basic block --
   the branch handling that case is elided from this view.  */
2755 df_insns_modify (df, bb, first_insn, last_insn)
2763 for (insn = first_insn; ; insn = NEXT_INSN (insn))
2767 /* A non-const call should not have slipped through the net. If
2768 it does, we need to create a new basic block. Ouch. The
2769 same applies for a label. */
2770 if ((GET_CODE (insn) == CALL_INSN
2771 && ! CONST_OR_PURE_CALL_P (insn))
2772 || GET_CODE (insn) == CODE_LABEL)
2775 uid = INSN_UID (insn);
2777 if (uid >= df->insn_size)
2778 df_insn_table_realloc (df, uid);
2780 df_insn_modify (df, bb, insn);
2782 if (insn == last_insn)
2788 /* Emit PATTERN before INSN within BB. */
/* Emits the new insns and records them as modified.  PREV_INSN is
   captured first so NEXT_INSN (prev_insn) names the first emitted insn
   even after emission rewires the chain.  */
2790 df_pattern_emit_before (df, pattern, bb, insn)
2791 struct df *df ATTRIBUTE_UNUSED;
2797 rtx prev_insn = PREV_INSN (insn);
2799 /* We should not be inserting before the start of the block. */
2800 if (insn == bb->head)
2802 ret_insn = emit_insn_before (pattern, insn);
/* Nothing was emitted.  */
2803 if (ret_insn == insn)
2806 df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2811 /* Emit PATTERN after INSN within BB. */
/* As df_pattern_emit_before but emitting after INSN; the new insns
   run from NEXT_INSN (insn) to ret_insn.  */
2813 df_pattern_emit_after (df, pattern, bb, insn)
2821 ret_insn = emit_insn_after (pattern, insn);
2822 if (ret_insn == insn)
2825 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2830 /* Emit jump PATTERN after INSN within BB. */
/* Jump-insn variant of df_pattern_emit_after.  */
2832 df_jump_pattern_emit_after (df, pattern, bb, insn)
2840 ret_insn = emit_jump_insn_after (pattern, insn);
2841 if (ret_insn == insn)
2844 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2849 /* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2851 This function should only be used to move loop invariant insns
2852 out of a loop where it has been proven that the def-use info
2853 will still be valid. */
/* Re-homes every def and use ref of INSN onto BEFORE_BB, then lets
   emit_insn_before perform the actual move.  The early
   df_pattern_emit_before call handles the case where INSN is not yet
   known to df (its guarding condition is elided here).  */
2855 df_insn_move_before (df, bb, insn, before_bb, before_insn)
2859 basic_block before_bb;
2862 struct df_link *link;
2866 return df_pattern_emit_before (df, insn, before_bb, before_insn);
2868 uid = INSN_UID (insn);
2870 /* Change bb for all df defined and used by this insn. */
2871 for (link = df->insns[uid].defs; link; link = link->next)
2872 DF_REF_BB (link->ref) = before_bb;
2873 for (link = df->insns[uid].uses; link; link = link->next)
2874 DF_REF_BB (link->ref) = before_bb;
2876 /* The lifetimes of the registers used in this insn will be reduced
2877 while the lifetimes of the registers defined in this insn
2878 are likely to be increased. */
2880 /* ???? Perhaps all the insns moved should be stored on a list
2881 which df_analyse removes when it recalculates data flow. */
2883 return emit_insn_before (insn, before_insn);
2886 /* Functions to query dataflow information. */
/* Return nonzero iff INSN has a def of REGNO, by scanning the insn's
   recorded def list.  */
2890 df_insn_regno_def_p (df, bb, insn, regno)
2892 basic_block bb ATTRIBUTE_UNUSED;
2897 struct df_link *link;
2899 uid = INSN_UID (insn);
2901 for (link = df->insns[uid].defs; link; link = link->next)
2903 struct ref *def = link->ref;
2905 if (DF_REF_REGNO (def) == regno)
/* Return nonzero iff DEF is the sole def reaching every one of its
   uses: walk the def-use chain of DEF, and for each use check that its
   use-def chain contains no other def.  */
2914 df_def_dominates_all_uses_p (df, def)
2915 struct df *df ATTRIBUTE_UNUSED;
2918 struct df_link *du_link;
2920 /* Follow def-use chain to find all the uses of this def. */
2921 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2923 struct ref *use = du_link->ref;
2924 struct df_link *ud_link;
2926 /* Follow use-def chain to check all the defs for this use. */
2927 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2928 if (ud_link->ref != def)
/* Return nonzero iff every def made by INSN dominates (is the sole
   reaching def of) all its uses.  */
2936 df_insn_dominates_all_uses_p (df, bb, insn)
2938 basic_block bb ATTRIBUTE_UNUSED;
2942 struct df_link *link;
2944 uid = INSN_UID (insn);
2946 for (link = df->insns[uid].defs; link; link = link->next)
2948 struct ref *def = link->ref;
2950 if (! df_def_dominates_all_uses_p (df, def))
2958 /* Return nonzero if all DF dominates all the uses within the bitmap
/* Like df_def_dominates_all_uses_p, but only uses whose block lies in
   BLOCKS are inspected; uses outside BLOCKS are ignored.  */
2961 df_def_dominates_uses_p (df, def, blocks)
2962 struct df *df ATTRIBUTE_UNUSED;
2966 struct df_link *du_link;
2968 /* Follow def-use chain to find all the uses of this def. */
2969 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2971 struct ref *use = du_link->ref;
2972 struct df_link *ud_link;
2974 /* Only worry about the uses within BLOCKS. For example,
2975 consider a register defined within a loop that is live at the
2977 if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
2979 /* Follow use-def chain to check all the defs for this use. */
2980 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2981 if (ud_link->ref != def)
2989 /* Return nonzero if all the defs of INSN within BB dominate
2990 all the corresponding uses. */
2992 df_insn_dominates_uses_p (df, bb, insn, blocks)
2994 basic_block bb ATTRIBUTE_UNUSED;
2999 struct df_link *link;
3001 uid = INSN_UID (insn);
3003 for (link = df->insns[uid].defs; link; link = link->next)
3005 struct ref *def = link->ref;
3007 /* Only consider the defs within BLOCKS. */
3008 if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3009 && ! df_def_dominates_uses_p (df, def, blocks))
3016 /* Return the basic block that REG is referenced in, or NULL if it is
3017 referenced in multiple basic blocks. */
3019 df_regno_bb (df, regno)
3023 struct df_link *defs = df->regs[regno].defs;
3024 struct df_link *uses = df->regs[regno].uses;
/* Head of each chain: first def and (per list ordering) last use.  */
3025 struct ref *def = defs ? defs->ref : 0;
3026 struct ref *use = uses ? uses->ref : 0;
3027 basic_block bb_def = def ? DF_REF_BB (def) : 0;
3028 basic_block bb_use = use ? DF_REF_BB (use) : 0;
3030 /* Compare blocks of first def and last use. ???? FIXME. What if
3031 the reg-def and reg-use lists are not correctly ordered. */
/* NOTE(review): a regno with only defs or only uses yields one NULL
   block here and so returns NULL even when confined to one BB.  */
3032 return bb_def == bb_use ? bb_def : 0;
3036 /* Return nonzero if REG used in multiple basic blocks. */
/* NOTE(review): df_regno_bb returns non-NULL for a single-block reg,
   so this reads as "local", not "global" -- confirm against callers.  */
3038 df_reg_global_p (df, reg)
3042 return df_regno_bb (df, REGNO (reg)) != 0;
3046 /* Return total lifetime (in insns) of REG. */
3048 df_reg_lifetime (df, reg)
/* Lifetime was accumulated during analysis; this is a plain lookup.  */
3052 return df->regs[REGNO (reg)].lifetime;
3056 /* Return nonzero if REG live at start of BB. */
3058 df_bb_reg_live_start_p (df, bb, reg)
3059 struct df *df ATTRIBUTE_UNUSED;
3063 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* Sanity check that live-register (LR) info has been computed
   (the abort and #endif are not visible in this extract).  */
3065 #ifdef ENABLE_CHECKING
3066 if (! bb_info->lr_in)
3070 return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3074 /* Return nonzero if REG live at end of BB. */
3076 df_bb_reg_live_end_p (df, bb, reg)
3077 struct df *df ATTRIBUTE_UNUSED;
3081 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3083 #ifdef ENABLE_CHECKING
/* NOTE(review): the check tests lr_in while the result reads lr_out --
   presumably both are allocated together; confirm, else test lr_out.  */
3084 if (! bb_info->lr_in)
3088 return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3092 /* Return -1 if life of REG1 before life of REG2, 1 if life of REG1
3093 after life of REG2, or 0, if the lives overlap. */
3095 df_bb_regs_lives_compare (df, bb, reg1, reg2)
3101 unsigned int regno1 = REGNO (reg1);
3102 unsigned int regno2 = REGNO (reg2);
3109 /* The regs must be local to BB. */
3110 if (df_regno_bb (df, regno1) != bb
3111 || df_regno_bb (df, regno2) != bb)
/* REG1's life ends before REG2's begins when REG2's first def comes
   after REG1's last use (compared by local insn order, LUID).  */
3114 def2 = df_bb_regno_first_def_find (df, bb, regno2);
3115 use1 = df_bb_regno_last_use_find (df, bb, regno1);
3117 if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3118 > DF_INSN_LUID (df, DF_REF_INSN (use1)))
/* Symmetric test for REG1's life following REG2's.  */
3121 def1 = df_bb_regno_first_def_find (df, bb, regno1);
3122 use2 = df_bb_regno_last_use_find (df, bb, regno2);
3124 if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3125 > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3132 /* Return last use of REGNO within BB. */
3134 df_bb_regno_last_use_find (df, bb, regno)
3136 basic_block bb ATTRIBUTE_UNUSED;
3139 struct df_link *link;
3141 /* This assumes that the reg-use list is ordered such that for any
3142 BB, the last use is found first. However, since the BBs are not
3143 ordered, the first use in the chain is not necessarily the last
3144 use in the function. */
3145 for (link = df->regs[regno].uses; link; link = link->next)
3147 struct ref *use = link->ref;
/* First chain entry belonging to BB is, by the ordering assumption
   above, the last use in that block.  */
3149 if (DF_REF_BB (use) == bb)
3156 /* Return first def of REGNO within BB. */
3158 df_bb_regno_first_def_find (df, bb, regno)
3160 basic_block bb ATTRIBUTE_UNUSED;
3163 struct df_link *link;
3165 /* This assumes that the reg-def list is ordered such that for any
3166 BB, the first def is found first. However, since the BBs are not
3167 ordered, the first def in the chain is not necessarily the first
3168 def in the function. */
3169 for (link = df->regs[regno].defs; link; link = link->next)
3171 struct ref *def = link->ref;
/* First chain entry belonging to BB is the first def in that block.  */
3173 if (DF_REF_BB (def) == bb)
3180 /* Return a use of REGNO inside INSN within BB (first match in the
   insn's use list).  NOTE(review): comment said "first use" but the
   function is named ..._last_use_find -- confirm the use-list ordering
   intended here.  */
3182 df_bb_insn_regno_last_use_find (df, bb, insn, regno)
3184 basic_block bb ATTRIBUTE_UNUSED;
3189 struct df_link *link;
3191 uid = INSN_UID (insn);
3193 for (link = df->insns[uid].uses; link; link = link->next)
3195 struct ref *use = link->ref;
3197 if (DF_REF_REGNO (use) == regno)
3205 /* Return first def of REGNO inside INSN within BB. */
3207 df_bb_insn_regno_first_def_find (df, bb, insn, regno)
3209 basic_block bb ATTRIBUTE_UNUSED;
3214 struct df_link *link;
3216 uid = INSN_UID (insn);
/* Scan INSN's def list for the first def of REGNO.  */
3218 for (link = df->insns[uid].defs; link; link = link->next)
3220 struct ref *def = link->ref;
3222 if (DF_REF_REGNO (def) == regno)
3230 /* Return insn using REG if the BB contains only a single
3231 use and def of REG. */
3233 df_bb_single_def_use_insn_find (df, bb, insn, reg)
3241 struct df_link *du_link;
/* Find REG's def inside INSN, then inspect its def-use chain.  */
3243 def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3248 du_link = DF_REF_CHAIN (def);
3255 /* Check if def is dead. */
3259 /* Check for multiple uses. */
3263 return DF_REF_INSN (use);
3266 /* Functions for debugging/dumping dataflow information. */
3269 /* Dump a def-use or use-def chain for REF to FILE. */
3271 df_chain_dump (link, file)
3272 struct df_link *link;
/* Print each ref as d<id> (def) or u<id> (use), brace-delimited.  */
3275 fprintf (file, "{ ");
3276 for (; link; link = link->next)
3278 fprintf (file, "%c%d ",
3279 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3280 DF_REF_ID (link->ref));
3282 fprintf (file, "}");
/* Like df_chain_dump, but also print each ref's register number
   in parentheses.  */
3286 df_chain_dump_regno (link, file)
3287 struct df_link *link;
3290 fprintf (file, "{ ");
3291 for (; link; link = link->next)
3293 fprintf (file, "%c%d(%d) ",
3294 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3295 DF_REF_ID (link->ref),
3296 DF_REF_REGNO (link->ref));
3298 fprintf (file, "}");
3301 /* Dump dataflow info. */
/* FLAGS is a mask of DF_* bits selecting which problems to dump;
   each section below is guarded by a flag test (some guards are not
   visible in this extract).  */
3303 df_dump (df, flags, file)
3314 fprintf (file, "\nDataflow summary:\n");
3315 fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3316 df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
/* Reaching definitions: per-BB in/gen/kill/out bitmaps.  */
3322 fprintf (file, "Reaching defs:\n");
3325 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3327 if (! bb_info->rd_in)
3330 fprintf (file, "bb %d in \t", bb->index);
3331 dump_bitmap (file, bb_info->rd_in);
3332 fprintf (file, "bb %d gen \t", bb->index);
3333 dump_bitmap (file, bb_info->rd_gen);
3334 fprintf (file, "bb %d kill\t", bb->index);
3335 dump_bitmap (file, bb_info->rd_kill);
3336 fprintf (file, "bb %d out \t", bb->index);
3337 dump_bitmap (file, bb_info->rd_out);
/* Use-def chains: one line per def, then its chain of uses.  */
3341 if (flags & DF_UD_CHAIN)
3343 fprintf (file, "Use-def chains:\n");
3344 for (j = 0; j < df->n_defs; j++)
3348 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3349 j, DF_REF_BBNO (df->defs[j]),
3350 DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3351 DF_REF_INSN_UID (df->defs[j]),
3352 DF_REF_REGNO (df->defs[j]));
3353 if (df->defs[j]->flags & DF_REF_READ_WRITE)
3354 fprintf (file, "read/write ");
3355 df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3356 fprintf (file, "\n");
/* Reaching (upward-exposed) uses: per-BB in/gen/kill/out bitmaps.  */
3363 fprintf (file, "Reaching uses:\n");
3366 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3368 if (! bb_info->ru_in)
3371 fprintf (file, "bb %d in \t", bb->index);
3372 dump_bitmap (file, bb_info->ru_in);
3373 fprintf (file, "bb %d gen \t", bb->index);
3374 dump_bitmap (file, bb_info->ru_gen);
3375 fprintf (file, "bb %d kill\t", bb->index);
3376 dump_bitmap (file, bb_info->ru_kill);
3377 fprintf (file, "bb %d out \t", bb->index);
3378 dump_bitmap (file, bb_info->ru_out);
/* Def-use chains: one line per use, then its chain of defs.  */
3382 if (flags & DF_DU_CHAIN)
3384 fprintf (file, "Def-use chains:\n");
3385 for (j = 0; j < df->n_uses; j++)
3389 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3390 j, DF_REF_BBNO (df->uses[j]),
3391 DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3392 DF_REF_INSN_UID (df->uses[j]),
3393 DF_REF_REGNO (df->uses[j]));
3394 if (df->uses[j]->flags & DF_REF_READ_WRITE)
3395 fprintf (file, "read/write ");
3396 df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3397 fprintf (file, "\n");
/* Live registers: per-BB in/use/def/out bitmaps.  */
3404 fprintf (file, "Live regs:\n");
3407 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3409 if (! bb_info->lr_in)
3412 fprintf (file, "bb %d in \t", bb->index);
3413 dump_bitmap (file, bb_info->lr_in);
3414 fprintf (file, "bb %d use \t", bb->index);
3415 dump_bitmap (file, bb_info->lr_use);
3416 fprintf (file, "bb %d def \t", bb->index);
3417 dump_bitmap (file, bb_info->lr_def);
3418 fprintf (file, "bb %d out \t", bb->index);
3419 dump_bitmap (file, bb_info->lr_out);
/* Per-register summary: lifetime, def/use counts and chains,
   restricted to registers with something to show.  */
3423 if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3425 struct reg_info *reg_info = df->regs;
3427 fprintf (file, "Register info:\n");
3428 for (j = 0; j < df->n_regs; j++)
3430 if (((flags & DF_REG_INFO)
3431 && (reg_info[j].n_uses || reg_info[j].n_defs))
3432 || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3433 || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3435 fprintf (file, "reg %d", j);
3436 if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3438 basic_block bb = df_regno_bb (df, j);
3441 fprintf (file, " bb %d", bb->index);
3443 fprintf (file, " bb ?");
3445 if (flags & DF_REG_INFO)
3447 fprintf (file, " life %d", reg_info[j].lifetime);
3450 if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3452 fprintf (file, " defs ");
3453 if (flags & DF_REG_INFO)
3454 fprintf (file, "%d ", reg_info[j].n_defs);
3455 if (flags & DF_RD_CHAIN)
3456 df_chain_dump (reg_info[j].defs, file);
3459 if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3461 fprintf (file, " uses ");
3462 if (flags & DF_REG_INFO)
3463 fprintf (file, "%d ", reg_info[j].n_uses);
3464 if (flags & DF_RU_CHAIN)
3465 df_chain_dump (reg_info[j].uses, file);
3468 fprintf (file, "\n");
3472 fprintf (file, "\n");
/* Dump INSN's bb, luid and def/use chains to FILE.  Does nothing
   (returns early; not visible here) if INSN is outside df's insn
   table.  */
3477 df_insn_debug (df, insn, file)
3485 uid = INSN_UID (insn);
3486 if (uid >= df->insn_size)
/* Derive the BB index from the first def, else the first use.  */
3489 if (df->insns[uid].defs)
3490 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3491 else if (df->insns[uid].uses)
3492 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3496 fprintf (file, "insn %d bb %d luid %d defs ",
3497 uid, bbi, DF_INSN_LUID (df, insn));
3498 df_chain_dump (df->insns[uid].defs, file);
3499 fprintf (file, " uses ");
3500 df_chain_dump (df->insns[uid].uses, file);
3501 fprintf (file, "\n");
/* Like df_insn_debug, but dump chains with register numbers
   (via df_chain_dump_regno).  */
3505 df_insn_debug_regno (df, insn, file)
3513 uid = INSN_UID (insn);
3514 if (uid >= df->insn_size)
/* Derive the BB index from the first def, else the first use.  */
3517 if (df->insns[uid].defs)
3518 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3519 else if (df->insns[uid].uses)
3520 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3524 fprintf (file, "insn %d bb %d luid %d defs ",
3525 uid, bbi, DF_INSN_LUID (df, insn));
3526 df_chain_dump_regno (df->insns[uid].defs, file);
3527 fprintf (file, " uses ");
3528 df_chain_dump_regno (df->insns[uid].uses, file);
3529 fprintf (file, "\n");
/* Dump REGNO's lifetime and def/use chains to FILE; no-op for a
   regno outside df's register table.  */
3533 df_regno_debug (df, regno, file)
3538 if (regno >= df->reg_size)
3541 fprintf (file, "reg %d life %d defs ",
3542 regno, df->regs[regno].lifetime);
3543 df_chain_dump (df->regs[regno].defs, file);
3544 fprintf (file, " uses ");
3545 df_chain_dump (df->regs[regno].uses, file);
3546 fprintf (file, "\n");
/* Dump a single ref (def or use) REF and its chain to FILE.  */
3551 df_ref_debug (df, ref, file)
/* 'd' marks a def, 'u' a use, followed by the ref id.  */
3556 fprintf (file, "%c%d ",
3557 DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3559 fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3562 DF_INSN_LUID (df, DF_REF_INSN (ref)),
3563 INSN_UID (DF_REF_INSN (ref)));
3564 df_chain_dump (DF_REF_CHAIN (ref), file);
3565 fprintf (file, "\n");
/* Debugger convenience entry points: each dumps one item (insn, reg,
   regno, ref, def number, use number, or chain) to stderr through
   ddf -- presumably a file-scope df pointer set up for debugging;
   confirm where ddf is assigned.  Several wrappers' headers are not
   visible in this extract.  */
3570 debug_df_insn (insn)
3573 df_insn_debug (ddf, insn, stderr);
3582 df_regno_debug (ddf, REGNO (reg), stderr);
3587 debug_df_regno (regno)
3590 df_regno_debug (ddf, regno, stderr);
3598 df_ref_debug (ddf, ref, stderr);
3603 debug_df_defno (defno)
3606 df_ref_debug (ddf, ddf->defs[defno], stderr);
3611 debug_df_useno (defno)
3614 df_ref_debug (ddf, ddf->uses[defno], stderr);
3619 debug_df_chain (link)
3620 struct df_link *link;
3622 df_chain_dump (link, stderr);
3623 fputc ('\n', stderr);
3626 /* Hybrid search algorithm from "Implementation Techniques for
3627 Efficient Data-Flow Analysis of Large Programs". */
/* Depth-first propagation of bitmap dataflow for BLOCK: recompute
   in/out via CONF_OP over the neighbours given by DIR, apply TRANSFUN,
   and on change queue the affected neighbours on PENDING, then recurse
   into unvisited ones.  */
3629 hybrid_search_bitmap (block, in, out, gen, kill, dir,
3630 conf_op, transfun, visited, pending,
3633 bitmap *in, *out, *gen, *kill;
3634 enum df_flow_dir dir;
3635 enum df_confluence_op conf_op;
3636 transfer_function_bitmap transfun;
3642 int i = block->index;
3644 basic_block bb = block;
3645 SET_BIT (visited, block->index);
3646 if (TEST_BIT (pending, block->index))
3650 /* Calculate <conf_op> of predecessor_outs */
3651 bitmap_zero (in[i]);
3652 for (e = bb->pred; e != 0; e = e->pred_next)
3654 if (e->src == ENTRY_BLOCK_PTR)
3659 bitmap_a_or_b (in[i], in[i], out[e->src->index]);
3662 bitmap_a_and_b (in[i], in[i], out[e->src->index]);
3669 /* Calculate <conf_op> of successor ins */
3670 bitmap_zero (out[i]);
3671 for (e = bb->succ; e != 0; e = e->succ_next)
3673 if (e->dest == EXIT_BLOCK_PTR)
3678 bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3681 bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3687 (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3688 RESET_BIT (pending, i);
/* Forward direction: a change to out[i] may affect each successor.  */
3693 for (e = bb->succ; e != 0; e = e->succ_next)
3695 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3697 SET_BIT (pending, e->dest->index);
/* Backward direction: a change to in[i] may affect each predecessor.
   FIX: this test read e->dest->index == i, but for every edge on
   bb->pred, e->dest IS bb, so the test was always true and no
   predecessor was ever queued -- backward problems never propagated.
   Skip only the entry block and genuine self-loops (e->src == bb).  */
3702 for (e = bb->pred; e != 0; e = e->pred_next)
3704 if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3706 SET_BIT (pending, e->src->index);
/* Recurse depth-first into unvisited successors...  */
3713 for (e = bb->succ; e != 0; e = e->succ_next)
3715 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3717 if (!TEST_BIT (visited, e->dest->index))
3718 hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
3719 conf_op, transfun, visited, pending,
/* ...or predecessors, depending on DIR.  */
3725 for (e = bb->pred; e != 0; e = e->pred_next)
3727 if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3729 if (!TEST_BIT (visited, e->src->index))
3730 hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
3731 conf_op, transfun, visited, pending,
3738 /* Hybrid search for sbitmaps, rather than bitmaps. */
/* Identical in structure to hybrid_search_bitmap above, using the
   simple-bitmap (sbitmap) operations.  */
3740 hybrid_search_sbitmap (block, in, out, gen, kill, dir,
3741 conf_op, transfun, visited, pending,
3744 sbitmap *in, *out, *gen, *kill;
3745 enum df_flow_dir dir;
3746 enum df_confluence_op conf_op;
3747 transfer_function_sbitmap transfun;
3753 int i = block->index;
3755 basic_block bb = block;
3756 SET_BIT (visited, block->index);
3757 if (TEST_BIT (pending, block->index))
3761 /* Calculate <conf_op> of predecessor_outs */
3762 sbitmap_zero (in[i]);
3763 for (e = bb->pred; e != 0; e = e->pred_next)
3765 if (e->src == ENTRY_BLOCK_PTR)
3770 sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
3773 sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
3780 /* Calculate <conf_op> of successor ins */
3781 sbitmap_zero (out[i]);
3782 for (e = bb->succ; e != 0; e = e->succ_next)
3784 if (e->dest == EXIT_BLOCK_PTR)
3789 sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3792 sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3798 (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3799 RESET_BIT (pending, i);
/* Forward direction: queue successors affected by the new out[i].  */
3804 for (e = bb->succ; e != 0; e = e->succ_next)
3806 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3808 SET_BIT (pending, e->dest->index);
/* Backward direction.  FIX: as in hybrid_search_bitmap, this test
   read e->dest->index == i, which is always true for edges on
   bb->pred (e->dest is bb), so predecessors were never queued.
   Test e->src->index instead.  */
3813 for (e = bb->pred; e != 0; e = e->pred_next)
3815 if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3817 SET_BIT (pending, e->src->index);
/* Recurse depth-first into unvisited successors...  */
3824 for (e = bb->succ; e != 0; e = e->succ_next)
3826 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3828 if (!TEST_BIT (visited, e->dest->index))
3829 hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
3830 conf_op, transfun, visited, pending,
/* ...or predecessors, depending on DIR.  */
3836 for (e = bb->pred; e != 0; e = e->pred_next)
3838 if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3840 if (!TEST_BIT (visited, e->src->index))
3841 hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
3842 conf_op, transfun, visited, pending,
3853 in, out = Filled in by function.
3854 blocks = Blocks to analyze.
3855 dir = Dataflow direction.
3856 conf_op = Confluence operation.
3857 transfun = Transfer function.
3858 order = Order to iterate in. (Should map block numbers -> order)
3859 data = Whatever you want. It's passed to the transfer function.
3861 This function will perform iterative bitvector dataflow, producing
3862 the in and out sets. Even if you only want to perform it for a
3863 small number of blocks, the vectors for in and out must be large
3864 enough for *all* blocks, because changing one block might affect
3865 others. However, it'll only put what you say to analyze on the
3868 For forward problems, you probably want to pass in a mapping of
3869 block number to rc_order (like df->inverse_rc_map).
3872 iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
3873 dir, conf_op, transfun, order, data)
3874 sbitmap *in, *out, *gen, *kill;
3876 enum df_flow_dir dir;
3877 enum df_confluence_op conf_op;
3878 transfer_function_sbitmap transfun;
3885 sbitmap visited, pending;
3886 pending = sbitmap_alloc (last_basic_block);
3887 visited = sbitmap_alloc (last_basic_block);
3888 sbitmap_zero (pending);
3889 sbitmap_zero (visited);
/* Seed the priority worklist with BLOCKS, keyed by ORDER, and
   initialize each block's out (forward) or in (backward) to gen.  */
3890 worklist = fibheap_new ();
3891 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3893 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3894 SET_BIT (pending, i);
3896 sbitmap_copy (out[i], gen[i]);
3898 sbitmap_copy (in[i], gen[i]);
/* Drain the worklist via hybrid search; while anything is still
   pending, refill the worklist and clear visited for another pass.  */
3900 while (sbitmap_first_set_bit (pending) != -1)
3902 while (!fibheap_empty (worklist))
3904 i = (size_t) fibheap_extract_min (worklist);
3905 bb = BASIC_BLOCK (i);
3906 if (!TEST_BIT (visited, bb->index))
3907 hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
3908 conf_op, transfun, visited, pending, data);
3910 if (sbitmap_first_set_bit (pending) != -1)
3912 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3914 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3916 sbitmap_zero (visited);
3923 sbitmap_free (pending);
3924 sbitmap_free (visited);
3925 fibheap_delete (worklist);
3928 /* Exactly the same as iterative_dataflow_sbitmap, except it works on
/* ...bitmaps instead of sbitmaps (continuation of truncated comment);
   see iterative_dataflow_sbitmap above for the parameter contract.  */
3931 iterative_dataflow_bitmap (in, out, gen, kill, blocks,
3932 dir, conf_op, transfun, order, data)
3933 bitmap *in, *out, *gen, *kill;
3935 enum df_flow_dir dir;
3936 enum df_confluence_op conf_op;
3937 transfer_function_bitmap transfun;
3944 sbitmap visited, pending;
3945 pending = sbitmap_alloc (last_basic_block);
3946 visited = sbitmap_alloc (last_basic_block);
3947 sbitmap_zero (pending);
3948 sbitmap_zero (visited);
/* Seed the priority worklist with BLOCKS, keyed by ORDER, and
   initialize each block's out (forward) or in (backward) to gen.  */
3949 worklist = fibheap_new ();
3950 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3952 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3953 SET_BIT (pending, i);
3955 bitmap_copy (out[i], gen[i]);
3957 bitmap_copy (in[i], gen[i]);
/* Drain the worklist via hybrid search; while anything is still
   pending, refill the worklist and clear visited for another pass.  */
3959 while (sbitmap_first_set_bit (pending) != -1)
3961 while (!fibheap_empty (worklist))
3963 i = (size_t) fibheap_extract_min (worklist);
3964 bb = BASIC_BLOCK (i);
3965 if (!TEST_BIT (visited, bb->index))
3966 hybrid_search_bitmap (bb, in, out, gen, kill, dir,
3967 conf_op, transfun, visited, pending, data);
3969 if (sbitmap_first_set_bit (pending) != -1)
3971 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3973 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3975 sbitmap_zero (visited);
3982 sbitmap_free (pending);
3983 sbitmap_free (visited);
3984 fibheap_delete (worklist);