1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
28 #include "coretypes.h"
32 #include "insn-config.h"
37 #include "alloc-pool.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
/* Registers with more refs than this threshold use the sparse
   (per-regno) kill representation rather than per-ref bits.  */
46 #define DF_SPARSE_THRESHOLD 32
/* Scratch bitmaps of regnos defined so far in the current block /
   current insn during local gen/kill computation.  */
48 static bitmap seen_in_block = NULL;
49 static bitmap seen_in_insn = NULL;
52 /*----------------------------------------------------------------------------
53 Public functions access functions for the dataflow problems.
54 ----------------------------------------------------------------------------*/
56 /* Get the instance of the problem that DFLOW is dependent on. */
/* Return the instance of the problem that DFLOW's problem depends on.
   Asserts that the dependency is declared.  NOTE(review): excerpt —
   the return-type line and braces are elided here.  */
59 df_get_dependent_problem (struct dataflow *dflow)
61 struct df *df = dflow->df;
62 struct df_problem *dependent_problem = dflow->problem->dependent_problem;
64 gcc_assert (dependent_problem);
65 return df->problems_by_index[dependent_problem->id];
69 /* Create a du or ud chain from SRC to DST and link it into SRC. */
/* Allocate a new link from the block pool and push it on the front of
   SRC's chain.  NOTE(review): stray second ';' on the pool_alloc line —
   harmless, should be removed.  Excerpt elides the link->ref/next
   assignments and return.  */
72 df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
74 struct df_link *head = DF_REF_CHAIN (src);
75 struct df_link *link = pool_alloc (dflow->block_pool);;
77 DF_REF_CHAIN (src) = link;
84 /* Delete a du or ud chain for REF. If LINK is NULL, delete all
85 chains for ref and check to see if the reverse chains can also be
86 deleted. If LINK is not NULL it must be a link off of ref. In
87 this case, the other end is not deleted. */
/* Unlink LINK from REF's chain (or, with LINK == NULL, delete the whole
   chain recursively via the opposite-direction chains).  NOTE(review):
   excerpt — the if/else/loop framing lines are elided here.  */
90 df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
92 struct df_link *chain = DF_REF_CHAIN (ref);
95 /* Link was the first element in the chain. */
97 DF_REF_CHAIN (ref) = link->next;
100 /* Link is an internal element in the chain. */
101 struct df_link *prev = chain;
/* Splice LINK out and return its storage to the pool.  */
106 prev->next = chain->next;
113 pool_free (dflow->block_pool, link);
117 /* If chain is NULL here, it was because of a recursive call
118 when the other flavor of chains was not built. Just run thru
119 the entire chain calling the other side and then deleting the
/* Save the successor before the recursive call frees CHAIN.  */
123 struct df_link *next = chain->next;
124 /* Delete the other side if it exists. */
125 df_chain_unlink (dflow, chain->ref, chain);
132 /* Copy the du or ud chain starting at FROM_REF and attach it to
/* Walk FROM_REF's chain and create a parallel chain hanging off TO_REF.
   NOTE(review): excerpt — the enclosing loop header is elided.  */
136 df_chain_copy (struct dataflow *dflow,
137 struct df_ref *to_ref,
138 struct df_link *from_ref)
142 df_chain_create (dflow, to_ref, from_ref->ref);
143 from_ref = from_ref->next;
148 /* Get the live in set for BB no matter what problem happens to be
/* Return the live-in set for BB from the most precise live problem
   present: UREC (register allocation) > UR > LR.  LR must exist.  */
152 df_get_live_in (struct df *df, basic_block bb)
154 gcc_assert (df->problems_by_index[DF_LR]);
156 if (df->problems_by_index[DF_UREC])
157 return DF_RA_LIVE_IN (df, bb);
158 else if (df->problems_by_index[DF_UR])
159 return DF_LIVE_IN (df, bb);
161 return DF_UPWARD_LIVE_IN (df, bb);
165 /* Get the live out set for BB no matter what problem happens to be
/* Return the live-out set for BB; same problem-preference order as
   df_get_live_in: UREC > UR > LR.  */
169 df_get_live_out (struct df *df, basic_block bb)
171 gcc_assert (df->problems_by_index[DF_LR]);
173 if (df->problems_by_index[DF_UREC])
174 return DF_RA_LIVE_OUT (df, bb);
175 else if (df->problems_by_index[DF_UR])
176 return DF_LIVE_OUT (df, bb);
178 return DF_UPWARD_LIVE_OUT (df, bb);
182 /*----------------------------------------------------------------------------
184 ----------------------------------------------------------------------------*/
186 /* Generic versions to get the void* version of the block info. Only
187 used inside the problem instace vectors. */
189 /* Grow the bb_info array. */
/* Ensure DFLOW->block_info can index every current basic block;
   grows by an extra 25% to amortize reallocation, zeroing new slots.  */
192 df_grow_bb_info (struct dataflow *dflow)
194 unsigned int new_size = last_basic_block + 1;
195 if (dflow->block_info_size < new_size)
/* Over-allocate so repeated CFG growth does not realloc each time.  */
197 new_size += new_size / 4;
198 dflow->block_info = xrealloc (dflow->block_info,
199 new_size *sizeof (void*));
200 memset (dflow->block_info + dflow->block_info_size, 0,
201 (new_size - dflow->block_info_size) *sizeof (void *));
202 dflow->block_info_size = new_size;
206 /* Dump a def-use or use-def chain for REF to FILE. */
/* Dump LINK's chain to FILE as "{ d|u<id>(bb B insn I) ... }";
   insn uid -1 marks an artificial (no-insn) ref.  */
209 df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
211 fprintf (file, "{ ");
212 for (; link; link = link->next)
214 fprintf (file, "%c%d(bb %d insn %d) ",
215 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
216 DF_REF_ID (link->ref),
217 DF_REF_BBNO (link->ref),
218 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
224 /* Print some basic block info as part of df_dump. */
/* Print BB's index with its predecessor and successor indices:
   "( preds )->[bb]->( succs )".  */
227 df_print_bb_index (basic_block bb, FILE *file)
232 fprintf (file, "( ");
233 FOR_EACH_EDGE (e, ei, bb->preds)
235 basic_block pred = e->src;
236 fprintf (file, "%d ", pred->index);
238 fprintf (file, ")->[%d]->( ", bb->index);
239 FOR_EACH_EDGE (e, ei, bb->succs)
241 basic_block succ = e->dest;
242 fprintf (file, "%d ", succ->index);
244 fprintf (file, ")\n");
248 /* Return the set of reference ids in CHAIN, caching the result in *BMAP. */
/* Return (lazily building and caching in MAPS[REGNO]) the bitmap of
   ref ids [START, START+COUNT) belonging to REGNO.  NOTE(review):
   stray second ';' after "start + count" — should be removed.
   Excerpt elides the cache-hit test and return.  */
251 df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
253 bitmap ids = maps[regno];
257 unsigned int end = start + count;;
258 ids = BITMAP_ALLOC (NULL);
260 for (i = start; i < end; i++)
261 bitmap_set_bit (ids, i);
267 /* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
/* NOTE(review): fragments of the helpers that allocate and free the
   seen_in_block / seen_in_insn scratch bitmaps; the function headers
   are elided in this excerpt — presumably df_set_seen / df_unset_seen;
   confirm against the full file.  */
273 seen_in_block = BITMAP_ALLOC (NULL);
274 seen_in_insn = BITMAP_ALLOC (NULL);
281 BITMAP_FREE (seen_in_block);
282 BITMAP_FREE (seen_in_insn);
287 /*----------------------------------------------------------------------------
290 Find the locations in the function where each use site for a pseudo
293 ----------------------------------------------------------------------------*/
/* Per-instance data for the reaching-uses (RU) problem.  */
295 struct df_ru_problem_data
297 bitmap *use_sites; /* Bitmap of uses for each pseudo. */
298 unsigned int use_sites_size; /* Size of use_sites. */
299 /* The set of defs to regs invalidated by call. */
300 bitmap sparse_invalidated_by_call;
301 /* The set of defs to regs invalidate by call for ru. */
302 bitmap dense_invalidated_by_call;
305 /* Get basic block info. */
/* Accessors for the RU per-basic-block info stored in
   dflow->block_info[INDEX].  */
307 struct df_ru_bb_info *
308 df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
310 return (struct df_ru_bb_info *) dflow->block_info[index];
314 /* Set basic block info. */
317 df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
318 struct df_ru_bb_info *bb_info)
320 dflow->block_info[index] = bb_info;
324 /* Free basic block info. */
/* Free all bitmaps in VBB_INFO and return the record to the pool.  */
327 df_ru_free_bb_info (struct dataflow *dflow,
328 basic_block bb ATTRIBUTE_UNUSED,
331 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
334 BITMAP_FREE (bb_info->kill);
335 BITMAP_FREE (bb_info->sparse_kill);
336 BITMAP_FREE (bb_info->gen);
337 BITMAP_FREE (bb_info->in);
338 BITMAP_FREE (bb_info->out);
339 pool_free (dflow->block_pool, bb_info);
344 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
345 not touched unless the block is new. */
/* Allocate (or reset) RU problem data and per-block bitmaps for the
   blocks in BLOCKS_TO_RESCAN.  Existing blocks keep their in/out
   solution bits; only gen/kill are cleared.  NOTE(review): excerpt —
   several framing lines (braces, else) are elided here.  */
348 df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
350 unsigned int bb_index;
352 unsigned int reg_size = max_reg_num ();
354 if (! dflow->block_pool)
355 dflow->block_pool = create_alloc_pool ("df_ru_block pool",
356 sizeof (struct df_ru_bb_info), 50);
/* Re-use an existing instance: drop stale cached use-site bitmaps.  */
358 if (dflow->problem_data)
361 struct df_ru_problem_data *problem_data =
362 (struct df_ru_problem_data *) dflow->problem_data;
364 for (i = 0; i < problem_data->use_sites_size; i++)
366 bitmap bm = problem_data->use_sites[i];
370 problem_data->use_sites[i] = NULL;
/* Grow the cache if new pseudos have appeared.  */
374 if (problem_data->use_sites_size < reg_size)
376 problem_data->use_sites
377 = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
378 memset (problem_data->use_sites + problem_data->use_sites_size, 0,
379 (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
380 problem_data->use_sites_size = reg_size;
383 bitmap_clear (problem_data->sparse_invalidated_by_call);
384 bitmap_clear (problem_data->dense_invalidated_by_call);
/* First use: build problem data from scratch.  */
388 struct df_ru_problem_data *problem_data =
389 xmalloc (sizeof (struct df_ru_problem_data));
390 dflow->problem_data = problem_data;
392 problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
393 problem_data->use_sites_size = reg_size;
394 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
395 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
398 df_grow_bb_info (dflow);
400 /* Because of the clustering of all def sites for the same pseudo,
401 we have to process all of the blocks before doing the
404 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
406 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
/* Existing block: clear local sets; in/out are preserved.  */
409 bitmap_clear (bb_info->kill);
410 bitmap_clear (bb_info->sparse_kill);
411 bitmap_clear (bb_info->gen);
/* New block: allocate the record and all five bitmaps.  */
415 bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
416 df_ru_set_bb_info (dflow, bb_index, bb_info);
417 bb_info->kill = BITMAP_ALLOC (NULL);
418 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
419 bb_info->gen = BITMAP_ALLOC (NULL);
420 bb_info->in = BITMAP_ALLOC (NULL);
421 bb_info->out = BITMAP_ALLOC (NULL);
427 /* Process a list of DEFs for df_ru_bb_local_compute. */
/* Fold one DEF list into BB_INFO's gen/kill sets.  The first def of a
   regno in the block kills that regno's uses: sparsely (regno bit) if
   it has many refs, else densely (per-ref bits).  NOTE(review):
   excerpt — the def-list walk and some braces are elided.  */
430 df_ru_bb_local_compute_process_def (struct dataflow *dflow,
431 struct df_ru_bb_info *bb_info,
434 struct df *df = dflow->df;
437 unsigned int regno = DF_REF_REGNO (def);
438 unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
439 unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
440 if (!bitmap_bit_p (seen_in_block, regno))
442 /* The first def for regno, causes the kill info to be
443 generated and the gen information to cleared. */
444 if (!bitmap_bit_p (seen_in_insn, regno))
446 if (n_uses > DF_SPARSE_THRESHOLD)
448 bitmap_set_bit (bb_info->sparse_kill, regno);
449 bitmap_clear_range (bb_info->gen, begin, n_uses);
/* Dense case: kill exactly this regno's use-site ids.  */
453 struct df_ru_problem_data *problem_data =
454 (struct df_ru_problem_data *) dflow->problem_data;
456 df_ref_bitmap (problem_data->use_sites, regno,
458 bitmap_ior_into (bb_info->kill, uses);
459 bitmap_and_compl_into (bb_info->gen, uses);
462 bitmap_set_bit (seen_in_insn, regno);
469 /* Process a list of USEs for df_ru_bb_local_compute. */
/* Fold one USE list into BB_INFO->gen.  Only uses whose AT_TOP flag
   matches TOP_FLAG are taken, and only if no earlier def of the regno
   was seen in this block.  NOTE(review): excerpt — the use-list walk
   is elided.  */
472 df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
474 enum df_ref_flags top_flag)
478 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
480 /* Add use to set of gens in this BB unless we have seen a
481 def in a previous instruction. */
482 unsigned int regno = DF_REF_REGNO (use);
483 if (!bitmap_bit_p (seen_in_block, regno))
484 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
490 /* Compute local reaching use (upward exposed use) info for basic
491 block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
/* Compute RU local (gen/kill) info for block BB_INDEX: artificial
   at-top uses and defs first, then each insn's defs before its uses
   (defs clear gen), then the always-live artificial uses.  */
493 df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
495 struct df *df = dflow->df;
496 basic_block bb = BASIC_BLOCK (bb_index);
497 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
500 /* Set when a def for regno is seen. */
501 bitmap_clear (seen_in_block);
502 bitmap_clear (seen_in_insn);
505 /* Variables defined in the prolog that are used by the exception
507 df_ru_bb_local_compute_process_use (bb_info,
508 df_get_artificial_uses (df, bb_index),
512 /* Process the artificial defs first since these are at the top of
514 df_ru_bb_local_compute_process_def (dflow, bb_info,
515 df_get_artificial_defs (df, bb_index));
517 FOR_BB_INSNS (bb, insn)
519 unsigned int uid = INSN_UID (insn);
523 df_ru_bb_local_compute_process_def (dflow, bb_info,
524 DF_INSN_UID_GET (df, uid)->defs);
526 /* The use processing must happen after the defs processing even
527 though the uses logically happen first since the defs clear
528 the gen set. Otherwise, a use for regno occuring in the same
529 instruction as a def for regno would be cleared. */
530 df_ru_bb_local_compute_process_use (bb_info,
531 DF_INSN_UID_GET (df, uid)->uses, 0);
/* Commit this insn's defs to the block-level seen set.  */
533 bitmap_ior_into (seen_in_block, seen_in_insn);
534 bitmap_clear (seen_in_insn);
537 /* Process the hardware registers that are always live. */
538 df_ru_bb_local_compute_process_use (bb_info,
539 df_get_artificial_uses (df, bb_index), 0);
543 /* Compute local reaching use (upward exposed use) info for each basic
544 block within BLOCKS. */
/* Run RU local computation on every block in ALL_BLOCKS, then build
   the sparse/dense call-invalidated sets used across EH edges.  */
546 df_ru_local_compute (struct dataflow *dflow,
548 bitmap rescan_blocks ATTRIBUTE_UNUSED)
550 struct df *df = dflow->df;
551 unsigned int bb_index;
554 struct df_ru_problem_data *problem_data =
555 (struct df_ru_problem_data *) dflow->problem_data;
556 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
557 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
/* Use refs must be clustered per-regno before use-site ids work.  */
561 if (!df->use_info.refs_organized)
562 df_reorganize_refs (&df->use_info);
564 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
566 df_ru_bb_local_compute (dflow, bb_index);
569 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
570 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
572 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
573 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
574 bitmap_set_bit (sparse_invalidated, regno);
577 bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
578 reg_info->begin, reg_info->n_refs);
579 bitmap_ior_into (dense_invalidated, defs);
587 /* Initialize the solution bit vectors for problem. */
/* Seed the RU (backward) solution: in := gen, out := empty.  */
590 df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
592 unsigned int bb_index;
595 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
597 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
598 bitmap_copy (bb_info->in, bb_info->gen);
599 bitmap_clear (bb_info->out);
604 /* Out of target gets or of in of source. */
/* RU confluence for edge E (backward): out(src) |= in(dest), except
   across EH edges, where call-invalidated uses are first knocked out
   of a copy of in(dest).  NOTE(review): excerpt — the else branch
   framing and BITMAP_FREE of tmp are elided.  */
607 df_ru_confluence_n (struct dataflow *dflow, edge e)
609 bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
610 bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
612 if (e->flags & EDGE_EH)
614 struct df_ru_problem_data *problem_data =
615 (struct df_ru_problem_data *) dflow->problem_data;
616 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
617 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
618 struct df *df = dflow->df;
621 bitmap tmp = BITMAP_ALLOC (NULL);
623 bitmap_copy (tmp, op2);
624 bitmap_and_compl_into (tmp, dense_invalidated);
/* Sparse regnos: clear their whole contiguous use-id range.  */
626 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
628 bitmap_clear_range (tmp,
629 DF_REG_USE_GET (df, regno)->begin,
630 DF_REG_USE_GET (df, regno)->n_refs);
632 bitmap_ior_into (op1, tmp);
/* Non-EH edge: plain union.  */
636 bitmap_ior_into (op1, op2);
640 /* Transfer function. */
/* RU transfer function: in = gen | (out - kill), with an extra pass
   clearing the use-id ranges of sparsely-killed regnos.  Returns
   whether IN changed.  NOTE(review): excerpt — the tail that copies
   tmp into in and frees tmp is elided.  */
643 df_ru_transfer_function (struct dataflow *dflow, int bb_index)
645 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
648 bitmap in = bb_info->in;
649 bitmap out = bb_info->out;
650 bitmap gen = bb_info->gen;
651 bitmap kill = bb_info->kill;
652 bitmap sparse_kill = bb_info->sparse_kill;
/* Fast path: no sparse kills, one combined bitmap op suffices.  */
654 if (bitmap_empty_p (sparse_kill))
655 return bitmap_ior_and_compl (in, gen, out, kill);
658 struct df *df = dflow->df;
659 bool changed = false;
660 bitmap tmp = BITMAP_ALLOC (NULL);
661 bitmap_copy (tmp, in);
662 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
664 bitmap_clear_range (tmp,
665 DF_REG_USE_GET (df, regno)->begin,
666 DF_REG_USE_GET (df, regno)->n_refs);
668 bitmap_and_compl_into (tmp, kill);
669 bitmap_ior_into (tmp, gen);
670 changed = !bitmap_equal_p (tmp, in);
683 /* Free all storage associated with the problem. */
/* Free every per-block bitmap, the block pool, the use-site cache and
   the problem data for the RU instance.  NOTE(review): excerpt — loop
   braces and the free of dflow itself (if any) are elided.  */
686 df_ru_free (struct dataflow *dflow)
689 struct df_ru_problem_data *problem_data =
690 (struct df_ru_problem_data *) dflow->problem_data;
694 for (i = 0; i < dflow->block_info_size; i++)
696 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
699 BITMAP_FREE (bb_info->kill);
700 BITMAP_FREE (bb_info->sparse_kill);
701 BITMAP_FREE (bb_info->gen);
702 BITMAP_FREE (bb_info->in);
703 BITMAP_FREE (bb_info->out);
707 free_alloc_pool (dflow->block_pool);
709 for (i = 0; i < problem_data->use_sites_size; i++)
711 bitmap bm = problem_data->use_sites[i];
716 free (problem_data->use_sites);
717 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
718 BITMAP_FREE (problem_data->dense_invalidated_by_call);
720 dflow->block_info_size = 0;
721 free (dflow->block_info);
722 free (dflow->problem_data);
728 /* Debugging info. */
/* Dump the RU problem: invalidated-by-call sets, per-regno use-id
   ranges, then in/gen/kill/out for each block.  NOTE(review):
   excerpt — the FOR_EACH_BB loop header is elided.  */
731 df_ru_dump (struct dataflow *dflow, FILE *file)
734 struct df *df = dflow->df;
735 struct df_ru_problem_data *problem_data =
736 (struct df_ru_problem_data *) dflow->problem_data;
737 unsigned int m = max_reg_num ();
740 fprintf (file, "Reaching uses:\n");
742 fprintf (file, " sparse invalidated \t");
743 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
744 fprintf (file, " dense invalidated \t");
745 dump_bitmap (file, problem_data->dense_invalidated_by_call);
747 for (regno = 0; regno < m; regno++)
748 if (DF_REG_USE_GET (df, regno)->n_refs)
749 fprintf (file, "%d[%d,%d] ", regno,
750 DF_REG_USE_GET (df, regno)->begin,
751 DF_REG_USE_GET (df, regno)->n_refs);
752 fprintf (file, "\n");
756 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
757 df_print_bb_index (bb, file);
762 fprintf (file, " in \t");
763 dump_bitmap (file, bb_info->in);
764 fprintf (file, " gen \t");
765 dump_bitmap (file, bb_info->gen);
766 fprintf (file, " kill\t");
767 dump_bitmap (file, bb_info->kill);
768 fprintf (file, " out \t");
769 dump_bitmap (file, bb_info->out);
773 /* All of the information associated with every instance of the problem. */
/* The vtable describing the RU problem; order of fields matches
   struct df_problem.  */
775 static struct df_problem problem_RU =
777 DF_RU, /* Problem id. */
778 DF_BACKWARD, /* Direction. */
779 df_ru_alloc, /* Allocate the problem specific data. */
780 df_ru_free_bb_info, /* Free basic block info. */
781 df_ru_local_compute, /* Local compute function. */
782 df_ru_init_solution, /* Init the solution specific data. */
783 df_iterative_dataflow, /* Iterative solver. */
784 NULL, /* Confluence operator 0. */
785 df_ru_confluence_n, /* Confluence operator n. */
786 df_ru_transfer_function, /* Transfer function. */
787 NULL, /* Finalize function. */
788 df_ru_free, /* Free all of the problem information. */
789 df_ru_dump, /* Debugging. */
790 NULL /* Dependent problem. */
795 /* Create a new DATAFLOW instance and add it to an existing instance
796 of DF. The returned structure is what is used to get at the
800 df_ru_add_problem (struct df *df)
802 return df_add_problem (df, &problem_RU);
806 /*----------------------------------------------------------------------------
809 Find the locations in the function where each definition site for a
811 ----------------------------------------------------------------------------*/
/* Per-instance data for the reaching-definitions (RD) problem;
   mirrors df_ru_problem_data but keyed on def sites.  */
813 struct df_rd_problem_data
815 bitmap *def_sites; /* Bitmap of defs for each pseudo. */
816 unsigned int def_sites_size; /* Size of def_sites. */
817 /* The set of defs to regs invalidated by call. */
818 bitmap sparse_invalidated_by_call;
819 /* The set of defs to regs invalidate by call for ru. */
820 bitmap dense_invalidated_by_call;
823 /* Get basic block info. */
/* Accessors for the RD per-basic-block info stored in
   dflow->block_info[INDEX].  */
825 struct df_rd_bb_info *
826 df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
828 return (struct df_rd_bb_info *) dflow->block_info[index];
832 /* Set basic block info. */
835 df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
836 struct df_rd_bb_info *bb_info)
838 dflow->block_info[index] = bb_info;
842 /* Free basic block info. */
/* Free all bitmaps in VBB_INFO and return the record to the pool.  */
845 df_rd_free_bb_info (struct dataflow *dflow,
846 basic_block bb ATTRIBUTE_UNUSED,
849 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
852 BITMAP_FREE (bb_info->kill);
853 BITMAP_FREE (bb_info->sparse_kill);
854 BITMAP_FREE (bb_info->gen);
855 BITMAP_FREE (bb_info->in);
856 BITMAP_FREE (bb_info->out);
857 pool_free (dflow->block_pool, bb_info);
862 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
863 not touched unless the block is new. */
/* Allocate (or reset) RD problem data and per-block bitmaps; parallels
   df_ru_alloc with def_sites in place of use_sites.  Solution bits of
   existing blocks are preserved.  NOTE(review): excerpt — several
   framing lines (braces, else) are elided here.  */
866 df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
868 unsigned int bb_index;
870 unsigned int reg_size = max_reg_num ();
872 if (! dflow->block_pool)
873 dflow->block_pool = create_alloc_pool ("df_rd_block pool",
874 sizeof (struct df_rd_bb_info), 50);
/* Re-use an existing instance: drop stale cached def-site bitmaps.  */
876 if (dflow->problem_data)
879 struct df_rd_problem_data *problem_data =
880 (struct df_rd_problem_data *) dflow->problem_data;
882 for (i = 0; i < problem_data->def_sites_size; i++)
884 bitmap bm = problem_data->def_sites[i];
888 problem_data->def_sites[i] = NULL;
/* Grow the cache if new pseudos have appeared.  */
892 if (problem_data->def_sites_size < reg_size)
894 problem_data->def_sites
895 = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
896 memset (problem_data->def_sites + problem_data->def_sites_size, 0,
897 (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
898 problem_data->def_sites_size = reg_size;
901 bitmap_clear (problem_data->sparse_invalidated_by_call);
902 bitmap_clear (problem_data->dense_invalidated_by_call);
/* First use: build problem data from scratch.  */
906 struct df_rd_problem_data *problem_data =
907 xmalloc (sizeof (struct df_rd_problem_data));
908 dflow->problem_data = problem_data;
910 problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
911 problem_data->def_sites_size = reg_size;
912 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
913 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
916 df_grow_bb_info (dflow);
918 /* Because of the clustering of all def sites for the same pseudo,
919 we have to process all of the blocks before doing the
922 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
924 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
/* Existing block: clear local sets; in/out are preserved.  */
927 bitmap_clear (bb_info->kill);
928 bitmap_clear (bb_info->sparse_kill);
929 bitmap_clear (bb_info->gen);
/* New block: allocate the record and all five bitmaps.  */
933 bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
934 df_rd_set_bb_info (dflow, bb_index, bb_info);
935 bb_info->kill = BITMAP_ALLOC (NULL);
936 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
937 bb_info->gen = BITMAP_ALLOC (NULL);
938 bb_info->in = BITMAP_ALLOC (NULL);
939 bb_info->out = BITMAP_ALLOC (NULL);
945 /* Process a list of DEFs for df_rd_bb_local_compute. */
/* Fold one DEF list into BB_INFO's gen/kill sets for RD.  The first
   def of a regno knocks out earlier defs (sparse or dense), and a
   non-clobber def enters gen.  NOTE(review): excerpt — the def-list
   walk and some braces are elided.  */
948 df_rd_bb_local_compute_process_def (struct dataflow *dflow,
949 struct df_rd_bb_info *bb_info,
952 struct df *df = dflow->df;
955 unsigned int regno = DF_REF_REGNO (def);
956 unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
957 unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
959 /* Only the last def(s) for a regno in the block has any
961 if (!bitmap_bit_p (seen_in_block, regno))
963 /* The first def for regno in insn gets to knock out the
964 defs from other instructions. */
965 if (!bitmap_bit_p (seen_in_insn, regno))
967 if (n_defs > DF_SPARSE_THRESHOLD)
969 bitmap_set_bit (bb_info->sparse_kill, regno);
970 bitmap_clear_range (bb_info->gen, begin, n_defs);
/* Dense case: kill exactly this regno's def-site ids.  */
974 struct df_rd_problem_data *problem_data =
975 (struct df_rd_problem_data *) dflow->problem_data;
977 df_ref_bitmap (problem_data->def_sites, regno,
979 bitmap_ior_into (bb_info->kill, defs);
980 bitmap_and_compl_into (bb_info->gen, defs);
984 bitmap_set_bit (seen_in_insn, regno);
985 /* All defs for regno in the instruction may be put into
987 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
988 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
994 /* Compute local reaching def info for basic block BB. */
/* Compute RD local (gen/kill) info for block BB_INDEX, walking insns
   in reverse so the last defs of each regno win; artificial defs are
   processed last since they sit at the block top.  */
997 df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
999 struct df *df = dflow->df;
1000 basic_block bb = BASIC_BLOCK (bb_index);
1001 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1004 bitmap_clear (seen_in_block);
1005 bitmap_clear (seen_in_insn);
1007 FOR_BB_INSNS_REVERSE (bb, insn)
1009 unsigned int uid = INSN_UID (insn);
1011 if (! INSN_P (insn))
1014 df_rd_bb_local_compute_process_def (dflow, bb_info,
1015 DF_INSN_UID_GET (df, uid)->defs);
1017 /* This complex dance with the two bitmaps is required because
1018 instructions can assign twice to the same pseudo. This
1019 generally happens with calls that will have one def for the
1020 result and another def for the clobber. If only one vector
1021 is used and the clobber goes first, the result will be
1023 bitmap_ior_into (seen_in_block, seen_in_insn);
1024 bitmap_clear (seen_in_insn);
1027 /* Process the artificial defs last since we are going backwards
1028 thur the block and these are logically at the start. */
1029 df_rd_bb_local_compute_process_def (dflow, bb_info,
1030 df_get_artificial_defs (df, bb_index));
1034 /* Compute local reaching def info for each basic block within BLOCKS. */
/* Run RD local computation on every block in ALL_BLOCKS, then build
   the sparse/dense call-invalidated def sets used across EH edges.  */
1037 df_rd_local_compute (struct dataflow *dflow,
1039 bitmap rescan_blocks ATTRIBUTE_UNUSED)
1041 struct df *df = dflow->df;
1042 unsigned int bb_index;
1045 struct df_rd_problem_data *problem_data =
1046 (struct df_rd_problem_data *) dflow->problem_data;
1047 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1048 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
/* Def refs must be clustered per-regno before def-site ids work.  */
1052 if (!df->def_info.refs_organized)
1053 df_reorganize_refs (&df->def_info);
1055 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1057 df_rd_bb_local_compute (dflow, bb_index);
1060 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1061 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1063 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
1064 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1066 bitmap_set_bit (sparse_invalidated, regno);
1070 bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1071 reg_info->begin, reg_info->n_refs);
1072 bitmap_ior_into (dense_invalidated, defs);
1079 /* Initialize the solution bit vectors for problem. */
/* Seed the RD (forward) solution: out := gen, in := empty.  */
1082 df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1084 unsigned int bb_index;
1087 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1089 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1091 bitmap_copy (bb_info->out, bb_info->gen);
1092 bitmap_clear (bb_info->in);
1096 /* In of target gets or of out of source. */
/* RD confluence for edge E (forward): in(dest) |= out(src), except
   across EH edges, where call-invalidated defs are first knocked out
   of a copy of out(src).  NOTE(review): excerpt — the else branch
   framing and BITMAP_FREE of tmp are elided.  */
1099 df_rd_confluence_n (struct dataflow *dflow, edge e)
1101 bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1102 bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1104 if (e->flags & EDGE_EH)
1106 struct df_rd_problem_data *problem_data =
1107 (struct df_rd_problem_data *) dflow->problem_data;
1108 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1109 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1110 struct df *df = dflow->df;
1113 bitmap tmp = BITMAP_ALLOC (NULL);
1115 bitmap_copy (tmp, op2);
1116 bitmap_and_compl_into (tmp, dense_invalidated);
/* Sparse regnos: clear their whole contiguous def-id range.  */
1118 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1120 bitmap_clear_range (tmp,
1121 DF_REG_DEF_GET (df, regno)->begin,
1122 DF_REG_DEF_GET (df, regno)->n_refs);
1124 bitmap_ior_into (op1, tmp);
/* Non-EH edge: plain union.  */
1128 bitmap_ior_into (op1, op2);
1132 /* Transfer function. */
/* RD transfer function: out = gen | (in - kill), with an extra pass
   clearing the def-id ranges of sparsely-killed regnos.  Returns
   whether OUT changed.  NOTE(review): excerpt — the tail that copies
   tmp into out and frees tmp is elided.  */
1135 df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1137 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1140 bitmap in = bb_info->in;
1141 bitmap out = bb_info->out;
1142 bitmap gen = bb_info->gen;
1143 bitmap kill = bb_info->kill;
1144 bitmap sparse_kill = bb_info->sparse_kill;
/* Fast path: no sparse kills, one combined bitmap op suffices.  */
1146 if (bitmap_empty_p (sparse_kill))
1147 return bitmap_ior_and_compl (out, gen, in, kill);
1150 struct df *df = dflow->df;
1151 bool changed = false;
1152 bitmap tmp = BITMAP_ALLOC (NULL);
1153 bitmap_copy (tmp, in);
1154 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1156 bitmap_clear_range (tmp,
1157 DF_REG_DEF_GET (df, regno)->begin,
1158 DF_REG_DEF_GET (df, regno)->n_refs);
1160 bitmap_and_compl_into (tmp, kill);
1161 bitmap_ior_into (tmp, gen);
1162 changed = !bitmap_equal_p (tmp, out);
1175 /* Free all storage associated with the problem. */
/* Free every per-block bitmap, the block pool, the def-site cache and
   the problem data for the RD instance.  NOTE(review): excerpt — loop
   braces and the free of dflow itself (if any) are elided.  */
1178 df_rd_free (struct dataflow *dflow)
1181 struct df_rd_problem_data *problem_data =
1182 (struct df_rd_problem_data *) dflow->problem_data;
1186 for (i = 0; i < dflow->block_info_size; i++)
1188 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1191 BITMAP_FREE (bb_info->kill);
1192 BITMAP_FREE (bb_info->sparse_kill);
1193 BITMAP_FREE (bb_info->gen);
1194 BITMAP_FREE (bb_info->in);
1195 BITMAP_FREE (bb_info->out);
1199 free_alloc_pool (dflow->block_pool);
1201 for (i = 0; i < problem_data->def_sites_size; i++)
1203 bitmap bm = problem_data->def_sites[i];
1208 free (problem_data->def_sites);
1209 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1210 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1212 dflow->block_info_size = 0;
1213 free (dflow->block_info);
1214 free (dflow->problem_data);
1220 /* Debugging info. */
/* Dump the RD problem: invalidated-by-call sets, per-regno def-id
   ranges, then in/gen/kill/out (with popcounts) for each block.
   NOTE(review): excerpt — the FOR_EACH_BB loop header is elided.  */
1223 df_rd_dump (struct dataflow *dflow, FILE *file)
1225 struct df *df = dflow->df;
1227 struct df_rd_problem_data *problem_data =
1228 (struct df_rd_problem_data *) dflow->problem_data;
1229 unsigned int m = max_reg_num ();
1232 fprintf (file, "Reaching defs:\n\n");
1234 fprintf (file, " sparse invalidated \t");
1235 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1236 fprintf (file, " dense invalidated \t");
1237 dump_bitmap (file, problem_data->dense_invalidated_by_call);
1239 for (regno = 0; regno < m; regno++)
1240 if (DF_REG_DEF_GET (df, regno)->n_refs)
1241 fprintf (file, "%d[%d,%d] ", regno,
1242 DF_REG_DEF_GET (df, regno)->begin,
1243 DF_REG_DEF_GET (df, regno)->n_refs);
1244 fprintf (file, "\n");
1248 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1249 df_print_bb_index (bb, file);
1254 fprintf (file, " in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1255 dump_bitmap (file, bb_info->in);
1256 fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1257 dump_bitmap (file, bb_info->gen);
1258 fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1259 dump_bitmap (file, bb_info->kill);
1260 fprintf (file, " out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1261 dump_bitmap (file, bb_info->out);
1265 /* All of the information associated with every instance of the problem. */
/* The vtable describing the RD problem; order of fields matches
   struct df_problem.  */
1267 static struct df_problem problem_RD =
1269 DF_RD, /* Problem id. */
1270 DF_FORWARD, /* Direction. */
1271 df_rd_alloc, /* Allocate the problem specific data. */
1272 df_rd_free_bb_info, /* Free basic block info. */
1273 df_rd_local_compute, /* Local compute function. */
1274 df_rd_init_solution, /* Init the solution specific data. */
1275 df_iterative_dataflow, /* Iterative solver. */
1276 NULL, /* Confluence operator 0. */
1277 df_rd_confluence_n, /* Confluence operator n. */
1278 df_rd_transfer_function, /* Transfer function. */
1279 NULL, /* Finalize function. */
1280 df_rd_free, /* Free all of the problem information. */
1281 df_rd_dump, /* Debugging. */
1282 NULL /* Dependent problem. */
1287 /* Create a new DATAFLOW instance and add it to an existing instance
1288 of DF. The returned structure is what is used to get at the
1292 df_rd_add_problem (struct df *df)
1294 return df_add_problem (df, &problem_RD);
1299 /*----------------------------------------------------------------------------
1302 Find the locations in the function where any use of a pseudo can reach
1303 in the backwards direction.
1304 ----------------------------------------------------------------------------*/
1306 /* Get basic block info. */
/* Accessors for the LR (live registers) per-basic-block info stored
   in dflow->block_info[INDEX].  */
1308 struct df_lr_bb_info *
1309 df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1311 return (struct df_lr_bb_info *) dflow->block_info[index];
1315 /* Set basic block info. */
1318 df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1319 struct df_lr_bb_info *bb_info)
1321 dflow->block_info[index] = bb_info;
1325 /* Free basic block info. */
1328 df_lr_free_bb_info (struct dataflow *dflow,
1329 basic_block bb ATTRIBUTE_UNUSED,
1332 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1335 BITMAP_FREE (bb_info->use);
1336 BITMAP_FREE (bb_info->def);
1337 BITMAP_FREE (bb_info->in);
1338 BITMAP_FREE (bb_info->out);
1339 pool_free (dflow->block_pool, bb_info);
1344 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1345 not touched unless the block is new. */
1348 df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1350 unsigned int bb_index;
1353 if (! dflow->block_pool)
1354 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1355 sizeof (struct df_lr_bb_info), 50);
1357 df_grow_bb_info (dflow);
1359 /* Because of the clustering of all def sites for the same pseudo,
1360 we have to process all of the blocks before doing the
1363 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1365 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1368 bitmap_clear (bb_info->def);
1369 bitmap_clear (bb_info->use);
1373 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1374 df_lr_set_bb_info (dflow, bb_index, bb_info);
1375 bb_info->use = BITMAP_ALLOC (NULL);
1376 bb_info->def = BITMAP_ALLOC (NULL);
1377 bb_info->in = BITMAP_ALLOC (NULL);
1378 bb_info->out = BITMAP_ALLOC (NULL);
1384 /* Compute local live register info for basic block BB. */
1387 df_lr_bb_local_compute (struct dataflow *dflow,
1388 struct df *df, unsigned int bb_index)
1390 basic_block bb = BASIC_BLOCK (bb_index);
1391 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1396 /* Process the hardware registers that are always live. */
1397 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1398 /* Add use to set of uses in this BB. */
1399 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1400 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1402 FOR_BB_INSNS_REVERSE (bb, insn)
1404 unsigned int uid = INSN_UID (insn);
1406 if (! INSN_P (insn))
1411 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1413 unsigned int dregno = DF_REF_REGNO (def);
1415 if (dregno >= FIRST_PSEUDO_REGISTER
1416 || !(SIBLING_CALL_P (insn)
1417 && bitmap_bit_p (df->exit_block_uses, dregno)
1418 && !refers_to_regno_p (dregno, dregno+1,
1419 current_function_return_rtx,
1422 /* Add def to set of defs in this BB. */
1423 bitmap_set_bit (bb_info->def, dregno);
1424 bitmap_clear_bit (bb_info->use, dregno);
1430 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1432 unsigned int dregno = DF_REF_REGNO (def);
1434 if (DF_INSN_CONTAINS_ASM (df, insn)
1435 && dregno < FIRST_PSEUDO_REGISTER)
1439 dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1440 for (i = dregno; i <= end; ++i)
1441 regs_asm_clobbered[i] = 1;
1443 /* Add def to set of defs in this BB. */
1444 bitmap_set_bit (bb_info->def, dregno);
1445 bitmap_clear_bit (bb_info->use, dregno);
1449 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
1450 /* Add use to set of uses in this BB. */
1451 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1454 /* Process the registers set in an exception handler. */
1455 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1457 unsigned int dregno = DF_REF_REGNO (def);
1458 bitmap_set_bit (bb_info->def, dregno);
1459 bitmap_clear_bit (bb_info->use, dregno);
1463 /* Process the uses that are live into an exception handler. */
1464 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1465 /* Add use to set of uses in this BB. */
1466 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1467 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1471 /* Compute local live register info for each basic block within BLOCKS. */
1474 df_lr_local_compute (struct dataflow *dflow,
1476 bitmap rescan_blocks)
1478 struct df *df = dflow->df;
1479 unsigned int bb_index;
1482 /* Assume that the stack pointer is unchanging if alloca hasn't
1484 if (bitmap_equal_p (all_blocks, rescan_blocks))
1485 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1487 bitmap_clear (df->hardware_regs_used);
1489 /* The all-important stack pointer must always be live. */
1490 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1492 /* Before reload, there are a few registers that must be forced
1493 live everywhere -- which might not already be the case for
1494 blocks within infinite loops. */
1495 if (! reload_completed)
1497 /* Any reference to any pseudo before reload is a potential
1498 reference of the frame pointer. */
1499 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1501 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1502 /* Pseudos with argument area equivalences may require
1503 reloading via the argument pointer. */
1504 if (fixed_regs[ARG_POINTER_REGNUM])
1505 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1508 /* Any constant, or pseudo with constant equivalences, may
1509 require reloading from memory using the pic register. */
1510 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1511 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1512 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1515 if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1517 /* The exit block is special for this problem and its bits are
1518 computed from thin air. */
1519 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
1520 bitmap_copy (bb_info->use, df->exit_block_uses);
1523 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1525 if (bb_index == EXIT_BLOCK)
1527 df_lr_bb_local_compute (dflow, df, bb_index);
1532 /* Initialize the solution vectors. */
1535 df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1537 unsigned int bb_index;
1540 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1542 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1543 bitmap_copy (bb_info->in, bb_info->use);
1544 bitmap_clear (bb_info->out);
1549 /* Confluence function that processes infinite loops. This might be a
1550 noreturn function that throws. And even if it isn't, getting the
1551 unwind info right helps debugging. */
1553 df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1555 struct df *df = dflow->df;
1557 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1558 if (bb != EXIT_BLOCK_PTR)
1559 bitmap_copy (op1, df->hardware_regs_used);
1563 /* Confluence function that ignores fake edges. */
1565 df_lr_confluence_n (struct dataflow *dflow, edge e)
1567 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1568 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1570 /* Call-clobbered registers die across exception and call edges. */
1571 /* ??? Abnormal call edges ignored for the moment, as this gets
1572 confused by sibling call edges, which crashes reg-stack. */
1573 if (e->flags & EDGE_EH)
1574 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1576 bitmap_ior_into (op1, op2);
1578 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1582 /* Transfer function. */
1584 df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1586 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1587 bitmap in = bb_info->in;
1588 bitmap out = bb_info->out;
1589 bitmap use = bb_info->use;
1590 bitmap def = bb_info->def;
1592 return bitmap_ior_and_compl (in, use, out, def);
1596 /* Free all storage associated with the problem. */
1599 df_lr_free (struct dataflow *dflow)
1601 if (dflow->block_info)
1604 for (i = 0; i < dflow->block_info_size; i++)
1606 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1609 BITMAP_FREE (bb_info->use);
1610 BITMAP_FREE (bb_info->def);
1611 BITMAP_FREE (bb_info->in);
1612 BITMAP_FREE (bb_info->out);
1615 free_alloc_pool (dflow->block_pool);
1617 dflow->block_info_size = 0;
1618 free (dflow->block_info);
1624 /* Debugging info. */
1627 df_lr_dump (struct dataflow *dflow, FILE *file)
1631 fprintf (file, "Live Registers:\n");
1634 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1635 df_print_bb_index (bb, file);
1640 fprintf (file, " in \t");
1641 dump_bitmap (file, bb_info->in);
1642 fprintf (file, " use \t");
1643 dump_bitmap (file, bb_info->use);
1644 fprintf (file, " def \t");
1645 dump_bitmap (file, bb_info->def);
1646 fprintf (file, " out \t");
1647 dump_bitmap (file, bb_info->out);
1651 /* All of the information associated with every instance of the problem. */
1653 static struct df_problem problem_LR =
1655 DF_LR, /* Problem id. */
1656 DF_BACKWARD, /* Direction. */
1657 df_lr_alloc, /* Allocate the problem specific data. */
1658 df_lr_free_bb_info, /* Free basic block info. */
1659 df_lr_local_compute, /* Local compute function. */
1660 df_lr_init, /* Init the solution specific data. */
1661 df_iterative_dataflow, /* Iterative solver. */
1662 df_lr_confluence_0, /* Confluence operator 0. */
1663 df_lr_confluence_n, /* Confluence operator n. */
1664 df_lr_transfer_function, /* Transfer function. */
1665 NULL, /* Finalize function. */
1666 df_lr_free, /* Free all of the problem information. */
1667 df_lr_dump, /* Debugging. */
1668 NULL /* Dependent problem. */
1672 /* Create a new DATAFLOW instance and add it to an existing instance
1673 of DF. The returned structure is what is used to get at the
1677 df_lr_add_problem (struct df *df)
1679 return df_add_problem (df, &problem_LR);
1684 /*----------------------------------------------------------------------------
1685 UNINITIALIZED REGISTERS
1687 Find the set of uses for registers that are reachable from the entry
1688 block without passing thru a definition.
1689 ----------------------------------------------------------------------------*/
1691 /* Get basic block info. */
1693 struct df_ur_bb_info *
1694 df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1696 return (struct df_ur_bb_info *) dflow->block_info[index];
1700 /* Set basic block info. */
1703 df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1704 struct df_ur_bb_info *bb_info)
1706 dflow->block_info[index] = bb_info;
1710 /* Free basic block info. */
1713 df_ur_free_bb_info (struct dataflow *dflow,
1714 basic_block bb ATTRIBUTE_UNUSED,
1717 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1720 BITMAP_FREE (bb_info->gen);
1721 BITMAP_FREE (bb_info->kill);
1722 BITMAP_FREE (bb_info->in);
1723 BITMAP_FREE (bb_info->out);
1724 pool_free (dflow->block_pool, bb_info);
1729 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1730 not touched unless the block is new. */
1733 df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1735 unsigned int bb_index;
1738 if (! dflow->block_pool)
1739 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1740 sizeof (struct df_ur_bb_info), 100);
1742 df_grow_bb_info (dflow);
1744 /* Because of the clustering of all def sites for the same pseudo,
1745 we have to process all of the blocks before doing the
1748 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1750 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1753 bitmap_clear (bb_info->kill);
1754 bitmap_clear (bb_info->gen);
1758 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1759 df_ur_set_bb_info (dflow, bb_index, bb_info);
1760 bb_info->kill = BITMAP_ALLOC (NULL);
1761 bb_info->gen = BITMAP_ALLOC (NULL);
1762 bb_info->in = BITMAP_ALLOC (NULL);
1763 bb_info->out = BITMAP_ALLOC (NULL);
1769 /* Compute local uninitialized register info for basic block BB. */
1772 df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1774 struct df *df = dflow->df;
1775 basic_block bb = BASIC_BLOCK (bb_index);
1776 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1780 bitmap_clear (seen_in_block);
1781 bitmap_clear (seen_in_insn);
1783 FOR_BB_INSNS_REVERSE (bb, insn)
1785 unsigned int uid = INSN_UID (insn);
1789 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1791 unsigned int regno = DF_REF_REGNO (def);
1792 /* Only the last def counts. */
1793 if (!bitmap_bit_p (seen_in_block, regno))
1795 bitmap_set_bit (seen_in_insn, regno);
1797 if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
1798 bitmap_set_bit (bb_info->kill, regno);
1800 bitmap_set_bit (bb_info->gen, regno);
1803 bitmap_ior_into (seen_in_block, seen_in_insn);
1804 bitmap_clear (seen_in_insn);
1807 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1809 unsigned int regno = DF_REF_REGNO (def);
1810 if (!bitmap_bit_p (seen_in_block, regno))
1812 bitmap_set_bit (seen_in_block, regno);
1813 bitmap_set_bit (bb_info->gen, regno);
1819 /* Compute local uninitialized register info. */
1822 df_ur_local_compute (struct dataflow *dflow,
1823 bitmap all_blocks ATTRIBUTE_UNUSED,
1824 bitmap rescan_blocks)
1826 unsigned int bb_index;
1831 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1833 df_ur_bb_local_compute (dflow, bb_index);
1840 /* Initialize the solution vectors. */
1843 df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1845 unsigned int bb_index;
1848 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1850 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1852 bitmap_copy (bb_info->out, bb_info->gen);
1853 bitmap_clear (bb_info->in);
1858 /* Or in the stack regs, hard regs and early clobber regs into the the
1859 ur_in sets of all of the blocks. */
1862 df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1864 struct df *df = dflow->df;
1865 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1866 bitmap tmp = BITMAP_ALLOC (NULL);
1868 unsigned int bb_index;
1870 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1872 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1873 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1875 bitmap_ior_into (bb_info->in, df_all_hard_regs);
1876 bitmap_ior_into (bb_info->out, df_all_hard_regs);
1878 /* No register may reach a location where it is not used. Thus
1879 we trim the rr result to the places where it is used. */
1880 bitmap_and_into (bb_info->in, bb_lr_info->in);
1881 bitmap_and_into (bb_info->out, bb_lr_info->out);
1884 /* Hard registers may still stick in the ur_out set, but not
1885 be in the ur_in set, if their only mention was in a call
1886 in this block. This is because a call kills in the lr
1887 problem but does not kill in the ur problem. To clean
1888 this up, we execute the transfer function on the lr_in
1889 set and then use that to knock bits out of ur_out. */
1890 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
1892 bitmap_and_into (bb_info->out, tmp);
1900 /* Confluence function that ignores fake edges. */
1903 df_ur_confluence_n (struct dataflow *dflow, edge e)
1905 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1906 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1908 if (e->flags & EDGE_FAKE)
1911 bitmap_ior_into (op1, op2);
1915 /* Transfer function. */
1918 df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1920 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1921 bitmap in = bb_info->in;
1922 bitmap out = bb_info->out;
1923 bitmap gen = bb_info->gen;
1924 bitmap kill = bb_info->kill;
1926 return bitmap_ior_and_compl (out, gen, in, kill);
1930 /* Free all storage associated with the problem. */
1933 df_ur_free (struct dataflow *dflow)
1935 if (dflow->block_info)
1939 for (i = 0; i < dflow->block_info_size; i++)
1941 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
1944 BITMAP_FREE (bb_info->gen);
1945 BITMAP_FREE (bb_info->kill);
1946 BITMAP_FREE (bb_info->in);
1947 BITMAP_FREE (bb_info->out);
1951 free_alloc_pool (dflow->block_pool);
1952 dflow->block_info_size = 0;
1953 free (dflow->block_info);
1959 /* Debugging info. */
1962 df_ur_dump (struct dataflow *dflow, FILE *file)
1966 fprintf (file, "Undefined regs:\n");
1970 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
1971 df_print_bb_index (bb, file);
1976 fprintf (file, " in \t");
1977 dump_bitmap (file, bb_info->in);
1978 fprintf (file, " gen \t");
1979 dump_bitmap (file, bb_info->gen);
1980 fprintf (file, " kill\t");
1981 dump_bitmap (file, bb_info->kill);
1982 fprintf (file, " out \t");
1983 dump_bitmap (file, bb_info->out);
1987 /* All of the information associated with every instance of the problem. */
1989 static struct df_problem problem_UR =
1991 DF_UR, /* Problem id. */
1992 DF_FORWARD, /* Direction. */
1993 df_ur_alloc, /* Allocate the problem specific data. */
1994 df_ur_free_bb_info, /* Free basic block info. */
1995 df_ur_local_compute, /* Local compute function. */
1996 df_ur_init, /* Init the solution specific data. */
1997 df_iterative_dataflow, /* Iterative solver. */
1998 NULL, /* Confluence operator 0. */
1999 df_ur_confluence_n, /* Confluence operator n. */
2000 df_ur_transfer_function, /* Transfer function. */
2001 df_ur_local_finalize, /* Finalize function. */
2002 df_ur_free, /* Free all of the problem information. */
2003 df_ur_dump, /* Debugging. */
2004 &problem_LR /* Dependent problem. */
2008 /* Create a new DATAFLOW instance and add it to an existing instance
2009 of DF. The returned structure is what is used to get at the
2013 df_ur_add_problem (struct df *df)
2015 return df_add_problem (df, &problem_UR);
2020 /*----------------------------------------------------------------------------
2021 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2023 Find the set of uses for registers that are reachable from the entry
2024 block without passing thru a definition.
2026 This is a variant of the UR problem above that has a lot of special
2027 features just for the register allocation phase.
2028 ----------------------------------------------------------------------------*/
2030 struct df_urec_problem_data
2032 bool earlyclobbers_found; /* True if any instruction contains an
2035 bitmap stack_regs; /* Registers that may be allocated to a STACK_REGS. */
2040 /* Get basic block info. */
2042 struct df_urec_bb_info *
2043 df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2045 return (struct df_urec_bb_info *) dflow->block_info[index];
2049 /* Set basic block info. */
2052 df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2053 struct df_urec_bb_info *bb_info)
2055 dflow->block_info[index] = bb_info;
2059 /* Free basic block info. */
2062 df_urec_free_bb_info (struct dataflow *dflow,
2063 basic_block bb ATTRIBUTE_UNUSED,
2066 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2069 BITMAP_FREE (bb_info->gen);
2070 BITMAP_FREE (bb_info->kill);
2071 BITMAP_FREE (bb_info->in);
2072 BITMAP_FREE (bb_info->out);
2073 BITMAP_FREE (bb_info->earlyclobber);
2074 pool_free (dflow->block_pool, bb_info);
2079 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2080 not touched unless the block is new. */
2083 df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
2085 unsigned int bb_index;
2087 struct df_urec_problem_data *problem_data =
2088 (struct df_urec_problem_data *) dflow->problem_data;
2090 if (! dflow->block_pool)
2091 dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2092 sizeof (struct df_urec_bb_info), 50);
2094 if (!dflow->problem_data)
2096 problem_data = xmalloc (sizeof (struct df_urec_problem_data));
2097 dflow->problem_data = problem_data;
2099 problem_data->earlyclobbers_found = false;
2101 df_grow_bb_info (dflow);
2103 /* Because of the clustering of all def sites for the same pseudo,
2104 we have to process all of the blocks before doing the
2107 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2109 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2112 bitmap_clear (bb_info->kill);
2113 bitmap_clear (bb_info->gen);
2114 bitmap_clear (bb_info->earlyclobber);
2118 bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2119 df_urec_set_bb_info (dflow, bb_index, bb_info);
2120 bb_info->kill = BITMAP_ALLOC (NULL);
2121 bb_info->gen = BITMAP_ALLOC (NULL);
2122 bb_info->in = BITMAP_ALLOC (NULL);
2123 bb_info->out = BITMAP_ALLOC (NULL);
2124 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2130 /* The function modifies local info for register REG being changed in
2131 SETTER. DATA is used to pass the current basic block info. */
2134 df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2139 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2141 if (GET_CODE (reg) == SUBREG)
2142 reg = SUBREG_REG (reg);
2148 endregno = regno = REGNO (reg);
2149 if (regno < FIRST_PSEUDO_REGISTER)
2151 endregno +=hard_regno_nregs[regno][GET_MODE (reg)];
2153 for (i = regno; i < endregno; i++)
2155 bitmap_set_bit (bb_info->kill, i);
2157 if (GET_CODE (setter) != CLOBBER)
2158 bitmap_set_bit (bb_info->gen, i);
2160 bitmap_clear_bit (bb_info->gen, i);
2165 bitmap_set_bit (bb_info->kill, regno);
2167 if (GET_CODE (setter) != CLOBBER)
2168 bitmap_set_bit (bb_info->gen, regno);
2170 bitmap_clear_bit (bb_info->gen, regno);
2173 /* Classes of registers which could be early clobbered in the current
2177 DEF_VEC_ALLOC_I(int,heap);
2179 static VEC(int,heap) *earlyclobber_regclass;
2181 /* This function finds and stores register classes that could be early
2182 clobbered in INSN. If any earlyclobber classes are found, the function
2183 returns TRUE, in all other cases it returns FALSE. */
2186 df_urec_check_earlyclobber (rtx insn)
2191 extract_insn (insn);
2193 VEC_truncate (int, earlyclobber_regclass, 0);
2194 for (opno = 0; opno < recog_data.n_operands; opno++)
2199 enum reg_class class;
2200 const char *p = recog_data.constraints[opno];
2209 case '=': case '+': case '?':
2212 case 'm': case '<': case '>': case 'V': case 'o':
2213 case 'E': case 'F': case 'G': case 'H':
2214 case 's': case 'i': case 'n':
2215 case 'I': case 'J': case 'K': case 'L':
2216 case 'M': case 'N': case 'O': case 'P':
2218 case '0': case '1': case '2': case '3': case '4':
2219 case '5': case '6': case '7': case '8': case '9':
2220 /* These don't say anything we care about. */
2228 if (amp_p && class != NO_REGS)
2234 VEC_iterate (int, earlyclobber_regclass, i, rc);
2237 if (rc == (int) class)
2241 /* We use VEC_quick_push here because
2242 earlyclobber_regclass holds no more than
2243 N_REG_CLASSES elements. */
2244 VEC_quick_push (int, earlyclobber_regclass, (int) class);
2254 class = GENERAL_REGS;
2258 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2263 p += CONSTRAINT_LEN (c, p);
2270 /* The function checks that pseudo-register *X has a class
2271 intersecting with the class of pseudo-register could be early
2272 clobbered in the same insn.
2274 This function is a no-op if earlyclobber_regclass is empty.
2276 Reload can assign the same hard register to uninitialized
2277 pseudo-register and early clobbered pseudo-register in an insn if
2278 the pseudo-register is used first time in given BB and not lived at
2279 the BB start. To prevent this we don't change life information for
2280 such pseudo-registers. */
2283 df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2285 enum reg_class pref_class, alt_class;
2287 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2289 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2294 if (bitmap_bit_p (bb_info->kill, regno)
2295 || bitmap_bit_p (bb_info->gen, regno))
2297 pref_class = reg_preferred_class (regno);
2298 alt_class = reg_alternate_class (regno);
2299 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2301 if (reg_classes_intersect_p (rc, pref_class)
2303 && reg_classes_intersect_p (rc, alt_class)))
2305 bitmap_set_bit (bb_info->earlyclobber, regno);
2313 /* The function processes all pseudo-registers in *X with the aid of
2314 previous function. */
2317 df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2319 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2323 /* Compute local uninitialized register info for basic block BB. */
2326 df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2328 struct df *df = dflow->df;
2329 basic_block bb = BASIC_BLOCK (bb_index);
2330 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2334 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2336 unsigned int regno = DF_REF_REGNO (def);
2337 bitmap_set_bit (bb_info->gen, regno);
2340 FOR_BB_INSNS (bb, insn)
2344 note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2345 if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
2346 && df_urec_check_earlyclobber (insn))
2348 struct df_urec_problem_data *problem_data =
2349 (struct df_urec_problem_data *) dflow->problem_data;
2350 problem_data->earlyclobbers_found = true;
2351 note_uses (&PATTERN (insn),
2352 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2359 /* Compute local uninitialized register info. */
2362 df_urec_local_compute (struct dataflow *dflow,
2363 bitmap all_blocks ATTRIBUTE_UNUSED,
2364 bitmap rescan_blocks)
2366 unsigned int bb_index;
2370 HARD_REG_SET zero, stack_hard_regs, used;
2371 struct df_urec_problem_data *problem_data =
2372 (struct df_urec_problem_data *) dflow->problem_data;
2374 /* Any register that MAY be allocated to a register stack (like the
2375 387) is treated poorly. Each such register is marked as being
2376 live everywhere. This keeps the register allocator and the
2377 subsequent passes from doing anything useful with these values.
2379 FIXME: This seems like an incredibly poor idea. */
2381 CLEAR_HARD_REG_SET (zero);
2382 CLEAR_HARD_REG_SET (stack_hard_regs);
2383 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2384 SET_HARD_REG_BIT (stack_hard_regs, i);
2385 problem_data->stack_regs = BITMAP_ALLOC (NULL);
2386 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2388 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2389 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2390 AND_HARD_REG_SET (used, stack_hard_regs);
2391 GO_IF_HARD_REG_EQUAL (used, zero, skip);
2392 bitmap_set_bit (problem_data->stack_regs, i);
2398 /* We know that earlyclobber_regclass holds no more than
2399 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2400 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2402 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2404 df_urec_bb_local_compute (dflow, bb_index);
2407 VEC_free (int, heap, earlyclobber_regclass);
2411 /* Initialize the solution vectors. */
2414 df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2416 unsigned int bb_index;
2419 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2421 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2423 /* FIXME: This is a hack, it has been copied over from
2424 make_accurate_live_analysis by Vlad. Most likely it is necessary
2425 because the generation of gen and kill information for hardware
2426 registers in ur is a subset of what is really necessary and what
2427 is done for the lr problem. */
2429 /* Inside the register allocator, partial availability is only
2430 allowed for the psuedo registers. To implement this, the rr is
2431 initially iored with a mask ones for the hard registers and zeros
2432 for the pseudos before being iterated. This means that each
2433 hardware register will be live unless explicitly killed by some
2434 statement. Eventually most of these bit will die because the
2435 results of rr are anded with the results of lr before being used.
2436 Outside of register allocation, a more conservative strategy of
2437 completely ignoring the unintialized registers is imployed in the
2438 finalizer function. */
2439 if (df_state & DF_SCAN_GLOBAL)
2441 bitmap_ior (bb_info->out, bb_info->gen, df_all_hard_regs);
2442 bitmap_copy (bb_info->in, df_all_hard_regs);
2446 bitmap_copy (bb_info->out, bb_info->gen);
2447 bitmap_clear (bb_info->in);
2453 /* Or in the stack regs, hard regs and early clobber regs into the the
2454 ur_in sets of all of the blocks. */
2457 df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2459 struct df *df = dflow->df;
2460 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2461 bitmap tmp = BITMAP_ALLOC (NULL);
2463 unsigned int bb_index;
2464 struct df_urec_problem_data *problem_data =
2465 (struct df_urec_problem_data *) dflow->problem_data;
2467 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2469 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2470 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
2472 if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2474 if (problem_data->earlyclobbers_found)
2475 bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2478 /* We can not use the same stack register for uninitialized
2479 pseudo-register and another living pseudo-register
2480 because if the uninitialized pseudo-register dies,
2481 subsequent pass reg-stack will be confused (it will
2482 believe that the other register dies). */
2483 bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2484 bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2488 if (!(df_state & DF_SCAN_GLOBAL))
2490 bitmap_ior_into (bb_info->in, df_all_hard_regs);
2491 bitmap_ior_into (bb_info->out, df_all_hard_regs);
2494 /* No register may reach a location where it is not used. Thus
2495 we trim the rr result to the places where it is used. */
2496 bitmap_and_into (bb_info->in, bb_lr_info->in);
2497 bitmap_and_into (bb_info->out, bb_lr_info->out);
2500 /* Hard registers may still stick in the ur_out set, but not
2501 be in the ur_in set, if their only mention was in a call
2502 in this block. This is because a call kills in the lr
2503 problem but does not kill in the rr problem. To clean
2504 this up, we execute the transfer function on the lr_in
2505 set and then use that to knock bits out of ur_out. */
2506 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2508 bitmap_and_into (bb_info->out, tmp);
2513 BITMAP_FREE (problem_data->stack_regs);
2519 /* Confluence function that ignores fake edges. */
2522 df_urec_confluence_n (struct dataflow *dflow, edge e)
2524 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2525 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2527 if (e->flags & EDGE_FAKE)
2530 bitmap_ior_into (op1, op2);
2534 /* Transfer function. */
2537 df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2539 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2540 bitmap in = bb_info->in;
2541 bitmap out = bb_info->out;
2542 bitmap gen = bb_info->gen;
2543 bitmap kill = bb_info->kill;
2545 return bitmap_ior_and_compl (out, gen, in, kill);
2549 /* Free all storage associated with the problem. */
2552 df_urec_free (struct dataflow *dflow)
2554 if (dflow->block_info)
2558 for (i = 0; i < dflow->block_info_size; i++)
2560 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2563 BITMAP_FREE (bb_info->gen);
2564 BITMAP_FREE (bb_info->kill);
2565 BITMAP_FREE (bb_info->in);
2566 BITMAP_FREE (bb_info->out);
2567 BITMAP_FREE (bb_info->earlyclobber);
2571 free_alloc_pool (dflow->block_pool);
2573 dflow->block_info_size = 0;
2574 free (dflow->block_info);
2575 free (dflow->problem_data);
2581 /* Debugging info. */
2584 df_urec_dump (struct dataflow *dflow, FILE *file)
2588 fprintf (file, "Undefined regs:\n");
2592 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2593 df_print_bb_index (bb, file);
2598 fprintf (file, " in \t");
2599 dump_bitmap (file, bb_info->in);
2600 fprintf (file, " gen \t");
2601 dump_bitmap (file, bb_info->gen);
2602 fprintf (file, " kill\t");
2603 dump_bitmap (file, bb_info->kill);
2604 fprintf (file, " ec\t");
2605 dump_bitmap (file, bb_info->earlyclobber);
2606 fprintf (file, " out \t");
2607 dump_bitmap (file, bb_info->out);
2611 /* All of the information associated with every instance of the problem.
   This is the vtable consumed by df_add_problem: the solver invokes these
   callbacks generically, so each slot must line up with struct df_problem's
   field order.  UREC depends on the LR (live registers) problem.  */
2613 static struct df_problem problem_UREC =
2615 DF_UREC, /* Problem id. */
2616 DF_FORWARD, /* Direction. */
2617 df_urec_alloc, /* Allocate the problem specific data. */
2618 df_urec_free_bb_info, /* Free basic block info. */
2619 df_urec_local_compute, /* Local compute function. */
2620 df_urec_init, /* Init the solution specific data. */
2621 df_iterative_dataflow, /* Iterative solver. */
2622 NULL, /* Confluence operator 0. */
2623 df_urec_confluence_n, /* Confluence operator n. */
2624 df_urec_transfer_function, /* Transfer function. */
2625 df_urec_local_finalize, /* Finalize function. */
2626 df_urec_free, /* Free all of the problem information. */
2627 df_urec_dump, /* Debugging. */
2628 &problem_LR /* Dependent problem. */
2632 /* Create a new DATAFLOW instance and add it to an existing instance
2633 of DF.  The returned structure is what is used to get at the
   solution for this problem instance.  (Sentence continues past the
   elided lines.)  */
2637 df_urec_add_problem (struct df *df)
2639 return df_add_problem (df, &problem_UREC);
2644 /*----------------------------------------------------------------------------
2645 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2647 Link either the defs to the uses and / or the uses to the defs.
2649 These problems are set up like the other dataflow problems so that
2650 they nicely fit into the framework. They are much simpler and only
2651 involve a single traversal of instructions and an examination of
2652 the reaching defs information (the dependent problem).
2653 ----------------------------------------------------------------------------*/
2655 struct df_chain_problem_data
/* NOTE(review): the struct's members were dropped by extraction; from the
   uses below it at least has an int/flags field named `flags` holding
   DF_DU_CHAIN / DF_UD_CHAIN bits.  */
2661 /* Create def-use or use-def chains.  Allocation callback: discards any
   previously built chains wholesale (via the alloc pool) and resets the
   per-ref chain pointers so the finalize pass can rebuild them.  */
2664 df_chain_alloc (struct dataflow *dflow,
2665 bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2667 struct df *df = dflow->df;
2669 struct df_chain_problem_data *problem_data =
2670 (struct df_chain_problem_data *) dflow->problem_data;
2672 /* Wholesale destruction of the old chains: freeing the pool releases
   every df_link at once, so the refs' chain pointers become stale and
   are NULLed below.  */
2673 if (dflow->block_pool)
2674 free_alloc_pool (dflow->block_pool);
2676 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2677 sizeof (struct df_link), 100);
/* Def-use chains hang off the defs; make sure the def refs are in their
   organized (compact, indexed) form first.  */
2679 if (problem_data->flags & DF_DU_CHAIN)
2681 if (!df->def_info.refs_organized)
2682 df_reorganize_refs (&df->def_info);
2684 /* Clear out the pointers from the refs. */
2685 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2687 struct df_ref *ref = df->def_info.refs[i];
2688 DF_REF_CHAIN (ref) = NULL;
/* Same for use-def chains, which hang off the uses.  */
2692 if (problem_data->flags & DF_UD_CHAIN)
2694 if (!df->use_info.refs_organized)
2695 df_reorganize_refs (&df->use_info);
2696 for (i = 0; i < DF_USES_SIZE (df); i++)
2698 struct df_ref *ref = df->use_info.refs[i];
2699 DF_REF_CHAIN (ref) = NULL;
2705 /* Create the chains for a list of USEs.  For each use in the list whose
   DF_REF_AT_TOP flag matches TOP_FLAG, link it against every reaching
   def of the same register recorded in LOCAL_RD, building DU and/or UD
   chains according to PROBLEM_DATA->flags.
   NOTE(review): the parameter list is partially elided — LOCAL_RD, the
   USE list head, and some local declarations (def, bi) are not visible.  */
2708 df_chain_create_bb_process_use (struct dataflow *dflow,
2709 struct df_chain_problem_data *problem_data,
2712 enum df_ref_flags top_flag)
2714 struct df *df = dflow->df;
2716 unsigned int def_index;
2720 /* Do not want to go through this for an uninitialized var. */
2721 unsigned int uregno = DF_REF_REGNO (use);
2722 int count = DF_REG_DEF_GET (df, uregno)->n_refs;
/* Only process uses at the requested position (top-of-block artificial
   uses vs. everything else).  */
2725 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
/* All defs of register UREGNO occupy the contiguous bitmap index range
   [begin, begin + n_refs - 1]; scan LOCAL_RD over that window.  */
2727 unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2728 unsigned int last_index = first_index + count - 1;
2730 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
/* Past the last def of this register: stop (break elided).  */
2733 if (def_index > last_index)
2736 def = DF_DEFS_GET (df, def_index);
2737 if (problem_data->flags & DF_DU_CHAIN)
2738 df_chain_create (dflow, def, use);
2739 if (problem_data->flags & DF_UD_CHAIN)
2740 df_chain_create (dflow, use, def);
2744 use = use->next_ref;
2748 /* Reset the storage pool that the def-use or use-def chains have been
2749 allocated in.  We do not need to re-adjust the pointers in the refs;
2750 these have already been cleaned out.  */
2752 /* Create chains from reaching defs bitmaps for basic block BB. */
2752 /* Create chains from reaching defs bitmaps for basic block BB. */
2754 df_chain_create_bb (struct dataflow *dflow,
2755 struct dataflow *rd_dflow,
2756 unsigned int bb_index)
2758 basic_block bb = BASIC_BLOCK (bb_index);
2759 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2761 bitmap cpy = BITMAP_ALLOC (NULL);
2762 struct df *df = dflow->df;
2763 struct df_chain_problem_data *problem_data =
2764 (struct df_chain_problem_data *) dflow->problem_data;
2767 bitmap_copy (cpy, bb_info->in);
2769 /* Since we are going forwards, process the artificial uses first
2770 then the artificial defs second. */
2773 /* Create the chains for the artificial uses from the EH_USES at the
2774 beginning of the block. */
2775 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2776 df_get_artificial_uses (df, bb->index),
2780 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2782 unsigned int dregno = DF_REF_REGNO (def);
2783 bitmap_clear_range (cpy,
2784 DF_REG_DEF_GET (df, dregno)->begin,
2785 DF_REG_DEF_GET (df, dregno)->n_refs);
2786 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2787 bitmap_set_bit (cpy, DF_REF_ID (def));
2790 /* Process the regular instructions next. */
2791 FOR_BB_INSNS (bb, insn)
2794 unsigned int uid = INSN_UID (insn);
2796 if (! INSN_P (insn))
2799 /* Now scan the uses and link them up with the defs that remain
2800 in the cpy vector. */
2802 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2803 DF_INSN_UID_GET (df, uid)->uses, 0);
2805 /* Since we are going forwards, process the defs second. This
2806 pass only changes the bits in cpy. */
2807 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2809 unsigned int dregno = DF_REF_REGNO (def);
2810 bitmap_clear_range (cpy,
2811 DF_REG_DEF_GET (df, dregno)->begin,
2812 DF_REG_DEF_GET (df, dregno)->n_refs);
2813 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2814 bitmap_set_bit (cpy, DF_REF_ID (def));
2818 /* Create the chains for the artificial uses of the hard registers
2819 at the end of the block. */
2820 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2821 df_get_artificial_uses (df, bb->index), 0);
2824 /* Create def-use chains from reaching use bitmaps for basic blocks
   in ALL_BLOCKS.  Finalize callback: drives df_chain_create_bb over
   every block in the set, using the already-solved RD problem.  */
2828 df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2830 unsigned int bb_index;
2832 struct df *df = dflow->df;
2833 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2835 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2837 df_chain_create_bb (dflow, rd_dflow, bb_index);
2842 /* Free all storage associated with the problem.  All df_link records
   live in the alloc pool, so freeing the pool frees every chain at once;
   the stale pointers in the refs are reset on the next df_chain_alloc.  */
2845 df_chain_free (struct dataflow *dflow)
2847 free_alloc_pool (dflow->block_pool);
2848 free (dflow->problem_data);
2853 /* Debugging info.  Prints every def-use and/or use-def chain (as
   selected by the problem flags) to FILE, one ref per line.  */
2856 df_chains_dump (struct dataflow *dflow, FILE *file)
2858 struct df *df = dflow->df;
2860 struct df_chain_problem_data *problem_data =
2861 (struct df_chain_problem_data *) dflow->problem_data;
2863 if (problem_data->flags & DF_DU_CHAIN)
2865 fprintf (file, "Def-use chains:\n");
2866 for (j = 0; j < df->def_info.bitmap_size; j++)
2868 struct df_ref *def = DF_DEFS_GET (df, j);
/* (A NULL check on DEF is elided here.)  */
2871 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
2872 j, DF_REF_BBNO (def),
2873 DF_INSN_LUID (df, DF_REF_INSN (def)),
2874 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
2875 DF_REF_REGNO (def));
2876 if (def->flags & DF_REF_READ_WRITE)
2877 fprintf (file, "read/write ");
2878 df_chain_dump (df, DF_REF_CHAIN (def), file);
2879 fprintf (file, "\n");
2884 if (problem_data->flags & DF_UD_CHAIN)
2886 fprintf (file, "Use-def chains:\n");
2887 for (j = 0; j < df->use_info.bitmap_size; j++)
2889 struct df_ref *use = DF_USES_GET (df, j);
/* NOTE(review): the two conditional expressions below lost their
   ternary halves ("DF_REF_INSN (use) ?" and the ": -1" branches) to
   extraction; the visible lines are fragments of one fprintf call.  */
2892 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
2893 j, DF_REF_BBNO (use),
2895 DF_INSN_LUID (df, DF_REF_INSN (use))
2897 DF_REF_INSN (DF_USES_GET (df, j)) ?
2898 DF_REF_INSN_UID (DF_USES_GET (df,j))
2900 DF_REF_REGNO (use));
2901 if (use->flags & DF_REF_READ_WRITE)
2902 fprintf (file, "read/write ");
2903 if (use->flags & DF_REF_STRIPPED)
2904 fprintf (file, "stripped ");
2905 if (use->flags & DF_REF_IN_NOTE)
2906 fprintf (file, "note ");
2907 df_chain_dump (df, DF_REF_CHAIN (use), file);
2908 fprintf (file, "\n");
/* Problem vtable for DU/UD chain construction.  The chains are built in
   a single pass over the RD solution, so there is no local compute,
   solver, confluence, or transfer function — only finalize does work.  */
2915 static struct df_problem problem_CHAIN =
2917 DF_CHAIN, /* Problem id. */
2918 DF_NONE, /* Direction. */
2919 df_chain_alloc, /* Allocate the problem specific data. */
2920 NULL, /* Free basic block info. */
2921 NULL, /* Local compute function. */
2922 NULL, /* Init the solution specific data. */
2923 NULL, /* Iterative solver. */
2924 NULL, /* Confluence operator 0. */
2925 NULL, /* Confluence operator n. */
2926 NULL, /* Transfer function. */
2927 df_chain_finalize, /* Finalize function. */
2928 df_chain_free, /* Free all of the problem information. */
2929 df_chains_dump, /* Debugging. */
2930 &problem_RD /* Dependent problem. */
2934 /* Create a new DATAFLOW instance and add it to an existing instance
2935 of DF.  The returned structure is what is used to get at the
   solution.  FLAGS selects DF_DU_CHAIN and/or DF_UD_CHAIN.
   (The `return dflow;` statement is elided by extraction.)  */
2939 df_chain_add_problem (struct df *df, int flags)
2941 struct df_chain_problem_data *problem_data =
2942 xmalloc (sizeof (struct df_chain_problem_data));
2943 struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
2945 dflow->problem_data = problem_data;
2946 problem_data->flags = flags;
2952 /*----------------------------------------------------------------------------
2953 REGISTER INFORMATION
2955 Currently this consists of only lifetime information. But the plan is
2956 to enhance it so that it produces all of the register information needed
2957 by the register allocators.
2958 ----------------------------------------------------------------------------*/
2961 struct df_ri_problem_data
/* NOTE(review): members elided; the code below uses an `int *lifetime`
   array indexed by register number.  */
2967 /* Allocate the lifetime information.  (Re)sizes the per-register
   lifetime array to max_reg_num () entries and zeroes it.  */
2970 df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2972 struct df_ri_problem_data *problem_data =
2973 (struct df_ri_problem_data *) dflow->problem_data;
2975 if (!dflow->problem_data)
/* NOTE(review): this inner `problem_data` SHADOWS the outer one.  On the
   first call the outer pointer was computed from a NULL
   dflow->problem_data, yet it is dereferenced by the xrealloc below, and
   the freshly xmalloc'd struct's `lifetime` field is never initialized
   to NULL before being handed to xrealloc.  Unless an elided line
   reassigns the outer pointer / NULLs the field, this is a latent
   NULL/uninitialized-pointer bug — confirm against the full source.  */
2977 struct df_ri_problem_data *problem_data =
2978 xmalloc (sizeof (struct df_ri_problem_data));
2979 dflow->problem_data = problem_data;
2982 problem_data->lifetime = xrealloc (problem_data->lifetime,
2983 max_reg_num () *sizeof (int));
2984 memset (problem_data->lifetime, 0, max_reg_num () *sizeof (int));
2987 /* Compute register info: lifetime, bb, and number of defs and uses
2988 for basic block BB_INDEX.  Walks the block's insns in reverse,
   maintaining the LIVE set backwards from the block's OUT set, and adds
   1 to the lifetime of every register live across each insn.  LIVE is a
   caller-supplied scratch bitmap.  */
2991 df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
2993 struct df *df = dflow->df;
2994 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
2995 struct df_ri_problem_data *problem_data =
2996 (struct df_ri_problem_data *) dflow->problem_data;
2997 basic_block bb = BASIC_BLOCK (bb_index);
/* Start from the registers live at block exit (UR problem's OUT set).  */
3000 bitmap_copy (live, bb_info->out);
3002 FOR_BB_INSNS_REVERSE (bb, insn)
3004 unsigned int uid = INSN_UID (insn);
/* Skip non-instructions (the `continue` is elided).  */
3010 if (! INSN_P (insn))
/* Going backwards: defs kill liveness...  */
3013 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
3015 unsigned int dregno = DF_REF_REGNO (def);
3017 /* Kill this register. */
3018 bitmap_clear_bit (live, dregno);
/* ...then uses make their registers live.  */
3021 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
3023 unsigned int uregno = DF_REF_REGNO (use);
3025 /* This register is now live. */
3026 bitmap_set_bit (live, uregno);
3029 /* Increment lifetimes of all live registers. */
3030 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3032 problem_data->lifetime[regno]++;
3038 /* Compute register info: lifetime, bb, and number of defs and uses.
   Local-compute callback: runs df_ri_bb_compute on every block in
   BLOCKS_TO_SCAN, sharing one scratch LIVE bitmap.
   NOTE(review): the matching BITMAP_FREE (live) is not visible here —
   presumably on an elided line after the loop; confirm.  */
3040 df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3041 bitmap blocks_to_scan)
3043 unsigned int bb_index;
3047 live = BITMAP_ALLOC (NULL);
3049 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3051 df_ri_bb_compute (dflow, bb_index, live);
3058 /* Free all storage associated with the problem: the per-register
   lifetime array and the problem-private data itself.  */
3061 df_ri_free (struct dataflow *dflow)
3063 struct df_ri_problem_data *problem_data =
3064 (struct df_ri_problem_data *) dflow->problem_data;
3066 free (problem_data->lifetime);
3067 free (dflow->problem_data);
3072 /* Debugging info.  Prints the computed lifetime of every register
   (0 .. max_reg_num () - 1) to FILE.  */
3075 df_ri_dump (struct dataflow *dflow, FILE *file)
3077 struct df_ri_problem_data *problem_data =
3078 (struct df_ri_problem_data *) dflow->problem_data;
3081 fprintf (file, "Register info:\n");
3082 for (j = 0; j < max_reg_num (); j++)
3084 fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
3088 /* All of the information associated with every instance of the problem.
   Vtable for the register-info (RI) problem: a single local-compute pass
   over the UR solution; no solver, confluence, or transfer function.  */
3090 static struct df_problem problem_RI =
3092 DF_RI, /* Problem id. */
3093 DF_NONE, /* Direction. */
3094 df_ri_alloc, /* Allocate the problem specific data. */
3095 NULL, /* Free basic block info. */
3096 df_ri_compute, /* Local compute function. */
3097 NULL, /* Init the solution specific data. */
3098 NULL, /* Iterative solver. */
3099 NULL, /* Confluence operator 0. */
3100 NULL, /* Confluence operator n. */
3101 NULL, /* Transfer function. */
3102 NULL, /* Finalize function. */
3103 df_ri_free, /* Free all of the problem information. */
3104 df_ri_dump, /* Debugging. */
3105 &problem_UR /* Dependent problem. */
3109 /* Create a new DATAFLOW instance and add it to an existing instance
3110 of DF.  The returned structure is what is used to get at the
   solution for the register-info problem.  */
3114 df_ri_add_problem (struct df *df)
3116 return df_add_problem (df, &problem_RI);
3120 /* Return total lifetime (in insns) of REG. */
3122 df_reg_lifetime (struct df *df, rtx reg)
3124 struct dataflow *dflow = df->problems_by_index[DF_RI];
3125 struct df_ri_problem_data *problem_data =
3126 (struct df_ri_problem_data *) dflow->problem_data;
3127 return problem_data->lifetime[REGNO (reg)];