1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008, 2009 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
31 #include "insn-config.h"
36 #include "alloc-pool.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
45 #include "target-def.h"
47 #include "tree-pass.h"
50 DEF_VEC_ALLOC_P_STACK(df_ref);
52 #define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
54 typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
56 DEF_VEC_P(df_mw_hardreg_ptr);
57 DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);
59 #define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
60 VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
63 #define HAVE_epilogue 0
66 #define HAVE_prologue 0
68 #ifndef HAVE_sibcall_epilogue
69 #define HAVE_sibcall_epilogue 0
73 #define EPILOGUE_USES(REGNO) 0
76 /* The following two macros free the vecs that hold either the refs or
77 the mw refs. They are a little tricky because a vec that has 0
78 elements is special and is not to be freed. */
79 #define df_scan_free_ref_vec(V) \
85 #define df_scan_free_mws_vec(V) \
91 /* The set of hard registers in eliminables[i].from. */
93 static HARD_REG_SET elim_reg_set;
95 /* Initialize ur_in and ur_out as if all hard registers were partially available. */
98 struct df_collection_rec
100 VEC(df_ref,stack) *def_vec;
101 VEC(df_ref,stack) *use_vec;
102 VEC(df_ref,stack) *eq_use_vec;
103 VEC(df_mw_hardreg_ptr,stack) *mw_vec;
106 static df_ref df_null_ref_rec[1];
107 static struct df_mw_hardreg * df_null_mw_rec[1];
109 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
111 basic_block, struct df_insn_info *,
112 enum df_ref_type, int ref_flags,
113 int, int, enum machine_mode);
114 static void df_def_record_1 (struct df_collection_rec *, rtx,
115 basic_block, struct df_insn_info *,
117 static void df_defs_record (struct df_collection_rec *, rtx,
118 basic_block, struct df_insn_info *,
120 static void df_uses_record (enum df_ref_class, struct df_collection_rec *,
121 rtx *, enum df_ref_type,
122 basic_block, struct df_insn_info *,
124 int, int, enum machine_mode);
126 static df_ref df_ref_create_structure (enum df_ref_class,
127 struct df_collection_rec *, rtx, rtx *,
128 basic_block, struct df_insn_info *,
129 enum df_ref_type, int ref_flags,
130 int, int, enum machine_mode);
132 static void df_insn_refs_collect (struct df_collection_rec*,
133 basic_block, struct df_insn_info *);
134 static void df_canonize_collection_rec (struct df_collection_rec *);
136 static void df_get_regular_block_artificial_uses (bitmap);
137 static void df_get_eh_block_artificial_uses (bitmap);
139 static void df_record_entry_block_defs (bitmap);
140 static void df_record_exit_block_uses (bitmap);
141 static void df_get_exit_block_use_set (bitmap);
142 static void df_get_entry_block_def_set (bitmap);
143 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
144 static void df_ref_chain_delete_du_chain (df_ref *);
145 static void df_ref_chain_delete (df_ref *);
147 static void df_refs_add_to_chains (struct df_collection_rec *,
150 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
151 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
152 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
153 static void df_install_ref (df_ref, struct df_reg_info *,
154 struct df_ref_info *, bool);
156 static int df_ref_compare (const void *, const void *);
157 static int df_mw_compare (const void *, const void *);
159 /* Indexed by hardware reg number, is true if that register is ever
160 used in the current function.
162 In df-scan.c, this is set up to record the hard regs used
163 explicitly. Reload adds in the hard regs used for holding pseudo
164 regs. Final uses it to generate the code in the function prologue
165 and epilogue to save and restore registers as needed. */
167 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
169 /*----------------------------------------------------------------------------
170 SCANNING DATAFLOW PROBLEM
172 There are several ways in which scanning looks just like the other
173 dataflow problems. It shares all the mechanisms for local info
174 as well as basic block info. Where it differs is when and how often
175 it gets run. It also has no need for the iterative solver.
176 ----------------------------------------------------------------------------*/
178 /* Problem data for the scanning dataflow function. */
179 struct df_scan_problem_data
181 alloc_pool ref_base_pool;
182 alloc_pool ref_artificial_pool;
183 alloc_pool ref_regular_pool;
184 alloc_pool ref_extract_pool;
185 alloc_pool insn_pool;
187 alloc_pool mw_reg_pool;
188 bitmap_obstack reg_bitmaps;
189 bitmap_obstack insn_bitmaps;
192 typedef struct df_scan_bb_info *df_scan_bb_info_t;
195 /* Internal function to shut down the scanning problem. */
197 df_scan_free_internal (void)
199 struct df_scan_problem_data *problem_data
200 = (struct df_scan_problem_data *) df_scan->problem_data;
204 /* The vectors that hold the refs are not pool allocated because
205 they come in many sizes. This makes them impossible to delete
207 for (i = 0; i < DF_INSN_SIZE(); i++)
209 struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
210 /* Skip the insns that have no insn_info or have been
214 df_scan_free_ref_vec (insn_info->defs);
215 df_scan_free_ref_vec (insn_info->uses);
216 df_scan_free_ref_vec (insn_info->eq_uses);
217 df_scan_free_mws_vec (insn_info->mw_hardregs);
223 unsigned int bb_index = bb->index;
224 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
227 df_scan_free_ref_vec (bb_info->artificial_defs);
228 df_scan_free_ref_vec (bb_info->artificial_uses);
232 free (df->def_info.refs);
233 free (df->def_info.begin);
234 free (df->def_info.count);
235 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
237 free (df->use_info.refs);
238 free (df->use_info.begin);
239 free (df->use_info.count);
240 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
246 free (df->eq_use_regs);
247 df->eq_use_regs = NULL;
255 free (df_scan->block_info);
256 df_scan->block_info = NULL;
257 df_scan->block_info_size = 0;
259 BITMAP_FREE (df->hardware_regs_used);
260 BITMAP_FREE (df->regular_block_artificial_uses);
261 BITMAP_FREE (df->eh_block_artificial_uses);
262 BITMAP_FREE (df->entry_block_defs);
263 BITMAP_FREE (df->exit_block_uses);
264 BITMAP_FREE (df->insns_to_delete);
265 BITMAP_FREE (df->insns_to_rescan);
266 BITMAP_FREE (df->insns_to_notes_rescan);
268 free_alloc_pool (df_scan->block_pool);
269 free_alloc_pool (problem_data->ref_base_pool);
270 free_alloc_pool (problem_data->ref_artificial_pool);
271 free_alloc_pool (problem_data->ref_regular_pool);
272 free_alloc_pool (problem_data->ref_extract_pool);
273 free_alloc_pool (problem_data->insn_pool);
274 free_alloc_pool (problem_data->reg_pool);
275 free_alloc_pool (problem_data->mw_reg_pool);
276 bitmap_obstack_release (&problem_data->reg_bitmaps);
277 bitmap_obstack_release (&problem_data->insn_bitmaps);
278 free (df_scan->problem_data);
282 /* Set basic block info. */
285 df_scan_set_bb_info (unsigned int index,
286 struct df_scan_bb_info *bb_info)
288 df_grow_bb_info (df_scan);
289 df_scan->block_info[index] = (void *) bb_info;
293 /* Free basic block info. */
296 df_scan_free_bb_info (basic_block bb, void *vbb_info)
298 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
299 unsigned int bb_index = bb->index;
303 FOR_BB_INSNS (bb, insn)
306 /* Record defs within INSN. */
307 df_insn_delete (bb, INSN_UID (insn));
310 if (bb_index < df_scan->block_info_size)
311 bb_info = df_scan_get_bb_info (bb_index);
313 /* Get rid of any artificial uses or defs. */
314 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
315 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
316 df_ref_chain_delete (bb_info->artificial_defs);
317 df_ref_chain_delete (bb_info->artificial_uses);
318 bb_info->artificial_defs = NULL;
319 bb_info->artificial_uses = NULL;
320 pool_free (df_scan->block_pool, bb_info);
325 /* Allocate the problem data for the scanning problem. This should be
326 called when the problem is created or when the entire function is to
329 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
331 struct df_scan_problem_data *problem_data;
332 unsigned int insn_num = get_max_uid () + 1;
333 unsigned int block_size = 400;
336 /* Given the number of pools, this is really faster than tearing
338 if (df_scan->problem_data)
339 df_scan_free_internal ();
342 = create_alloc_pool ("df_scan_block pool",
343 sizeof (struct df_scan_bb_info),
346 problem_data = XNEW (struct df_scan_problem_data);
347 df_scan->problem_data = problem_data;
348 df_scan->computed = true;
350 problem_data->ref_base_pool
351 = create_alloc_pool ("df_scan ref base",
352 sizeof (struct df_base_ref), block_size);
353 problem_data->ref_artificial_pool
354 = create_alloc_pool ("df_scan ref artificial",
355 sizeof (struct df_artificial_ref), block_size);
356 problem_data->ref_regular_pool
357 = create_alloc_pool ("df_scan ref regular",
358 sizeof (struct df_regular_ref), block_size);
359 problem_data->ref_extract_pool
360 = create_alloc_pool ("df_scan ref extract",
361 sizeof (struct df_extract_ref), block_size);
362 problem_data->insn_pool
363 = create_alloc_pool ("df_scan insn",
364 sizeof (struct df_insn_info), block_size);
365 problem_data->reg_pool
366 = create_alloc_pool ("df_scan reg",
367 sizeof (struct df_reg_info), block_size);
368 problem_data->mw_reg_pool
369 = create_alloc_pool ("df_scan mw_reg",
370 sizeof (struct df_mw_hardreg), block_size);
372 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
373 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
375 insn_num += insn_num / 4;
378 df_grow_insn_info ();
379 df_grow_bb_info (df_scan);
383 unsigned int bb_index = bb->index;
384 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
387 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
388 df_scan_set_bb_info (bb_index, bb_info);
390 bb_info->artificial_defs = NULL;
391 bb_info->artificial_uses = NULL;
394 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
395 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
396 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
397 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
398 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
399 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
400 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
401 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
402 df_scan->optional_p = false;
406 /* Free all of the data associated with the scan problem. */
411 if (df_scan->problem_data)
412 df_scan_free_internal ();
414 if (df->blocks_to_analyze)
416 BITMAP_FREE (df->blocks_to_analyze);
417 df->blocks_to_analyze = NULL;
423 /* Dump the preamble for DF_SCAN dump. */
425 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
436 fprintf (file, ";; invalidated by call \t");
437 df_print_regset (file, regs_invalidated_by_call_regset);
438 fprintf (file, ";; hardware regs used \t");
439 df_print_regset (file, df->hardware_regs_used);
440 fprintf (file, ";; regular block artificial uses \t");
441 df_print_regset (file, df->regular_block_artificial_uses);
442 fprintf (file, ";; eh block artificial uses \t");
443 df_print_regset (file, df->eh_block_artificial_uses);
444 fprintf (file, ";; entry block defs \t");
445 df_print_regset (file, df->entry_block_defs);
446 fprintf (file, ";; exit block uses \t");
447 df_print_regset (file, df->exit_block_uses);
448 fprintf (file, ";; regs ever live \t");
449 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
450 if (df_regs_ever_live_p (i))
451 fprintf (file, " %d[%s]", i, reg_names[i]);
452 fprintf (file, "\n;; ref usage \t");
454 for (i = 0; i < (int)df->regs_inited; i++)
455 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
457 const char * sep = "";
459 fprintf (file, "r%d={", i);
460 if (DF_REG_DEF_COUNT (i))
462 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
464 dcount += DF_REG_DEF_COUNT (i);
466 if (DF_REG_USE_COUNT (i))
468 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
470 ucount += DF_REG_USE_COUNT (i);
472 if (DF_REG_EQ_USE_COUNT (i))
474 fprintf (file, "%s%dd", sep, DF_REG_EQ_USE_COUNT (i));
475 ecount += DF_REG_EQ_USE_COUNT (i);
477 fprintf (file, "} ");
481 FOR_BB_INSNS (bb, insn)
490 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de} in %d{%d regular + %d call} insns.\n",
491 dcount + ucount + ecount, dcount, ucount, ecount, icount + ccount, icount, ccount);
494 /* Dump the bb_info for a given basic block. */
496 df_scan_start_block (basic_block bb, FILE *file)
498 struct df_scan_bb_info *bb_info
499 = df_scan_get_bb_info (bb->index);
503 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
504 df_refs_chain_dump (bb_info->artificial_defs, true, file);
505 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
506 df_refs_chain_dump (bb_info->artificial_uses, true, file);
507 fprintf (file, "\n");
512 FOR_BB_INSNS (bb, insn)
514 df_insn_debug (insn, false, file);
519 static struct df_problem problem_SCAN =
521 DF_SCAN, /* Problem id. */
522 DF_NONE, /* Direction. */
523 df_scan_alloc, /* Allocate the problem specific data. */
524 NULL, /* Reset global information. */
525 df_scan_free_bb_info, /* Free basic block info. */
526 NULL, /* Local compute function. */
527 NULL, /* Init the solution specific data. */
528 NULL, /* Iterative solver. */
529 NULL, /* Confluence operator 0. */
530 NULL, /* Confluence operator n. */
531 NULL, /* Transfer function. */
532 NULL, /* Finalize function. */
533 df_scan_free, /* Free all of the problem information. */
534 NULL, /* Remove this problem from the stack of dataflow problems. */
535 df_scan_start_dump, /* Debugging. */
536 df_scan_start_block, /* Debugging start block. */
537 NULL, /* Debugging end block. */
538 NULL, /* Incremental solution verify start. */
539 NULL, /* Incremental solution verify end. */
540 NULL, /* Dependent problem. */
541 TV_DF_SCAN, /* Timing variable. */
542 false /* Reset blocks on dropping out of blocks_to_analyze. */
546 /* Create a new DATAFLOW instance and add it to an existing instance
547 of DF. The returned structure is what is used to get at the
551 df_scan_add_problem (void)
553 df_add_problem (&problem_SCAN);
557 /*----------------------------------------------------------------------------
558 Storage Allocation Utilities
559 ----------------------------------------------------------------------------*/
562 /* First, grow the reg_info information. If the current size is less than
563 the number of pseudos, grow to 25% more than the number of
566 Second, assure that all of the slots up to max_reg_num have been
567 filled with reg_info structures. */
570 df_grow_reg_info (void)
572 unsigned int max_reg = max_reg_num ();
573 unsigned int new_size = max_reg;
574 struct df_scan_problem_data *problem_data
575 = (struct df_scan_problem_data *) df_scan->problem_data;
578 if (df->regs_size < new_size)
580 new_size += new_size / 4;
581 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
582 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
583 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
585 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
586 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
587 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
588 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
589 df->regs_size = new_size;
592 for (i = df->regs_inited; i < max_reg; i++)
594 struct df_reg_info *reg_info;
596 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
597 memset (reg_info, 0, sizeof (struct df_reg_info));
598 df->def_regs[i] = reg_info;
599 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
600 memset (reg_info, 0, sizeof (struct df_reg_info));
601 df->use_regs[i] = reg_info;
602 reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
603 memset (reg_info, 0, sizeof (struct df_reg_info));
604 df->eq_use_regs[i] = reg_info;
605 df->def_info.begin[i] = 0;
606 df->def_info.count[i] = 0;
607 df->use_info.begin[i] = 0;
608 df->use_info.count[i] = 0;
611 df->regs_inited = max_reg;
615 /* Grow the ref information. */
618 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
620 if (ref_info->refs_size < new_size)
622 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
623 memset (ref_info->refs + ref_info->refs_size, 0,
624 (new_size - ref_info->refs_size) *sizeof (df_ref));
625 ref_info->refs_size = new_size;
630 /* Check and grow the ref information if necessary. This routine
631 guarantees total_size + BITMAP_ADDEND amount of entries in refs
632 array. It updates ref_info->refs_size only and does not change
633 ref_info->total_size. */
636 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
637 unsigned bitmap_addend)
639 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
641 int new_size = ref_info->total_size + bitmap_addend;
642 new_size += ref_info->total_size / 4;
643 df_grow_ref_info (ref_info, new_size);
648 /* Grow the ref information. If the current size is less than the
649 number of instructions, grow to 25% more than the number of
653 df_grow_insn_info (void)
655 unsigned int new_size = get_max_uid () + 1;
656 if (DF_INSN_SIZE () < new_size)
658 new_size += new_size / 4;
659 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
660 memset (df->insns + df->insns_size, 0,
661 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
662 DF_INSN_SIZE () = new_size;
669 /*----------------------------------------------------------------------------
670 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
671 ----------------------------------------------------------------------------*/
673 /* Rescan all of the blocks_to_analyze, or all of the blocks in the
674 function if blocks_to_analyze is NULL (see df_set_blocks).  */
677 df_scan_blocks (void)
681 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
682 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
684 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
685 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
687 bitmap_ior_into (df->eh_block_artificial_uses,
688 df->regular_block_artificial_uses);
690 /* ENTRY and EXIT blocks have special defs/uses. */
691 df_get_entry_block_def_set (df->entry_block_defs);
692 df_record_entry_block_defs (df->entry_block_defs);
693 df_get_exit_block_use_set (df->exit_block_uses);
694 df_record_exit_block_uses (df->exit_block_uses);
695 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
696 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
701 unsigned int bb_index = bb->index;
702 df_bb_refs_record (bb_index, true);
707 /* Create a new ref of type DF_REF_TYPE for register REG at address
708 LOC within INSN of BB. This function is only used externally.
710 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
711 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
712 fields if they were constants. Otherwise they should be -1 if
713 those flags were set. */
716 df_ref_create (rtx reg, rtx *loc, rtx insn,
718 enum df_ref_type ref_type,
720 int width, int offset, enum machine_mode mode)
723 struct df_reg_info **reg_info;
724 struct df_ref_info *ref_info;
726 df_ref **ref_rec_ptr;
727 unsigned int count = 0;
729 enum df_ref_class cl;
733 /* You cannot hack artificial refs. */
736 if (width != -1 || offset != -1)
742 ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
744 width, offset, mode);
746 if (DF_REF_REG_DEF_P (ref))
748 reg_info = df->def_regs;
749 ref_info = &df->def_info;
750 ref_rec_ptr = &DF_INSN_DEFS (insn);
751 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
753 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
755 reg_info = df->eq_use_regs;
756 ref_info = &df->use_info;
757 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
758 switch (ref_info->ref_order)
760 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
761 case DF_REF_ORDER_BY_REG_WITH_NOTES:
762 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
766 add_to_table = false;
772 reg_info = df->use_regs;
773 ref_info = &df->use_info;
774 ref_rec_ptr = &DF_INSN_USES (insn);
775 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
778 /* Do not add if ref is not in the right blocks. */
779 if (add_to_table && df->analyze_subset)
780 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
782 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
785 switch (ref_info->ref_order)
787 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
788 case DF_REF_ORDER_BY_REG_WITH_NOTES:
789 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
790 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
793 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
797 ref_rec = *ref_rec_ptr;
804 ref_rec = *ref_rec_ptr;
807 ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
808 *ref_rec_ptr = ref_rec;
809 ref_rec[count] = ref;
810 ref_rec[count+1] = NULL;
811 qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
815 df_ref *ref_rec = XNEWVEC (df_ref, 2);
818 *ref_rec_ptr = ref_rec;
824 fprintf (dump_file, "adding ref ");
825 df_ref_debug (ref, dump_file);
828 /* By adding the ref directly, df_insn_rescan my not find any
829 differences even though the block will have changed. So we need
830 to mark the block dirty ourselves. */
831 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
832 df_set_bb_dirty (bb);
839 /*----------------------------------------------------------------------------
840 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
841 ----------------------------------------------------------------------------*/
844 df_free_ref (df_ref ref)
846 struct df_scan_problem_data *problem_data
847 = (struct df_scan_problem_data *) df_scan->problem_data;
849 switch (DF_REF_CLASS (ref))
852 pool_free (problem_data->ref_base_pool, ref);
855 case DF_REF_ARTIFICIAL:
856 pool_free (problem_data->ref_artificial_pool, ref);
860 pool_free (problem_data->ref_regular_pool, ref);
864 pool_free (problem_data->ref_extract_pool, ref);
870 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
871 Also delete the def-use or use-def chain if it exists. */
874 df_reg_chain_unlink (df_ref ref)
876 df_ref next = DF_REF_NEXT_REG (ref);
877 df_ref prev = DF_REF_PREV_REG (ref);
878 int id = DF_REF_ID (ref);
879 struct df_reg_info *reg_info;
882 if (DF_REF_REG_DEF_P (ref))
884 int regno = DF_REF_REGNO (ref);
885 reg_info = DF_REG_DEF_GET (regno);
886 refs = df->def_info.refs;
890 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
892 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
893 switch (df->use_info.ref_order)
895 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
896 case DF_REF_ORDER_BY_REG_WITH_NOTES:
897 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
898 refs = df->use_info.refs;
906 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
907 refs = df->use_info.refs;
913 if (df->analyze_subset)
915 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
922 /* Delete any def-use or use-def chains that start here. It is
923 possible that there is trash in this field. This happens for
924 insns that have been deleted when rescanning has been deferred
925 and the chain problem has also been deleted. The chain tear down
926 code skips deleted insns. */
927 if (df_chain && DF_REF_CHAIN (ref))
928 df_chain_unlink (ref);
931 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
933 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
934 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
937 /* Unlink from the reg chain. If there is no prev, this is the
938 first of the list. If not, just join the next and prev. */
940 DF_REF_NEXT_REG (prev) = next;
943 gcc_assert (reg_info->reg_chain == ref);
944 reg_info->reg_chain = next;
947 DF_REF_PREV_REG (next) = prev;
953 /* Remove REF from VEC. */
956 df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
958 df_ref *vec = *vec_ptr;
962 while (*vec && *vec != ref)
974 *vec_ptr = df_null_ref_rec;
979 /* Unlink REF from all def-use/use-def chains, etc. */
982 df_ref_remove (df_ref ref)
987 fprintf (dump_file, "removing ref ");
988 df_ref_debug (ref, dump_file);
992 if (DF_REF_REG_DEF_P (ref))
994 if (DF_REF_IS_ARTIFICIAL (ref))
996 struct df_scan_bb_info *bb_info
997 = df_scan_get_bb_info (DF_REF_BBNO (ref));
998 df_ref_compress_rec (&bb_info->artificial_defs, ref);
1002 unsigned int uid = DF_REF_INSN_UID (ref);
1003 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1004 df_ref_compress_rec (&insn_rec->defs, ref);
1009 if (DF_REF_IS_ARTIFICIAL (ref))
1011 struct df_scan_bb_info *bb_info
1012 = df_scan_get_bb_info (DF_REF_BBNO (ref));
1013 df_ref_compress_rec (&bb_info->artificial_uses, ref);
1017 unsigned int uid = DF_REF_INSN_UID (ref);
1018 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
1020 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
1021 df_ref_compress_rec (&insn_rec->eq_uses, ref);
1023 df_ref_compress_rec (&insn_rec->uses, ref);
1027 /* By deleting the ref directly, df_insn_rescan my not find any
1028 differences even though the block will have changed. So we need
1029 to mark the block dirty ourselves. */
1030 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
1031 df_set_bb_dirty (DF_REF_BB (ref));
1032 df_reg_chain_unlink (ref);
1036 /* Create the insn record for INSN. If there was one there, zero it
1039 struct df_insn_info *
1040 df_insn_create_insn_record (rtx insn)
1042 struct df_scan_problem_data *problem_data
1043 = (struct df_scan_problem_data *) df_scan->problem_data;
1044 struct df_insn_info *insn_rec;
1046 df_grow_insn_info ();
1047 insn_rec = DF_INSN_INFO_GET (insn);
1050 insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
1051 DF_INSN_INFO_SET (insn, insn_rec);
1053 memset (insn_rec, 0, sizeof (struct df_insn_info));
1054 insn_rec->insn = insn;
1059 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
1062 df_ref_chain_delete_du_chain (df_ref *ref_rec)
1066 df_ref ref = *ref_rec;
1067 /* CHAIN is allocated by DF_CHAIN. So make sure to
1068 pass df_scan instance for the problem. */
1069 if (DF_REF_CHAIN (ref))
1070 df_chain_unlink (ref);
1076 /* Delete all refs in the ref chain. */
1079 df_ref_chain_delete (df_ref *ref_rec)
1081 df_ref *start = ref_rec;
1084 df_reg_chain_unlink (*ref_rec);
1088 /* If the list is empty, it has a special shared element that is not
1095 /* Delete the hardreg chain. */
1098 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
1100 struct df_scan_problem_data *problem_data;
1105 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
1109 pool_free (problem_data->mw_reg_pool, *hardregs);
1115 /* Delete all of the refs information from INSN. BB must be passed in
1116 except when called from df_process_deferred_rescans to mark the block
1120 df_insn_delete (basic_block bb, unsigned int uid)
1122 struct df_insn_info *insn_info = NULL;
1126 df_grow_bb_info (df_scan);
1127 df_grow_reg_info ();
1129 /* The block must be marked as dirty now, rather than later as in
1130 df_insn_rescan and df_notes_rescan because it may not be there at
1131 rescanning time and the mark would blow up. */
1133 df_set_bb_dirty (bb);
1135 insn_info = DF_INSN_UID_SAFE_GET (uid);
1137 /* The client has deferred rescanning. */
1138 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1142 bitmap_clear_bit (df->insns_to_rescan, uid);
1143 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1144 bitmap_set_bit (df->insns_to_delete, uid);
1147 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1152 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1154 bitmap_clear_bit (df->insns_to_delete, uid);
1155 bitmap_clear_bit (df->insns_to_rescan, uid);
1156 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1159 struct df_scan_problem_data *problem_data
1160 = (struct df_scan_problem_data *) df_scan->problem_data;
1162 /* In general, notes do not have the insn_info fields
1163 initialized. However, combine deletes insns by changing them
1164 to notes. How clever. So we cannot just check if it is a
1165 valid insn before short circuiting this code, we need to see
1166 if we actually initialized it. */
1167 if (insn_info->defs)
1169 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1173 df_ref_chain_delete_du_chain (insn_info->defs);
1174 df_ref_chain_delete_du_chain (insn_info->uses);
1175 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1178 df_ref_chain_delete (insn_info->defs);
1179 df_ref_chain_delete (insn_info->uses);
1180 df_ref_chain_delete (insn_info->eq_uses);
1182 pool_free (problem_data->insn_pool, insn_info);
1183 DF_INSN_UID_SET (uid, NULL);
1188 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1191 df_free_collection_rec (struct df_collection_rec *collection_rec)
1194 struct df_scan_problem_data *problem_data
1195 = (struct df_scan_problem_data *) df_scan->problem_data;
1197 struct df_mw_hardreg *mw;
1199 for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
1201 for (ix = 0; VEC_iterate (df_ref, collection_rec->use_vec, ix, ref); ++ix)
1203 for (ix = 0; VEC_iterate (df_ref, collection_rec->eq_use_vec, ix, ref); ++ix)
1206 VEC_iterate (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw);
1208 pool_free (problem_data->mw_reg_pool, mw);
1210 VEC_free (df_ref, stack, collection_rec->def_vec);
1211 VEC_free (df_ref, stack, collection_rec->use_vec);
1212 VEC_free (df_ref, stack, collection_rec->eq_use_vec);
1213 VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
1216 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1219 df_insn_rescan (rtx insn)
1221 unsigned int uid = INSN_UID (insn);
1222 struct df_insn_info *insn_info = NULL;
1223 basic_block bb = BLOCK_FOR_INSN (insn);
1224 struct df_collection_rec collection_rec;
/* Nothing to do if dataflow is not initialized or this is not a real insn.  */
1226 if ((!df) || (!INSN_P (insn)))
1232 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1236 /* The client has disabled rescanning and plans to do it itself. */
1237 if (df->changeable_flags & DF_NO_INSN_RESCAN)
/* Make sure the per-bb and per-reg side tables are large enough before
   touching them.  */
1240 df_grow_bb_info (df_scan);
1241 df_grow_reg_info ();
1243 insn_info = DF_INSN_UID_SAFE_GET (uid);
1245 /* The client has deferred rescanning. */
1246 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
/* No record yet: create an empty one so the deferred pass has something
   to rescan; the null rec sentinels mark "no refs collected yet".  */
1250 insn_info = df_insn_create_insn_record (insn);
1251 insn_info->defs = df_null_ref_rec;
1252 insn_info->uses = df_null_ref_rec;
1253 insn_info->eq_uses = df_null_ref_rec;
1254 insn_info->mw_hardregs = df_null_mw_rec;
1257 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
/* The insn is now queued for rescan, so it must not also be queued for
   deletion or notes-rescan.  */
1259 bitmap_clear_bit (df->insns_to_delete, uid);
1260 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1261 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
/* Immediate rescan path: collect refs into stack-allocated vectors.  */
1265 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
1266 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
1267 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
1268 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
/* This insn is being processed now; drop any pending deferred work on it.  */
1270 bitmap_clear_bit (df->insns_to_delete, uid);
1271 bitmap_clear_bit (df->insns_to_rescan, uid);
1272 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
/* Compare freshly collected refs against the existing record.  */
1276 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1277 /* If there's no change, return false. */
1280 df_free_collection_rec (&collection_rec);
1282 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1286 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1288 /* There's change - we need to delete the existing info.
1289 Since the insn isn't moved, we can salvage its LUID. */
1290 luid = DF_INSN_LUID (insn);
1291 df_insn_delete (NULL, uid);
1292 df_insn_create_insn_record (insn);
1293 DF_INSN_LUID (insn) = luid;
/* Insn had no record at all: scan it from scratch.  */
1297 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1298 df_insn_refs_collect (&collection_rec, bb, insn_info);
1300 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
/* Install the collected refs and mark the block dirty so dataflow is
   recomputed for it.  */
1303 df_refs_add_to_chains (&collection_rec, bb, insn);
1304 df_set_bb_dirty (bb);
1306 VEC_free (df_ref, stack, collection_rec.def_vec);
1307 VEC_free (df_ref, stack, collection_rec.use_vec);
1308 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
1309 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
1314 /* Same as df_insn_rescan, but don't mark the basic block as
1318 df_insn_rescan_debug_internal (rtx insn)
1320 unsigned int uid = INSN_UID (insn);
1321 struct df_insn_info *insn_info;
/* Only valid for a debug insn whose location has already been reset to
   "unknown"; the scan data is simply discarded, not recollected.  */
1323 gcc_assert (DEBUG_INSN_P (insn)
1324 && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
1329 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1334 fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
/* This insn no longer needs any deferred processing.  */
1336 bitmap_clear_bit (df->insns_to_delete, uid);
1337 bitmap_clear_bit (df->insns_to_rescan, uid);
1338 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1340 if (!insn_info->defs)
/* If all four ref groups are already the shared null sentinels, there is
   nothing to free.  */
1343 if (insn_info->defs == df_null_ref_rec
1344 && insn_info->uses == df_null_ref_rec
1345 && insn_info->eq_uses == df_null_ref_rec
1346 && insn_info->mw_hardregs == df_null_mw_rec)
1349 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
/* Delete def-use/use-def chain links first, then the refs themselves.  */
1353 df_ref_chain_delete_du_chain (insn_info->defs);
1354 df_ref_chain_delete_du_chain (insn_info->uses);
1355 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1358 df_ref_chain_delete (insn_info->defs);
1359 df_ref_chain_delete (insn_info->uses);
1360 df_ref_chain_delete (insn_info->eq_uses);
/* Reset the record to the empty sentinels.  */
1362 insn_info->defs = df_null_ref_rec;
1363 insn_info->uses = df_null_ref_rec;
1364 insn_info->eq_uses = df_null_ref_rec;
1365 insn_info->mw_hardregs = df_null_mw_rec;
1371 /* Rescan all of the insns in the function. Note that the artificial
1372 uses and defs are not touched. This function will destroy def-se
1373 or use-def chains. */
1376 df_insn_rescan_all (void)
1378 bool no_insn_rescan = false;
1379 bool defer_insn_rescan = false;
1383 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
/* Temporarily clear the no-rescan/defer-rescan modes so the calls below
   actually do the work; the saved flags are restored at the end.  */
1385 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1387 df_clear_flags (DF_NO_INSN_RESCAN);
1388 no_insn_rescan = true;
1391 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1393 df_clear_flags (DF_DEFER_INSN_RESCAN);
1394 defer_insn_rescan = true;
/* Process pending deletions first.  Iterate over a copy because
   df_insn_delete may modify df->insns_to_delete.  */
1397 bitmap_copy (tmp, df->insns_to_delete);
1398 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1400 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1402 df_insn_delete (NULL, uid);
/* All deferred work is superseded by the full rescan below.  */
1406 bitmap_clear (df->insns_to_delete);
1407 bitmap_clear (df->insns_to_rescan);
1408 bitmap_clear (df->insns_to_notes_rescan);
1413 FOR_BB_INSNS (bb, insn)
1415 df_insn_rescan (insn);
/* Restore the caller's rescan-mode flags.  */
1420 df_set_flags (DF_NO_INSN_RESCAN);
1421 if (defer_insn_rescan)
1422 df_set_flags (DF_DEFER_INSN_RESCAN);
1426 /* Process all of the deferred rescans or deletions. */
1429 df_process_deferred_rescans (void)
1431 bool no_insn_rescan = false;
1432 bool defer_insn_rescan = false;
1435 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
/* Temporarily drop the no-rescan/defer-rescan modes so the queued work
   is actually executed now; flags are restored below.  */
1437 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1439 df_clear_flags (DF_NO_INSN_RESCAN);
1440 no_insn_rescan = true;
1443 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1445 df_clear_flags (DF_DEFER_INSN_RESCAN);
1446 defer_insn_rescan = true;
1450 fprintf (dump_file, "starting the processing of deferred insns\n");
/* Deletions first; iterate over a snapshot since the worklists can be
   modified by the calls inside the loops.  */
1452 bitmap_copy (tmp, df->insns_to_delete);
1453 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1455 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1457 df_insn_delete (NULL, uid);
/* Then full rescans.  */
1460 bitmap_copy (tmp, df->insns_to_rescan);
1461 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1463 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1465 df_insn_rescan (insn_info->insn);
/* Then REG_EQUIV/REG_EQUAL note-only rescans.  */
1468 bitmap_copy (tmp, df->insns_to_notes_rescan);
1469 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1471 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1473 df_notes_rescan (insn_info->insn);
1477 fprintf (dump_file, "ending the processing of deferred insns\n");
1480 bitmap_clear (df->insns_to_delete);
1481 bitmap_clear (df->insns_to_rescan);
1482 bitmap_clear (df->insns_to_notes_rescan);
/* Restore the caller's rescan-mode flags.  */
1485 df_set_flags (DF_NO_INSN_RESCAN);
1486 if (defer_insn_rescan)
1487 df_set_flags (DF_DEFER_INSN_RESCAN);
1489 /* If someone changed regs_ever_live during this pass, fix up the
1490 entry and exit blocks. */
1491 if (df->redo_entry_and_exit)
1493 df_update_entry_exit_and_calls ();
1494 df->redo_entry_and_exit = false;
1499 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1500 the uses if INCLUDE_USES. Include the eq_uses if
1504 df_count_refs (bool include_defs, bool include_uses,
1505 bool include_eq_uses)
/* Sum the per-register ref counters over every register df has seen.  */
1509 unsigned int m = df->regs_inited;
1511 for (regno = 0; regno < m; regno++)
1514 size += DF_REG_DEF_COUNT (regno);
1516 size += DF_REG_USE_COUNT (regno);
1517 if (include_eq_uses)
1518 size += DF_REG_EQ_USE_COUNT (regno);
1524 /* Take build ref table for either the uses or defs from the reg-use
1525 or reg-def chains. This version processes the refs in reg order
1526 which is likely to be best if processing the whole function. */
1529 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1532 bool include_eq_uses)
1534 unsigned int m = df->regs_inited;
1536 unsigned int offset = 0;
/* When hard regs are excluded, skip them and zero their table slots so
   stale begin/count data is not left behind.  */
1539 if (df->changeable_flags & DF_NO_HARD_REGS)
1541 start = FIRST_PSEUDO_REGISTER;
1542 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1543 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1548 ref_info->total_size
1549 = df_count_refs (include_defs, include_uses, include_eq_uses);
1551 df_check_and_grow_ref_info (ref_info, 1);
/* Walk register by register, appending each selected chain's refs into
   the flat table and renumbering DF_REF_ID sequentially.  */
1553 for (regno = start; regno < m; regno++)
1556 ref_info->begin[regno] = offset;
1559 df_ref ref = DF_REG_DEF_CHAIN (regno);
1562 ref_info->refs[offset] = ref;
1563 DF_REF_ID (ref) = offset++;
1565 ref = DF_REF_NEXT_REG (ref);
1566 gcc_assert (offset < ref_info->refs_size);
1571 df_ref ref = DF_REG_USE_CHAIN (regno);
1574 ref_info->refs[offset] = ref;
1575 DF_REF_ID (ref) = offset++;
1577 ref = DF_REF_NEXT_REG (ref);
1578 gcc_assert (offset < ref_info->refs_size);
1581 if (include_eq_uses)
1583 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1586 ref_info->refs[offset] = ref;
1587 DF_REF_ID (ref) = offset++;
1589 ref = DF_REF_NEXT_REG (ref);
1590 gcc_assert (offset < ref_info->refs_size);
1593 ref_info->count[regno] = count;
1596 /* The bitmap size is not decremented when refs are deleted. So
1597 reset it now that we have squished out all of the empty
1599 ref_info->table_size = offset;
1603 /* Take build ref table for either the uses or defs from the reg-use
1604 or reg-def chains. This version processes the refs in insn order
1605 which is likely to be best if processing some segment of the
1609 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1612 bool include_eq_uses)
1615 unsigned int bb_index;
1616 unsigned int m = df->regs_inited;
1617 unsigned int offset = 0;
1620 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1622 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1623 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1625 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1626 df_check_and_grow_ref_info (ref_info, 1);
/* Pass 1: count how many selected refs each register has, scanning the
   analyzed blocks' artificial refs and every insn's ref arrays.  */
1628 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1630 basic_block bb = BASIC_BLOCK (bb_index);
1635 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1637 unsigned int regno = DF_REF_REGNO (*ref_rec);
1638 ref_info->count[regno]++;
1641 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1643 unsigned int regno = DF_REF_REGNO (*ref_rec);
1644 ref_info->count[regno]++;
1647 FOR_BB_INSNS (bb, insn)
1651 unsigned int uid = INSN_UID (insn);
1654 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1656 unsigned int regno = DF_REF_REGNO (*ref_rec);
1657 ref_info->count[regno]++;
1660 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1662 unsigned int regno = DF_REF_REGNO (*ref_rec);
1663 ref_info->count[regno]++;
1665 if (include_eq_uses)
1666 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1668 unsigned int regno = DF_REF_REGNO (*ref_rec);
1669 ref_info->count[regno]++;
/* Turn the per-register counts into start offsets (a prefix sum), then
   reset the counts so pass 2 can reuse them as fill cursors.  */
1675 for (r = start; r < m; r++)
1677 ref_info->begin[r] = offset;
1678 offset += ref_info->count[r];
1679 ref_info->count[r] = 0;
/* Pass 2: same traversal order; drop each ref into its register's slice
   of the table and assign its DF_REF_ID.  */
1682 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1684 basic_block bb = BASIC_BLOCK (bb_index);
1689 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1691 df_ref ref = *ref_rec;
1692 unsigned int regno = DF_REF_REGNO (ref);
1696 = ref_info->begin[regno] + ref_info->count[regno]++;
1697 DF_REF_ID (ref) = id;
1698 ref_info->refs[id] = ref;
1702 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1704 df_ref ref = *ref_rec;
1705 unsigned int regno = DF_REF_REGNO (ref);
1709 = ref_info->begin[regno] + ref_info->count[regno]++;
1710 DF_REF_ID (ref) = id;
1711 ref_info->refs[id] = ref;
1715 FOR_BB_INSNS (bb, insn)
1719 unsigned int uid = INSN_UID (insn);
1722 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1724 df_ref ref = *ref_rec;
1725 unsigned int regno = DF_REF_REGNO (ref);
1729 = ref_info->begin[regno] + ref_info->count[regno]++;
1730 DF_REF_ID (ref) = id;
1731 ref_info->refs[id] = ref;
1735 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1737 df_ref ref = *ref_rec;
1738 unsigned int regno = DF_REF_REGNO (ref);
1742 = ref_info->begin[regno] + ref_info->count[regno]++;
1743 DF_REF_ID (ref) = id;
1744 ref_info->refs[id] = ref;
1747 if (include_eq_uses)
1748 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1750 df_ref ref = *ref_rec;
1751 unsigned int regno = DF_REF_REGNO (ref);
1755 = ref_info->begin[regno] + ref_info->count[regno]++;
1756 DF_REF_ID (ref) = id;
1757 ref_info->refs[id] = ref;
1764 /* The bitmap size is not decremented when refs are deleted. So
1765 reset it now that we have squished out all of the empty
1768 ref_info->table_size = offset;
1771 /* Take build ref table for either the uses or defs from the reg-use
1772 or reg-def chains. */
1775 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1778 bool include_eq_uses)
/* Dispatch: when only a subset of blocks is analyzed, walk insns so out-of
   -subset refs are skipped; otherwise the reg-chain walk is cheaper.  */
1780 if (df->analyze_subset)
1781 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1782 include_uses, include_eq_uses);
1784 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1785 include_uses, include_eq_uses);
1789 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1791 df_add_refs_to_table (unsigned int offset,
1792 struct df_ref_info *ref_info,
1797 df_ref ref = *ref_vec;
/* Hard-reg refs are skipped when DF_NO_HARD_REGS is in effect; every
   installed ref gets its DF_REF_ID renumbered to its table slot.  */
1798 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1799 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1801 ref_info->refs[offset] = ref;
1802 DF_REF_ID (*ref_vec) = offset++;
1810 /* Count the number of refs in all of the insns of BB. Include the
1811 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1812 eq_uses if INCLUDE_EQ_USES. */
1815 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1816 struct df_ref_info *ref_info,
1817 bool include_defs, bool include_uses,
1818 bool include_eq_uses)
/* Artificial (block-level) refs come first, then each insn's refs in
   block order; OFFSET advances through the flat table.  */
1823 offset = df_add_refs_to_table (offset, ref_info,
1824 df_get_artificial_defs (bb->index));
1826 offset = df_add_refs_to_table (offset, ref_info,
1827 df_get_artificial_uses (bb->index));
1829 FOR_BB_INSNS (bb, insn)
1832 unsigned int uid = INSN_UID (insn);
1834 offset = df_add_refs_to_table (offset, ref_info,
1835 DF_INSN_UID_DEFS (uid));
1837 offset = df_add_refs_to_table (offset, ref_info,
1838 DF_INSN_UID_USES (uid));
1839 if (include_eq_uses)
1840 offset = df_add_refs_to_table (offset, ref_info,
1841 DF_INSN_UID_EQ_USES (uid));
1847 /* Organize the refs by insn into the table in REF_INFO. If
1848 blocks_to_analyze is defined, use that set, otherwise the entire
1849 program. Include the defs if INCLUDE_DEFS. Include the uses if
1850 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1853 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1854 bool include_defs, bool include_uses,
1855 bool include_eq_uses)
1858 unsigned int offset = 0;
1860 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1861 df_check_and_grow_ref_info (ref_info, 1);
/* Either restrict to the requested subset of blocks...  */
1862 if (df->blocks_to_analyze)
1867 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1869 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1870 include_defs, include_uses,
1874 ref_info->table_size = offset;
/* ...or walk every basic block of the function.  */
1879 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1880 include_defs, include_uses,
1882 ref_info->table_size = offset;
1887 /* If the use refs in DF are not organized, reorganize them. */
1890 df_maybe_reorganize_use_refs (enum df_ref_order order)
/* Already in the requested order: nothing to do.  */
1892 if (order == df->use_info.ref_order)
1897 case DF_REF_ORDER_BY_REG:
1898 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1901 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1902 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1905 case DF_REF_ORDER_BY_INSN:
1906 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1909 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1910 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1913 case DF_REF_ORDER_NO_TABLE:
/* Caller no longer wants a flat table: release it.  */
1914 free (df->use_info.refs);
1915 df->use_info.refs = NULL;
1916 df->use_info.refs_size = 0;
1919 case DF_REF_ORDER_UNORDERED:
1920 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1925 df->use_info.ref_order = order;
1929 /* If the def refs in DF are not organized, reorganize them. */
1932 df_maybe_reorganize_def_refs (enum df_ref_order order)
/* Already in the requested order: nothing to do.  */
1934 if (order == df->def_info.ref_order)
1939 case DF_REF_ORDER_BY_REG:
1940 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1943 case DF_REF_ORDER_BY_INSN:
1944 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1947 case DF_REF_ORDER_NO_TABLE:
/* Caller no longer wants a flat table: release it.  */
1948 free (df->def_info.refs);
1949 df->def_info.refs = NULL;
1950 df->def_info.refs_size = 0;
1953 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1954 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1955 case DF_REF_ORDER_UNORDERED:
1956 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1961 df->def_info.ref_order = order;
1965 /* Change all of the basic block references in INSN to use the insn's
1966 current basic block. This function is called from routines that move
1967 instructions from one block to another. */
1970 df_insn_change_bb (rtx insn, basic_block new_bb)
1972 basic_block old_bb = BLOCK_FOR_INSN (insn);
1973 struct df_insn_info *insn_info;
1974 unsigned int uid = INSN_UID (insn);
1976 if (old_bb == new_bb)
/* Update the insn's block pointer unconditionally, even when df is not
   tracking this insn.  */
1979 set_block_for_insn (insn, new_bb);
1985 fprintf (dump_file, "changing bb of uid %d\n", uid);
1987 insn_info = DF_INSN_UID_SAFE_GET (uid);
/* Insn was never scanned: a full rescan builds its record in NEW_BB.  */
1988 if (insn_info == NULL)
1991 fprintf (dump_file, " unscanned insn\n");
1992 df_insn_rescan (insn);
/* Both affected blocks need their dataflow recomputed.  */
1999 df_set_bb_dirty (new_bb);
2003 fprintf (dump_file, " from %d to %d\n",
2004 old_bb->index, new_bb->index);
2005 df_set_bb_dirty (old_bb);
2009 fprintf (dump_file, " to %d\n", new_bb->index);
2013 /* Helper function for df_ref_change_reg_with_loc. */
2016 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
2017 struct df_reg_info *new_df,
2018 int new_regno, rtx loc)
2020 df_ref the_ref = old_df->reg_chain;
/* Only non-artificial refs whose location is exactly LOC are moved;
   artificial refs have no loc and are left alone.  */
2024 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
2025 && (DF_REF_LOC (the_ref))
2026 && (*DF_REF_LOC (the_ref) == loc))
2028 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
2029 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
2030 df_ref *ref_vec, *ref_vec_t;
2031 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
2032 unsigned int count = 0;
/* Retarget the ref to the new register.  */
2034 DF_REF_REGNO (the_ref) = new_regno;
2035 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
2037 /* Pull the_ref out of the old regno chain. */
2039 DF_REF_NEXT_REG (prev_ref) = next_ref;
2041 old_df->reg_chain = next_ref;
2043 DF_REF_PREV_REG (next_ref) = prev_ref;
2046 /* Put the ref into the new regno chain. */
2047 DF_REF_PREV_REG (the_ref) = NULL;
2048 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
2049 if (new_df->reg_chain)
2050 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
2051 new_df->reg_chain = the_ref;
2053 if (DF_REF_BB (the_ref))
2054 df_set_bb_dirty (DF_REF_BB (the_ref));
2056 /* Need to sort the record again that the ref was in because
2057 the regno is a sorting key. First, find the right
2059 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
2060 ref_vec = insn_info->eq_uses;
2062 ref_vec = insn_info->uses;
2064 fprintf (dump_file, "changing reg in insn %d\n",
2065 DF_REF_INSN_UID (the_ref));
2067 ref_vec_t = ref_vec;
2069 /* Find the length. */
2075 qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);
/* Advance along the (original) chain to the next candidate.  */
2080 the_ref = DF_REF_NEXT_REG (the_ref);
2085 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2086 NEW_REGNO. Refs that do not match LOC are not changed which means
2087 that artificial refs are not changed since they have no loc. This
2088 call is to support the SET_REGNO macro. */
2091 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
/* Nothing to do when df is off, the regno is invalid, or unchanged.  */
2093 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2096 df_grow_reg_info ();
/* Move matching refs out of each of the three per-register chains
   (defs, uses, eq_uses) into the new register's chains.  */
2098 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2099 DF_REG_DEF_GET (new_regno), new_regno, loc);
2100 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2101 DF_REG_USE_GET (new_regno), new_regno, loc);
2102 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2103 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2107 /* Delete the mw_hardregs that point into the eq_notes. */
2110 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2112 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2113 unsigned int deleted = 0;
2114 unsigned int count = 0;
2115 struct df_scan_problem_data *problem_data
2116 = (struct df_scan_problem_data *) df_scan->problem_data;
/* DF_REF_IN_NOTE marks entries that came from REG_EQUIV/REG_EQUAL
   notes; return those to the allocation pool.  */
2123 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2125 struct df_mw_hardreg **temp_vec = mw_vec;
2127 pool_free (problem_data->mw_reg_pool, *mw_vec);
2129 /* Shove the remaining ones down one to fill the gap. While
2130 this looks n**2, it is highly unusual to have any mw regs
2131 in eq_notes and the chances of more than one are almost
2135 *temp_vec = *(temp_vec + 1);
/* If everything was deleted, free the vector itself and fall back to
   the shared empty sentinel.  */
2149 df_scan_free_mws_vec (insn_info->mw_hardregs);
2150 insn_info->mw_hardregs = df_null_mw_rec;
2157 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2160 df_notes_rescan (rtx insn)
2162 struct df_insn_info *insn_info;
2163 unsigned int uid = INSN_UID (insn);
2168 /* The client has disabled rescanning and plans to do it itself. */
2169 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2172 /* Do nothing if the insn hasn't been emitted yet. */
2173 if (!BLOCK_FOR_INSN (insn))
2176 df_grow_bb_info (df_scan);
2177 df_grow_reg_info ();
2179 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
2181 /* The client has deferred rescanning. */
2182 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
/* No record yet: create an empty one with the null sentinels so the
   deferred pass has something to work on.  */
2186 insn_info = df_insn_create_insn_record (insn);
2187 insn_info->defs = df_null_ref_rec;
2188 insn_info->uses = df_null_ref_rec;
2189 insn_info->eq_uses = df_null_ref_rec;
2190 insn_info->mw_hardregs = df_null_mw_rec;
2193 bitmap_clear_bit (df->insns_to_delete, uid);
2194 /* If the insn is set to be rescanned, it does not need to also
2195 be notes rescanned. */
2196 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2197 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
2201 bitmap_clear_bit (df->insns_to_delete, uid);
2202 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
/* Immediate path (insn already has a record): rebuild only the
   eq_uses and note-derived mw_hardregs.  */
2206 basic_block bb = BLOCK_FOR_INSN (insn);
2208 struct df_collection_rec collection_rec;
2209 unsigned int num_deleted;
2210 unsigned int mw_len;
2212 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2213 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
2214 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
/* Throw away the old note-derived data before recollecting.  */
2216 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2217 df_ref_chain_delete (insn_info->eq_uses);
2218 insn_info->eq_uses = NULL;
2220 /* Process REG_EQUIV/REG_EQUAL notes */
2221 for (note = REG_NOTES (insn); note;
2222 note = XEXP (note, 1))
2224 switch (REG_NOTE_KIND (note))
2228 df_uses_record (DF_REF_REGULAR, &collection_rec,
2229 &XEXP (note, 0), DF_REF_REG_USE,
2230 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
2236 /* Find some place to put any new mw_hardregs. */
2237 df_canonize_collection_rec (&collection_rec);
2238 mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
2241 unsigned int count = 0;
2242 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2251 /* Append to the end of the existing record after
2252 expanding it if necessary. */
2253 if (mw_len > num_deleted)
2255 insn_info->mw_hardregs =
2256 XRESIZEVEC (struct df_mw_hardreg *,
2257 insn_info->mw_hardregs,
2258 count + 1 + mw_len);
2260 memcpy (&insn_info->mw_hardregs[count],
2261 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2262 mw_len * sizeof (struct df_mw_hardreg *));
2263 insn_info->mw_hardregs[count + mw_len] = NULL;
/* Re-sort: mw records must stay in df_mw_compare order.  */
2264 qsort (insn_info->mw_hardregs, count + mw_len,
2265 sizeof (struct df_mw_hardreg *), df_mw_compare);
2269 /* No vector there. */
2270 insn_info->mw_hardregs
2271 = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
2272 memcpy (insn_info->mw_hardregs,
2273 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2274 mw_len * sizeof (struct df_mw_hardreg *));
2275 insn_info->mw_hardregs[mw_len] = NULL;
2278 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2280 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
2281 df_refs_add_to_chains (&collection_rec, bb, insn);
2282 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
/* Fallback path: no existing record, so do a full rescan instead.  */
2285 df_insn_rescan (insn);
2290 /*----------------------------------------------------------------------------
2291 Hard core instruction scanning code. No external interfaces here,
2292 just a lot of routines that look inside insns.
2293 ----------------------------------------------------------------------------*/
2296 /* Return true if the contents of two df_ref's are identical.
2297 It ignores DF_REF_MARKER. */
2300 df_ref_equal_p (df_ref ref1, df_ref ref2)
/* The marker and mw-hardreg bits are masked out of the flag compare;
   everything else in the common part must match exactly.  */
2308 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2309 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2310 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2311 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2312 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2313 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2314 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2315 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
/* Class-specific comparison: extract refs also compare their
   offset/width/mode; regular refs compare the loc pointer.  */
2318 switch (DF_REF_CLASS (ref1))
2320 case DF_REF_ARTIFICIAL:
2324 case DF_REF_EXTRACT:
2325 if ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2326 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2327 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2)))
2331 case DF_REF_REGULAR:
2332 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2341 /* Compare REF1 and REF2 for sorting. This is only called from places
2342 where all of the refs are of the same type, in the same insn, and
2343 have the same bb. So these fields are not checked. */
2346 df_ref_compare (const void *r1, const void *r2)
2348 const df_ref ref1 = *(const df_ref *)r1;
2349 const df_ref ref2 = *(const df_ref *)r2;
/* Keys in priority order: class, regno, type; then DF_REF_ORDER acts
   as a stable tie-breaker when REG or LOC differ.  */
2354 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2355 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2357 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2358 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2360 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2361 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2363 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2364 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2366 /* Cannot look at the LOC field on artificial refs. */
2367 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2368 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2369 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2371 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2373 /* If two refs are identical except that one of them has is from
2374 a mw and one is not, we need to have the one with the mw
2376 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2377 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2378 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2379 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2385 /* The classes are the same at this point so it is safe to only look
2387 if (DF_REF_CLASS (ref1) == DF_REF_EXTRACT)
2389 if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2390 return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
2391 if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2392 return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
2393 if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
2394 return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
/* Exchange the refs at indices I and J of *REF_VEC.  */
2400 df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2402 df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2403 VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2404 VEC_replace (df_ref, *ref_vec, j, tmp);
2407 /* Sort and compress a set of refs. */
2410 df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
2414 unsigned int dist = 0;
2416 count = VEC_length (df_ref, *ref_vec);
2418 /* If there are 1 or 0 elements, there is nothing to do. */
2421 else if (count == 2)
/* Two elements: a single compare-and-swap avoids calling qsort.  */
2423 df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
2424 df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
2425 if (df_ref_compare (&r0, &r1) > 0)
2426 df_swap_refs (ref_vec, 0, 1);
/* General case: first check whether the vector is already strictly
   sorted (common for large call-insn ref sets).  */
2430 for (i = 0; i < count - 1; i++)
2432 df_ref r0 = VEC_index (df_ref, *ref_vec, i);
2433 df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
2434 if (df_ref_compare (&r0, &r1) >= 0)
2437 /* If the array is already strictly ordered,
2438 which is the most common case for large COUNT case
2439 (which happens for CALL INSNs),
2440 no need to sort and filter out duplicate.
2441 Simply return the count.
2442 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2443 of DF_REF_COMPARE. */
2446 qsort (VEC_address (df_ref, *ref_vec), count, sizeof (df_ref),
/* Compress: DIST counts duplicates seen so far; each surviving ref is
   shifted left over the freed duplicates, which get deallocated.  */
2450 for (i=0; i<count-dist; i++)
2452 /* Find the next ref that is not equal to the current ref. */
2453 while (i + dist + 1 < count
2454 && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
2455 VEC_index (df_ref, *ref_vec, i + dist + 1)))
2457 df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
2460 /* Copy it down to the next position. */
2461 if (dist && i + dist + 1 < count)
2462 VEC_replace (df_ref, *ref_vec, i + 1,
2463 VEC_index (df_ref, *ref_vec, i + dist + 1));
/* Drop the now-unused tail of the vector.  */
2467 VEC_truncate (df_ref, *ref_vec, count);
2471 /* Return true if the contents of two df_mw_hardreg records are
2472 identical (or they are the same record). */
2475 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2479 return (mw1 == mw2) ||
2480 (mw1->mw_reg == mw2->mw_reg
2481 && mw1->type == mw2->type
2482 && mw1->flags == mw2->flags
2483 && mw1->start_regno == mw2->start_regno
2484 && mw1->end_regno == mw2->end_regno);
2488 /* Compare MW1 and MW2 for sorting. */
2491 df_mw_compare (const void *m1, const void *m2)
2493 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2494 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
/* Keys in priority order: type, flags, start/end regno; mw_order is the
   final tie-breaker when the underlying regs differ.  */
2499 if (mw1->type != mw2->type)
2500 return mw1->type - mw2->type;
2502 if (mw1->flags != mw2->flags)
2503 return mw1->flags - mw2->flags;
2505 if (mw1->start_regno != mw2->start_regno)
2506 return mw1->start_regno - mw2->start_regno;
2508 if (mw1->end_regno != mw2->end_regno)
2509 return mw1->end_regno - mw2->end_regno;
2511 if (mw1->mw_reg != mw2->mw_reg)
2512 return mw1->mw_order - mw2->mw_order;
2518 /* Sort and compress a set of mw_hardreg records, freeing duplicates
2519 back to the mw_reg allocation pool. */
2521 df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
2524 struct df_scan_problem_data *problem_data
2525 = (struct df_scan_problem_data *) df_scan->problem_data;
2527 unsigned int dist = 0;
2529 count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
2532 else if (count == 2)
/* Two elements: a single compare-and-swap avoids calling qsort.  */
2534 struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
2535 struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
2536 if (df_mw_compare (&m0, &m1) > 0)
2538 struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
2540 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
2541 VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
2542 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
2546 qsort (VEC_address (df_mw_hardreg_ptr, *mw_vec), count,
2547 sizeof (struct df_mw_hardreg *), df_mw_compare);
/* Compress: DIST counts duplicates freed so far; survivors shift left
   over the gap (same scheme as df_sort_and_compress_refs).  */
2549 for (i=0; i<count-dist; i++)
2551 /* Find the next ref that is not equal to the current ref. */
2552 while (i + dist + 1 < count
2553 && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
2554 VEC_index (df_mw_hardreg_ptr, *mw_vec,
2557 pool_free (problem_data->mw_reg_pool,
2558 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
2561 /* Copy it down to the next position. */
2562 if (dist && i + dist + 1 < count)
2563 VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
2564 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
/* Drop the now-unused tail of the vector.  */
2568 VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
2572 /* Sort and remove duplicates from the COLLECTION_REC. */
2575 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
/* Canonicalize each of the four groups independently.  */
2577 df_sort_and_compress_refs (&collection_rec->def_vec);
2578 df_sort_and_compress_refs (&collection_rec->use_vec);
2579 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2580 df_sort_and_compress_mws (&collection_rec->mw_vec);
2584 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2587 df_install_ref (df_ref this_ref,
2588 struct df_reg_info *reg_info,
2589 struct df_ref_info *ref_info,
2592 unsigned int regno = DF_REF_REGNO (this_ref);
2593 /* Add the ref to the reg_{def,use,eq_use} chain. */
2594 df_ref head = reg_info->reg_chain;
2596 reg_info->reg_chain = this_ref;
/* Maintain the live hard-reg counter used elsewhere by df.  */
2599 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2601 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2602 df->hard_regs_live_count[regno]++;
/* The ref must not already be linked into a reg chain.  */
2605 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL
2606 && DF_REF_PREV_REG (this_ref) == NULL);
2608 DF_REF_NEXT_REG (this_ref) = head;
2610 /* We cannot actually link to the head of the chain. */
2611 DF_REF_PREV_REG (this_ref) = NULL;
2614 DF_REF_PREV_REG (head) = this_ref;
/* Optionally also register the ref in the flat table; the table must
   exist in some ordered/unordered mode (not NO_TABLE).  */
2618 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2619 df_check_and_grow_ref_info (ref_info, 1);
2620 DF_REF_ID (this_ref) = ref_info->table_size;
2621 /* Add the ref to the big array of defs. */
2622 ref_info->refs[ref_info->table_size] = this_ref;
2623 ref_info->table_size++;
2626 DF_REF_ID (this_ref) = -1;
2628 ref_info->total_size++;
2632 /* This function takes one of the groups of refs (defs, uses or
2633 eq_uses) and installs the entire group into the insn. It also adds
2634 each of these refs into the appropriate chains. */
2637 df_install_refs (basic_block bb,
2638 VEC(df_ref,stack)* old_vec,
2639 struct df_reg_info **reg_info,
2640 struct df_ref_info *ref_info,
2645 count = VEC_length (df_ref, old_vec);
/* Copy the stack vector into a heap-allocated, NULL-terminated array
   that becomes the insn's permanent ref record.  */
2648 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
/* Decide whether refs also go into the flat table.  Installing new refs
   degrades any previously established ordering to "unordered".  */
2653 switch (ref_info->ref_order)
2655 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2656 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2657 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2658 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2659 add_to_table = true;
2661 case DF_REF_ORDER_UNORDERED:
2662 case DF_REF_ORDER_BY_REG:
2663 case DF_REF_ORDER_BY_INSN:
2664 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2665 add_to_table = !is_notes;
2668 add_to_table = false;
2672 /* Do not add if ref is not in the right blocks. */
2673 if (add_to_table && df->analyze_subset)
2674 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2676 for (ix = 0; VEC_iterate (df_ref, old_vec, ix, this_ref); ++ix)
2678 new_vec[ix] = this_ref;
2679 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2680 ref_info, add_to_table);
2683 new_vec[count] = NULL;
/* Empty group: share the null sentinel instead of allocating.  */
2687 return df_null_ref_rec;
2691 /* This function takes the mws and installs the entire group into the
2694 static struct df_mw_hardreg **
/* Copy the multiword-hardreg records of OLD_VEC into a freshly
   allocated NULL-terminated array.  An empty OLD_VEC returns the
   shared df_null_mw_rec sentinel instead of allocating.  */
2695 df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2699 count = VEC_length (df_mw_hardreg_ptr, old_vec);
2702 struct df_mw_hardreg **new_vec
2703 = XNEWVEC (struct df_mw_hardreg*, count + 1);
/* Bulk-copy the pointers straight out of the VEC's backing store.  */
2704 memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2705 sizeof (struct df_mw_hardreg*) * count);
2706 new_vec[count] = NULL;
2710 return df_null_mw_rec;
2714 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2715 chains and update other necessary information. */
/* Install the vectors of COLLECTION_REC either into INSN's record
   (when INSN is non-null) or into BB's artificial def/use slots.  For
   the insn case, only non-NULL vectors in the collection are
   installed; a NULL vector means the caller keeps the existing refs.
   Old ref arrays are freed before being replaced.  */
2718 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2719 basic_block bb, rtx insn)
2723 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2724 /* If there is a vector in the collection rec, add it to the
2725 insn. A null rec is a signal that the caller will handle the
2727 if (collection_rec->def_vec)
2729 df_scan_free_ref_vec (insn_rec->defs);
2731 = df_install_refs (bb, collection_rec->def_vec,
2733 &df->def_info, false);
2735 if (collection_rec->use_vec)
2737 df_scan_free_ref_vec (insn_rec->uses);
2739 = df_install_refs (bb, collection_rec->use_vec,
2741 &df->use_info, false);
/* eq_uses come from REG_EQUAL/REG_EQUIV notes, hence is_notes=true.  */
2743 if (collection_rec->eq_use_vec)
2745 df_scan_free_ref_vec (insn_rec->eq_uses);
2747 = df_install_refs (bb, collection_rec->eq_use_vec,
2749 &df->use_info, true);
2751 if (collection_rec->mw_vec)
2753 df_scan_free_mws_vec (insn_rec->mw_hardregs);
2754 insn_rec->mw_hardregs
2755 = df_install_mws (collection_rec->mw_vec);
/* No insn: the collection holds the block's artificial refs.  */
2760 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2762 df_scan_free_ref_vec (bb_info->artificial_defs);
2763 bb_info->artificial_defs
2764 = df_install_refs (bb, collection_rec->def_vec,
2766 &df->def_info, false);
2767 df_scan_free_ref_vec (bb_info->artificial_uses);
2768 bb_info->artificial_uses
2769 = df_install_refs (bb, collection_rec->use_vec,
2771 &df->use_info, false);
2776 /* Allocate a ref and initialize its fields.
2778 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2779 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the fields
2780 if they were constants. Otherwise they should be -1 if those flags
/* Allocate a df_ref of class CL from the matching allocation pool,
   initialize all of its fields, compute its DF_HARD_REG_LIVE status,
   and (when COLLECTION_REC is given) push it onto the appropriate
   def/use/eq_use vector.  WIDTH/OFFSET/MODE are only meaningful for
   DF_REF_EXTRACT refs.  Returns the new ref.  */
2784 df_ref_create_structure (enum df_ref_class cl,
2785 struct df_collection_rec *collection_rec,
2787 basic_block bb, struct df_insn_info *info,
2788 enum df_ref_type ref_type,
2790 int width, int offset, enum machine_mode mode)
2792 df_ref this_ref = NULL;
/* For a SUBREG, the ref is keyed on the inner register's number.  */
2793 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2794 struct df_scan_problem_data *problem_data
2795 = (struct df_scan_problem_data *) df_scan->problem_data;
/* Each ref class has its own pool; base refs carry no location.  */
2800 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2801 gcc_assert (loc == NULL);
2804 case DF_REF_ARTIFICIAL:
2805 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2806 this_ref->artificial_ref.bb = bb;
2807 gcc_assert (loc == NULL);
2810 case DF_REF_REGULAR:
2811 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2812 this_ref->regular_ref.loc = loc;
2816 case DF_REF_EXTRACT:
2817 this_ref = (df_ref) pool_alloc (problem_data->ref_extract_pool);
2818 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2819 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2820 DF_REF_EXTRACT_MODE (this_ref) = mode;
2821 this_ref->regular_ref.loc = loc;
/* Common field initialization for every ref class.  */
2826 DF_REF_CLASS (this_ref) = cl;
2827 DF_REF_ID (this_ref) = -1;
2828 DF_REF_REG (this_ref) = reg;
2829 DF_REF_REGNO (this_ref) = regno;
2830 DF_REF_TYPE (this_ref) = ref_type;
2831 DF_REF_INSN_INFO (this_ref) = info;
2832 DF_REF_CHAIN (this_ref) = NULL;
2833 DF_REF_FLAGS (this_ref) = ref_flags;
2834 DF_REF_NEXT_REG (this_ref) = NULL;
2835 DF_REF_PREV_REG (this_ref) = NULL;
2836 DF_REF_ORDER (this_ref) = df->ref_order++;
2838 /* We need to clear this bit because fwprop, and in the future
2839 possibly other optimizations sometimes create new refs using old
2840 refs as the model. */
2841 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2843 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2844 if ((regno < FIRST_PSEUDO_REGISTER)
2845 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2847 if (DF_REF_REG_DEF_P (this_ref))
2849 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2850 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE)
/* Uses of an eliminable frame/arg pointer do not keep the hard
   register live; everything else does.  */;
2852 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2853 && (regno == FRAME_POINTER_REGNUM
2854 || regno == ARG_POINTER_REGNUM)))
2855 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
/* File the ref: defs, note uses (eq_uses), or plain uses.  */
2860 if (DF_REF_REG_DEF_P (this_ref))
2861 VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
2862 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2863 VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
2865 VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
2872 /* Create new references of type DF_REF_TYPE for each part of register REG
2873 at address LOC within INSN of BB.
2875 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2876 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
2877 fields if they were constants. Otherwise they should be -1 if
2878 those flags were set. */
/* Record a reference to REG (a REG or SUBREG) at *LOC.  A hard
   register spanning several machine registers is split into one ref
   per register, plus a df_mw_hardreg record that ties the group
   together for REG_DEAD/REG_UNUSED note building.  A pseudo gets a
   single ref.  */
2882 df_ref_record (enum df_ref_class cl,
2883 struct df_collection_rec *collection_rec,
2885 basic_block bb, struct df_insn_info *insn_info,
2886 enum df_ref_type ref_type,
2888 int width, int offset, enum machine_mode mode)
2892 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2894 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2895 if (regno < FIRST_PSEUDO_REGISTER)
2897 struct df_mw_hardreg *hardreg = NULL;
2898 struct df_scan_problem_data *problem_data
2899 = (struct df_scan_problem_data *) df_scan->problem_data;
2901 unsigned int endregno;
/* For a SUBREG, narrow [regno, endregno) to the registers the
   subreg actually touches.  */
2904 if (GET_CODE (reg) == SUBREG)
2906 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2907 SUBREG_BYTE (reg), GET_MODE (reg));
2908 endregno = regno + subreg_nregs (reg);
2911 endregno = END_HARD_REGNO (reg);
2913 /* If this is a multiword hardreg, we create some extra
2914 datastructures that will enable us to easily build REG_DEAD
2915 and REG_UNUSED notes. */
2916 if ((endregno != regno + 1) && insn_info)
2918 /* Sets to a subreg of a multiword register are partial.
2919 Sets to a non-subreg of a multiword register are not. */
2920 if (GET_CODE (reg) == SUBREG)
2921 ref_flags |= DF_REF_PARTIAL;
2922 ref_flags |= DF_REF_MW_HARDREG;
2924 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2925 hardreg->type = ref_type;
2926 hardreg->flags = ref_flags;
2927 hardreg->mw_reg = reg;
2928 hardreg->start_regno = regno;
2929 hardreg->end_regno = endregno - 1;
2930 hardreg->mw_order = df->ref_order++;
2931 VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
/* One ref per component hard register.  */
2935 for (i = regno; i < endregno; i++)
2937 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2938 bb, insn_info, ref_type, ref_flags,
2939 width, offset, mode);
2941 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
/* Pseudo register: a single ref suffices.  */
2946 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2947 ref_type, ref_flags, width, offset, mode);
2952 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2953 covered by the outer mode is smaller than that covered by the inner mode,
2954 is a read-modify-write operation.
2955 This function returns true iff the SUBREG X is such a SUBREG. */
/* Return true iff X is a SUBREG whose write is a read-modify-write:
   the outer mode covers fewer natural-size units than the inner mode,
   so the untouched part of the inner register survives.  */
2958 df_read_modify_subreg_p (rtx x)
2960 unsigned int isize, osize;
/* Non-SUBREGs trivially are not read-modify-write.  */
2961 if (GET_CODE (x) != SUBREG)
2963 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2964 osize = GET_MODE_SIZE (GET_MODE (x));
/* Not paradoxical, and the inner reg is wider than one natural unit.  */
2965 return isize > osize
2966 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2970 /* Process all the registers defined in the rtx, X.
2971 Autoincrement/decrement definitions will be picked up by
/* Record the definitions made by X (a SET, CLOBBER or EXPR_LIST
   element).  Peels PARALLEL destinations, STRICT_LOW_PART and
   ZERO_EXTRACT wrappers, then records the def (and, for the stack
   pointer, a matching use) via df_ref_record.  */
2975 df_def_record_1 (struct df_collection_rec *collection_rec,
2976 rtx x, basic_block bb, struct df_insn_info *insn_info,
2983 enum machine_mode mode = VOIDmode;
2984 enum df_ref_class cl = DF_REF_REGULAR;
2986 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2988 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2991 loc = &SET_DEST (x);
2994 /* It is legal to have a set destination be a parallel. */
2995 if (GET_CODE (dst) == PARALLEL)
2999 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
3001 rtx temp = XVECEXP (dst, 0, i);
3002 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
3003 || GET_CODE (temp) == SET)
3004 df_def_record_1 (collection_rec,
3005 temp, bb, insn_info,
3006 GET_CODE (temp) == CLOBBER
3007 ? flags | DF_REF_MUST_CLOBBER : flags);
/* STRICT_LOW_PART writes only part of the register: partial RMW.  */
3012 if (GET_CODE (dst) == STRICT_LOW_PART)
3014 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
3016 loc = &XEXP (dst, 0);
/* A ZERO_EXTRACT destination writes a bitfield; with constant
   position/size the exact extract geometry is recorded.  */
3020 if (GET_CODE (dst) == ZERO_EXTRACT)
3022 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
3024 if (CONST_INT_P (XEXP (dst, 1))
3025 && CONST_INT_P (XEXP (dst, 2)))
3027 width = INTVAL (XEXP (dst, 1));
3028 offset = INTVAL (XEXP (dst, 2));
3029 mode = GET_MODE (dst);
3030 cl = DF_REF_EXTRACT;
3033 loc = &XEXP (dst, 0);
3037 /* At this point if we do not have a reg or a subreg, just return. */
3040 df_ref_record (cl, collection_rec,
3041 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
3042 width, offset, mode);
3044 /* We want to keep sp alive everywhere - by making all
3045 writes to sp also use of sp. */
3046 if (REGNO (dst) == STACK_POINTER_REGNUM)
3047 df_ref_record (DF_REF_BASE, collection_rec,
3048 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags,
3049 width, offset, mode);
/* SUBREG of a REG: classify as read-modify-write or plain subreg def. */
3051 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
3053 if (df_read_modify_subreg_p (dst))
3054 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
3056 flags |= DF_REF_SUBREG;
3058 df_ref_record (cl, collection_rec,
3059 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags,
3060 width, offset, mode);
3065 /* Process all the registers defined in the pattern rtx, X. */
/* Record all register definitions in pattern X, dispatching on its
   top-level code: single SET/CLOBBER, conditionally executed body, or
   each element of a PARALLEL.  */
3068 df_defs_record (struct df_collection_rec *collection_rec,
3069 rtx x, basic_block bb, struct df_insn_info *insn_info,
3072 RTX_CODE code = GET_CODE (x);
3074 if (code == SET || code == CLOBBER)
3076 /* Mark the single def within the pattern. */
3077 int clobber_flags = flags;
3078 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
3079 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
3081 else if (code == COND_EXEC)
/* Defs under COND_EXEC may not happen: mark them conditional.  */
3083 df_defs_record (collection_rec, COND_EXEC_CODE (x),
3084 bb, insn_info, DF_REF_CONDITIONAL);
3086 else if (code == PARALLEL)
3090 /* Mark the multiple defs within the pattern. */
3091 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3092 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3097 /* Process all the registers used in the rtx at address LOC.
3099 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
3100 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
3101 fields if they were constants. Otherwise they should be -1 if
3102 those flags were set. */
/* Recursively record every register USE in the rtx at *LOC.  The big
   switch on the rtx code handles the special forms (MEM, SUBREG,
   extracts, SET destinations, asm, auto-modify addresses); the default
   path walks the operand format string.  NOTE(review): many case
   labels and braces are elided in this excerpt, so comments mark the
   visible arms only.  */
3105 df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
3106 rtx *loc, enum df_ref_type ref_type,
3107 basic_block bb, struct df_insn_info *insn_info,
3109 int width, int offset, enum machine_mode mode)
3118 code = GET_CODE (x);
3135 /* If we are clobbering a MEM, mark any registers inside the address
3137 if (MEM_P (XEXP (x, 0)))
3138 df_uses_record (cl, collection_rec,
3139 &XEXP (XEXP (x, 0), 0),
3140 DF_REF_REG_MEM_STORE,
3142 flags, width, offset, mode);
3144 /* If we're clobbering a REG then we have a def so ignore. */
/* MEM arm: regs in the address are loads; only DF_REF_IN_NOTE
   survives from the incoming flags.  */
3148 df_uses_record (cl, collection_rec,
3149 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3150 bb, insn_info, flags & DF_REF_IN_NOTE,
3151 width, offset, mode);
/* SUBREG arm: a subreg use touches only part of the register.  */
3155 /* While we're here, optimize this case. */
3156 flags |= DF_REF_PARTIAL;
3157 /* In case the SUBREG is not of a REG, do not optimize. */
3158 if (!REG_P (SUBREG_REG (x)))
3160 loc = &SUBREG_REG (x);
3161 df_uses_record (cl, collection_rec, loc, ref_type, bb, insn_info, flags,
3162 width, offset, mode);
3165 /* ... Fall through ... */
/* REG (and fallen-through SUBREG-of-REG): record the use itself.  */
3168 df_ref_record (cl, collection_rec,
3169 x, loc, bb, insn_info,
3171 width, offset, mode);
3177 /* If the parameters to the zero or sign extract are
3178 constants, strip them off and recurse, otherwise there is
3179 no information that we can gain from this operation. */
3180 if (CONST_INT_P (XEXP (x, 1))
3181 && CONST_INT_P (XEXP (x, 2)))
3183 width = INTVAL (XEXP (x, 1));
3184 offset = INTVAL (XEXP (x, 2));
3185 mode = GET_MODE (x);
3187 if (code == ZERO_EXTRACT)
3188 flags |= DF_REF_ZERO_EXTRACT;
3190 flags |= DF_REF_SIGN_EXTRACT;
3192 df_uses_record (DF_REF_EXTRACT, collection_rec,
3193 &XEXP (x, 0), ref_type, bb, insn_info, flags,
3194 width, offset, mode);
/* SET arm: record uses in the source, then inspect the destination
   for the forms that also read (subreg RMW, mem address, extracts).  */
3202 rtx dst = SET_DEST (x);
3203 gcc_assert (!(flags & DF_REF_IN_NOTE));
3204 df_uses_record (cl, collection_rec,
3205 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags,
3206 width, offset, mode);
3208 switch (GET_CODE (dst))
3211 if (df_read_modify_subreg_p (dst))
3213 df_uses_record (cl, collection_rec, &SUBREG_REG (dst),
3214 DF_REF_REG_USE, bb, insn_info,
3215 flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
3216 width, offset, mode);
/* Storing through MEM: the address registers are used.  */
3227 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3228 DF_REF_REG_MEM_STORE, bb, insn_info, flags,
3229 width, offset, mode);
3231 case STRICT_LOW_PART:
3233 rtx *temp = &XEXP (dst, 0);
3234 /* A strict_low_part uses the whole REG and not just the
3236 dst = XEXP (dst, 0);
3237 df_uses_record (cl, collection_rec,
3238 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3239 DF_REF_REG_USE, bb, insn_info,
3240 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
3241 width, offset, mode);
/* ZERO_EXTRACT destination with constant position/size.  */
3246 if (CONST_INT_P (XEXP (dst, 1))
3247 && CONST_INT_P (XEXP (dst, 2)))
3249 width = INTVAL (XEXP (dst, 1));
3250 offset = INTVAL (XEXP (dst, 2));
3251 mode = GET_MODE (dst);
3252 if (GET_CODE (XEXP (dst,0)) == MEM)
3254 /* Handle the case of zero_extract(mem(...)) in the set dest.
3255 This special case is allowed only if the mem is a single byte and
3256 is useful to set a bitfield in memory. */
3257 df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (XEXP (dst,0), 0),
3258 DF_REF_REG_MEM_STORE, bb, insn_info,
3259 DF_REF_ZERO_EXTRACT,
3260 width, offset, mode);
3264 df_uses_record (DF_REF_EXTRACT, collection_rec, &XEXP (dst, 0),
3265 DF_REF_REG_USE, bb, insn_info,
3266 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3267 width, offset, mode);
/* Non-constant extract parameters: record uses of position, size
   and the extracted object itself.  */
3272 df_uses_record (cl, collection_rec, &XEXP (dst, 1),
3273 DF_REF_REG_USE, bb, insn_info, flags,
3274 width, offset, mode);
3275 df_uses_record (cl, collection_rec, &XEXP (dst, 2),
3276 DF_REF_REG_USE, bb, insn_info, flags,
3277 width, offset, mode);
3278 df_uses_record (cl, collection_rec, &XEXP (dst, 0),
3279 DF_REF_REG_USE, bb, insn_info,
3280 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3281 width, offset, mode);
3297 case UNSPEC_VOLATILE:
3301 /* Traditional and volatile asm instructions must be
3302 considered to use and clobber all hard registers, all
3303 pseudo-registers and all of memory. So must TRAP_IF and
3304 UNSPEC_VOLATILE operations.
3306 Consider for instance a volatile asm that changes the fpu
3307 rounding mode. An insn should not be moved across this
3308 even if it only uses pseudo-regs because it might give an
3309 incorrectly rounded result.
3311 However, flow.c's liveness computation did *not* do this,
3312 giving the reasoning as " ?!? Unfortunately, marking all
3313 hard registers as live causes massive problems for the
3314 register allocator and marking all pseudos as live creates
3315 mountains of uninitialized variable warnings."
3317 In order to maintain the status quo with regard to liveness
3318 and uses, we do what flow.c did and just mark any regs we
3319 can find in ASM_OPERANDS as used. In global asm insns are
3320 scanned and regs_asm_clobbered is filled out.
3322 For all ASM_OPERANDS, we must traverse the vector of input
3323 operands. We can not just fall through here since then we
3324 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3325 which do not indicate traditional asms unlike their normal
3327 if (code == ASM_OPERANDS)
3331 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3332 df_uses_record (cl, collection_rec, &ASM_OPERANDS_INPUT (x, j),
3333 DF_REF_REG_USE, bb, insn_info, flags,
3334 width, offset, mode);
/* VAR_LOCATION (debug) arm: scan the location expression.  */
3341 df_uses_record (cl, collection_rec,
3342 &PAT_VAR_LOCATION_LOC (x),
3343 DF_REF_REG_USE, bb, insn_info,
3344 flags, width, offset, mode);
/* Auto-modify address arm (pre/post inc/dec/modify): the modified
   register is both defined and used.  */
3353 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3354 /* Catch the def of the register being modified. */
3355 df_ref_record (cl, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3358 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
3359 width, offset, mode);
3361 /* ... Fall through to handle uses ... */
3367 /* Recursively scan the operands of this expression. */
3369 const char *fmt = GET_RTX_FORMAT (code);
3372 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3376 /* Tail recursive case: save a function call level. */
3382 df_uses_record (cl, collection_rec, &XEXP (x, i), ref_type,
3383 bb, insn_info, flags,
3384 width, offset, mode);
3386 else if (fmt[i] == 'E')
3389 for (j = 0; j < XVECLEN (x, i); j++)
3390 df_uses_record (cl, collection_rec,
3391 &XVECEXP (x, i, j), ref_type,
3392 bb, insn_info, flags,
3393 width, offset, mode);
3402 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
/* A conditional def may leave the old value in place, so each
   DF_REF_CONDITIONAL def in COLLECTION_REC gets a matching use of the
   same register, with the CONDITIONAL flag stripped.  */
3405 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3410 for (ix = 0; VEC_iterate (df_ref, collection_rec->def_vec, ix, ref); ++ix)
3412 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3416 enum machine_mode mode = VOIDmode;
/* Propagate extract geometry when the def carried it.  */
3419 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3421 width = DF_REF_EXTRACT_WIDTH (ref);
3422 offset = DF_REF_EXTRACT_OFFSET (ref);
3423 mode = DF_REF_EXTRACT_MODE (ref);
3426 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3427 DF_REF_LOC (ref), DF_REF_BB (ref),
3428 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3429 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3430 width, offset, mode);
/* The use refers to exactly the same (possibly split) regno.  */
3431 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3437 /* Get call's extra defs and uses. */
/* Add the implicit defs and uses of a CALL insn to COLLECTION_REC:
   argument/clobber notes, the honorary stack-pointer use, global
   registers, and clobbers of the call-clobbered set (with sibcall
   refinements).  */
3440 df_get_call_refs (struct df_collection_rec * collection_rec,
3442 struct df_insn_info *insn_info,
3448 bool is_sibling_call;
3451 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3453 /* Do not generate clobbers for registers that are the result of the
3454 call. This causes ordering problems in the chain building code
3455 depending on which def is seen first. */
3456 for (i = 0; VEC_iterate (df_ref, collection_rec->def_vec, i, def); ++i)
3457 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3459 /* Record the registers used to pass arguments, and explicitly
3460 noted as clobbered. */
3461 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3462 note = XEXP (note, 1))
3464 if (GET_CODE (XEXP (note, 0)) == USE)
3465 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (XEXP (note, 0), 0),
3466 DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
3468 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
/* Clobbered REGs become defs, unless already defined by the call
   result; clobbered MEMs are scanned for address uses.  */
3470 if (REG_P (XEXP (XEXP (note, 0), 0)))
3472 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3473 if (!bitmap_bit_p (defs_generated, regno))
3474 df_defs_record (collection_rec, XEXP (note, 0), bb,
3478 df_uses_record (DF_REF_REGULAR, collection_rec, &XEXP (note, 0),
3479 DF_REF_REG_USE, bb, insn_info, flags, -1, -1,
3484 /* The stack ptr is used (honorarily) by a CALL insn. */
3485 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3486 NULL, bb, insn_info, DF_REF_REG_USE,
3487 DF_REF_CALL_STACK_USAGE | flags,
3490 /* Calls may also reference any of the global registers,
3491 so they are recorded as used. */
3492 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Global registers are both used and defined across the call.  */
3495 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3496 NULL, bb, insn_info, DF_REF_REG_USE, flags, -1, -1,
3498 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3499 NULL, bb, insn_info, DF_REF_REG_DEF, flags, -1, -1,
/* MAY_CLOBBER defs for call-clobbered regs, skipping globals, regs
   already defined here, and sibcall regs that only feed the caller's
   return value.  */
3503 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3504 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
3506 if (!global_regs[ui]
3507 && (!bitmap_bit_p (defs_generated, ui))
3508 && (!is_sibling_call
3509 || !bitmap_bit_p (df->exit_block_uses, ui)
3510 || refers_to_regno_p (ui, ui+1,
3511 crtl->return_rtx, NULL)))
3512 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
3513 NULL, bb, insn_info, DF_REF_REG_DEF,
3514 DF_REF_MAY_CLOBBER | flags,
3518 BITMAP_FREE (defs_generated);
3522 /* Collect all refs in the INSN. This function is free of any
3523 side-effect - it will create and return a lists of df_ref's in the
3524 COLLECTION_REC without putting those refs into existing ref chains
/* Collect every ref in INSN into COLLECTION_REC: defs from the
   pattern, eq_uses from REG_EQUIV/REG_EQUAL notes, call side effects,
   plain uses, and synthesized uses for conditional defs.  Side-effect
   free with respect to the existing chains; finishes by canonizing
   the collection.  */
3528 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3529 basic_block bb, struct df_insn_info *insn_info)
3532 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3534 /* Clear out the collection record. */
3535 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3536 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3537 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3538 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3540 /* Record register defs. */
3541 df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3543 /* Process REG_EQUIV/REG_EQUAL notes. */
3544 for (note = REG_NOTES (insn_info->insn); note;
3545 note = XEXP (note, 1))
3547 switch (REG_NOTE_KIND (note))
/* Equivalence notes contribute DF_REF_IN_NOTE uses (eq_uses).  */
3551 df_uses_record (DF_REF_REGULAR, collection_rec,
3552 &XEXP (note, 0), DF_REF_REG_USE,
3553 bb, insn_info, DF_REF_IN_NOTE, -1, -1, VOIDmode);
3555 case REG_NON_LOCAL_GOTO:
3556 /* The frame ptr is used by a non-local goto. */
3557 df_ref_record (DF_REF_BASE, collection_rec,
3558 regno_reg_rtx[FRAME_POINTER_REGNUM],
3559 NULL, bb, insn_info,
3560 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3561 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3562 df_ref_record (DF_REF_BASE, collection_rec,
3563 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3564 NULL, bb, insn_info,
3565 DF_REF_REG_USE, 0, -1, -1, VOIDmode);
/* Calls add their implicit refs; conditional when under COND_EXEC.  */
3573 if (CALL_P (insn_info->insn))
3574 df_get_call_refs (collection_rec, bb, insn_info,
3575 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3577 /* Record the register uses. */
3578 df_uses_record (DF_REF_REGULAR, collection_rec,
3579 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0,
3582 /* DF_REF_CONDITIONAL needs corresponding USES. */
3584 df_get_conditional_uses (collection_rec);
3586 df_canonize_collection_rec (collection_rec);
3589 /* Recompute the luids for the insns in BB. */
/* Renumber the logical uids (luids) of the insns in BB in order,
   creating insn records on the fly for non-insns (e.g. freshly
   inserted labels) that lack one.  */
3592 df_recompute_luids (basic_block bb)
3597 df_grow_insn_info ();
3599 /* Scan the block an insn at a time from beginning to end. */
3600 FOR_BB_INSNS (bb, insn)
3602 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3603 /* Inserting labels does not always trigger the incremental
/* Only non-insns may be missing a record here.  */
3607 gcc_assert (!INSN_P (insn));
3608 insn_info = df_insn_create_insn_record (insn);
3611 DF_INSN_INFO_LUID (insn_info) = luid;
3618 /* Collect all artificial refs at the block level for BB and add them
3619 to COLLECTION_REC. */
/* Collect BB's artificial (non-insn) refs into COLLECTION_REC:
   entry/exit block defs and uses, EH handler data registers,
   the hard frame pointer at non-local goto targets, and the
   per-block artificial use set.  */
3622 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3624 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3625 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3626 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3627 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3629 if (bb->index == ENTRY_BLOCK)
3631 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3634 else if (bb->index == EXIT_BLOCK)
3636 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3640 #ifdef EH_RETURN_DATA_REGNO
3641 if (bb_has_eh_pred (bb))
3644 /* Mark the registers that will contain data for the handler. */
3647 unsigned regno = EH_RETURN_DATA_REGNO (i);
3648 if (regno == INVALID_REGNUM)
/* These defs sit "at top" of the block, before any insn.  */
3650 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3651 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1,
3657 /* Add the hard_frame_pointer if this block is the target of a
3659 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3660 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3661 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, VOIDmode);
3663 /* Add the artificial uses. */
3664 if (bb->index >= NUM_FIXED_BLOCKS)
/* EH blocks have their own artificial-use set.  */
3668 bitmap au = bb_has_eh_pred (bb)
3669 ? df->eh_block_artificial_uses
3670 : df->regular_block_artificial_uses;
3672 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3674 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3675 bb, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
3679 df_canonize_collection_rec (collection_rec);
3683 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
/* (Re)record all refs in block BB_INDEX.  When SCAN_INSNS, every insn
   gets a fresh insn record, luid, and ref chains; block-level
   artificial refs are always recollected.  Marks the block dirty so
   downstream problems (LR, LIVE) reprocess it.  */
3686 df_bb_refs_record (int bb_index, bool scan_insns)
3688 basic_block bb = BASIC_BLOCK (bb_index);
3691 struct df_scan_bb_info *bb_info;
3692 struct df_collection_rec collection_rec;
3697 bb_info = df_scan_get_bb_info (bb_index);
3699 /* Need to make sure that there is a record in the basic block info. */
3702 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3703 df_scan_set_bb_info (bb_index, bb_info);
3704 bb_info->artificial_defs = NULL;
3705 bb_info->artificial_uses = NULL;
/* Stack-allocated scratch vectors, reused for every insn in the block. */
3708 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3709 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3710 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3711 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3714 /* Scan the block an insn at a time from beginning to end. */
3715 FOR_BB_INSNS (bb, insn)
3717 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3718 gcc_assert (!insn_info);
3720 insn_info = df_insn_create_insn_record (insn);
3723 /* Record refs within INSN. */
3724 DF_INSN_INFO_LUID (insn_info) = luid++;
3725 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3726 df_refs_add_to_chains (&collection_rec, bb, insn);
/* Non-scanned path: still assign a luid.  NOTE(review): the branch
   separating this from the scanning path is elided in this excerpt.  */
3728 DF_INSN_INFO_LUID (insn_info) = luid;
3731 /* Other block level artificial refs */
3732 df_bb_refs_collect (&collection_rec, bb);
3733 df_refs_add_to_chains (&collection_rec, bb, NULL);
3735 VEC_free (df_ref, stack, collection_rec.def_vec);
3736 VEC_free (df_ref, stack, collection_rec.use_vec);
3737 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3738 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3740 /* Now that the block has been processed, set the block as dirty so
3741 LR and LIVE will get it processed. */
3742 df_set_bb_dirty (bb);
3746 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
/* Fill REGULAR_BLOCK_ARTIFICIAL_USES with the hard registers that
   must be treated as used in every ordinary (non-entry/exit, non-EH)
   block: frame/arg/pic pointers as applicable, the stack pointer,
   and EH_USES registers.  */
3750 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3756 bitmap_clear (regular_block_artificial_uses);
3758 if (reload_completed)
3760 if (frame_pointer_needed)
3761 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3764 /* Before reload, there are a few registers that must be forced
3765 live everywhere -- which might not already be the case for
3766 blocks within infinite loops. */
3768 /* Any reference to any pseudo before reload is a potential
3769 reference of the frame pointer. */
3770 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3772 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3773 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3776 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3777 /* Pseudos with argument area equivalences may require
3778 reloading via the argument pointer. */
3779 if (fixed_regs[ARG_POINTER_REGNUM])
3780 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3783 /* Any constant, or pseudo with constant equivalences, may
3784 require reloading from memory using the pic register. */
3785 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3786 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3787 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3789 /* The all-important stack pointer must always be live. */
3790 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3793 /* EH_USES registers are used:
3794 1) at all insns that might throw (calls or with -fnon-call-exceptions
3797 3) to support backtraces and/or debugging, anywhere between their
3798 initialization and where the saved registers are restored
3799 from them, including the cases where we don't reach the epilogue
3800 (noreturn call or infinite loop). */
3801 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3803 bitmap_set_bit (regular_block_artificial_uses, i);
3808 /* Get the artificial use set for an eh block. */
/* Fill EH_BLOCK_ARTIFICIAL_USES with the hard registers the exception
   handling machinery may need in landing-pad blocks (frame and arg
   pointers after reload).  */
3811 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3813 bitmap_clear (eh_block_artificial_uses);
3815 /* The following code (down thru the arg_pointer setting APPEARS
3816 to be necessary because there is nothing that actually
3817 describes what the exception handling code may actually need
3819 if (reload_completed)
3821 if (frame_pointer_needed)
3823 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3824 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3825 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3828 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3829 if (fixed_regs[ARG_POINTER_REGNUM])
3830 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3837 /*----------------------------------------------------------------------------
3838 Specialized hard register scanning functions.
3839 ----------------------------------------------------------------------------*/
3842 /* Mark a register in SET. Hard registers in large modes get all
3843 of their component registers set as well. */
/* Set REG's bit(s) in the bitmap VSET.  A hard register in a wide
   mode sets the bit of every component hard register; a pseudo sets
   a single bit.  */
3846 df_mark_reg (rtx reg, void *vset)
3848 bitmap set = (bitmap) vset;
3849 int regno = REGNO (reg);
/* BLKmode has no well-defined register count.  */
3851 gcc_assert (GET_MODE (reg) != BLKmode);
3853 if (regno < FIRST_PSEUDO_REGISTER)
3855 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3856 bitmap_set_range (set, regno, n);
3859 bitmap_set_bit (set, regno);
3863 /* Set the bit for regs that are considered being defined at the entry. */
/* Fill ENTRY_BLOCK_DEFS with the hard registers conservatively
   considered defined on function entry: incoming argument registers,
   the stack pointer, callee-saved regs once prologue generation is
   done, struct-value and static-chain registers, frame/arg/pic
   pointers as needed, the incoming return address, plus whatever the
   target's live_on_entry hook adds.  */
3866 df_get_entry_block_def_set (bitmap entry_block_defs)
3871 bitmap_clear (entry_block_defs);
3873 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3875 if (FUNCTION_ARG_REGNO_P (i))
3876 #ifdef INCOMING_REGNO
/* Map the outgoing arg regno to its incoming counterpart.  */
3877 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3879 bitmap_set_bit (entry_block_defs, i);
3883 /* The always important stack pointer. */
3884 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3886 /* Once the prologue has been generated, all of these registers
3887 should just show up in the first regular block. */
3888 if (HAVE_prologue && epilogue_completed)
3890 /* Defs for the callee saved registers are inserted so that the
3891 pushes have some defining location. */
3892 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3893 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3894 bitmap_set_bit (entry_block_defs, i);
/* Register holding the address of a returned aggregate, if any.  */
3897 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3899 bitmap_set_bit (entry_block_defs, REGNO (r));
3901 /* If the function has an incoming STATIC_CHAIN, it has to show up
3902 in the entry def set. */
3903 r = targetm.calls.static_chain (current_function_decl, true);
3905 bitmap_set_bit (entry_block_defs, REGNO (r));
3907 if ((!reload_completed) || frame_pointer_needed)
3909 /* Any reference to any pseudo before reload is a potential
3910 reference of the frame pointer. */
3911 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3912 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3913 /* If they are different, also mark the hard frame pointer as live. */
3914 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3915 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3919 /* These registers are live everywhere. */
3920 if (!reload_completed)
3922 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3923 /* Pseudos with argument area equivalences may require
3924 reloading via the argument pointer. */
3925 if (fixed_regs[ARG_POINTER_REGNUM])
3926 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3929 #ifdef PIC_OFFSET_TABLE_REGNUM
3930 /* Any constant, or pseudo with constant equivalences, may
3931 require reloading from memory using the pic register. */
3932 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3933 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3934 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3938 #ifdef INCOMING_RETURN_ADDR_RTX
3939 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3940 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
/* Let the target add anything else it knows is defined on entry.  */
3943 targetm.live_on_entry (entry_block_defs);
3947 /* Return the (conservative) set of hard registers that are defined on
3948 entry to the function.
3949 It uses df->entry_block_defs to determine which register
3950 reference to include. */
3953 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3954 bitmap entry_block_defs)
3959 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3961 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3962 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1,
3966 df_canonize_collection_rec (collection_rec);
3970 /* Record the (conservative) set of hard registers that are defined on
3971 entry to the function. */
3974 df_record_entry_block_defs (bitmap entry_block_defs)
3976 struct df_collection_rec collection_rec;
3977 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3978 collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3979 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3981 /* Process bb_refs chain */
3982 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3983 VEC_free (df_ref, stack, collection_rec.def_vec);
3987 /* Update the defs in the entry block. */
3990 df_update_entry_block_defs (void)
3992 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3993 bool changed = false;
3995 df_get_entry_block_def_set (refs);
3996 if (df->entry_block_defs)
3998 if (!bitmap_equal_p (df->entry_block_defs, refs))
4000 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
4001 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
4002 df_ref_chain_delete (bb_info->artificial_defs);
4003 bb_info->artificial_defs = NULL;
4009 struct df_scan_problem_data *problem_data
4010 = (struct df_scan_problem_data *) df_scan->problem_data;
4011 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4017 df_record_entry_block_defs (refs);
4018 bitmap_copy (df->entry_block_defs, refs);
4019 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
4025 /* Set the bit for regs that are considered being used at the exit. */
4028 df_get_exit_block_use_set (bitmap exit_block_uses)
4032 bitmap_clear (exit_block_uses);
4034 /* Stack pointer is always live at the exit. */
4035 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
4037 /* Mark the frame pointer if needed at the end of the function.
4038 If we end up eliminating it, it will be removed from the live
4039 list of each basic block by reload. */
4041 if ((!reload_completed) || frame_pointer_needed)
4043 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
4044 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4045 /* If they are different, also mark the hard frame pointer as live. */
4046 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4047 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
4051 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4052 /* Many architectures have a GP register even without flag_pic.
4053 Assume the pic register is not in use, or will be handled by
4054 other means, if it is not fixed. */
4055 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4056 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4057 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
4060 /* Mark all global registers, and all registers used by the
4061 epilogue as being live at the end of the function since they
4062 may be referenced by our caller. */
4063 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4064 if (global_regs[i] || EPILOGUE_USES (i))
4065 bitmap_set_bit (exit_block_uses, i);
4067 if (HAVE_epilogue && epilogue_completed)
4069 /* Mark all call-saved registers that we actually used. */
4070 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4071 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
4072 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4073 bitmap_set_bit (exit_block_uses, i);
4076 #ifdef EH_RETURN_DATA_REGNO
4077 /* Mark the registers that will contain data for the handler. */
4078 if (reload_completed && crtl->calls_eh_return)
4081 unsigned regno = EH_RETURN_DATA_REGNO (i);
4082 if (regno == INVALID_REGNUM)
4084 bitmap_set_bit (exit_block_uses, regno);
4088 #ifdef EH_RETURN_STACKADJ_RTX
4089 if ((!HAVE_epilogue || ! epilogue_completed)
4090 && crtl->calls_eh_return)
4092 rtx tmp = EH_RETURN_STACKADJ_RTX;
4093 if (tmp && REG_P (tmp))
4094 df_mark_reg (tmp, exit_block_uses);
4098 #ifdef EH_RETURN_HANDLER_RTX
4099 if ((!HAVE_epilogue || ! epilogue_completed)
4100 && crtl->calls_eh_return)
4102 rtx tmp = EH_RETURN_HANDLER_RTX;
4103 if (tmp && REG_P (tmp))
4104 df_mark_reg (tmp, exit_block_uses);
4108 /* Mark function return value. */
4109 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
4113 /* Return the refs of hard registers that are used in the exit block.
4114 It uses df->exit_block_uses to determine register to include. */
4117 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
4122 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
4123 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
4124 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
4126 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4127 /* It is deliberate that this is not put in the exit block uses but
4128 I do not know why. */
4129 if (reload_completed
4130 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
4131 && bb_has_eh_pred (EXIT_BLOCK_PTR)
4132 && fixed_regs[ARG_POINTER_REGNUM])
4133 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
4134 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, VOIDmode);
4137 df_canonize_collection_rec (collection_rec);
4141 /* Record the set of hard registers that are used in the exit block.
4142 It uses df->exit_block_uses to determine which bit to include. */
4145 df_record_exit_block_uses (bitmap exit_block_uses)
4147 struct df_collection_rec collection_rec;
4148 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4149 collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
4151 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
4153 /* Process bb_refs chain */
4154 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
4155 VEC_free (df_ref, stack, collection_rec.use_vec);
4159 /* Update the uses in the exit block. */
4162 df_update_exit_block_uses (void)
4164 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
4165 bool changed = false;
4167 df_get_exit_block_use_set (refs);
4168 if (df->exit_block_uses)
4170 if (!bitmap_equal_p (df->exit_block_uses, refs))
4172 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4173 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4174 df_ref_chain_delete (bb_info->artificial_uses);
4175 bb_info->artificial_uses = NULL;
4181 struct df_scan_problem_data *problem_data
4182 = (struct df_scan_problem_data *) df_scan->problem_data;
4183 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4189 df_record_exit_block_uses (refs);
4190 bitmap_copy (df->exit_block_uses, refs);
4191 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4196 static bool initialized = false;
4199 /* Initialize some platform specific structures. */
4202 df_hard_reg_init (void)
4204 #ifdef ELIMINABLE_REGS
4206 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4211 /* Record which registers will be eliminated. We use this in
4213 CLEAR_HARD_REG_SET (elim_reg_set);
4215 #ifdef ELIMINABLE_REGS
4216 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4217 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4219 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4226 /* Recompute the parts of scanning that are based on regs_ever_live
4227 because something changed in that array. */
4230 df_update_entry_exit_and_calls (void)
4234 df_update_entry_block_defs ();
4235 df_update_exit_block_uses ();
4237 /* The call insns need to be rescanned because there may be changes
4238 in the set of registers clobbered across the call. */
4242 FOR_BB_INSNS (bb, insn)
4244 if (INSN_P (insn) && CALL_P (insn))
4245 df_insn_rescan (insn);
4251 /* Return true if hard REG is actually used in the some instruction.
4252 There are a fair number of conditions that affect the setting of
4253 this array. See the comment in df.h for df->hard_regs_live_count
4254 for the conditions that this array is set. */
4257 df_hard_reg_used_p (unsigned int reg)
4259 return df->hard_regs_live_count[reg] != 0;
4263 /* A count of the number of times REG is actually used in the some
4264 instruction. There are a fair number of conditions that affect the
4265 setting of this array. See the comment in df.h for
4266 df->hard_regs_live_count for the conditions that this array is
4271 df_hard_reg_used_count (unsigned int reg)
4273 return df->hard_regs_live_count[reg];
4277 /* Get the value of regs_ever_live[REGNO]. */
4280 df_regs_ever_live_p (unsigned int regno)
4282 return regs_ever_live[regno];
4286 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4287 to change, schedule that change for the next update. */
4290 df_set_regs_ever_live (unsigned int regno, bool value)
4292 if (regs_ever_live[regno] == value)
4295 regs_ever_live[regno] = value;
4297 df->redo_entry_and_exit = true;
4301 /* Compute "regs_ever_live" information from the underlying df
4302 information. Set the vector to all false if RESET. */
4305 df_compute_regs_ever_live (bool reset)
4308 bool changed = df->redo_entry_and_exit;
4311 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4313 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4314 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4316 regs_ever_live[i] = true;
4320 df_update_entry_exit_and_calls ();
4321 df->redo_entry_and_exit = false;
4325 /*----------------------------------------------------------------------------
4326 Dataflow ref information verification functions.
4328 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4329 df_reg_chain_verify_unmarked (refs)
4330 df_refs_verify (VEC(df_ref,stack)*, ref*, bool)
4331 df_mws_verify (mw*, mw*, bool)
4332 df_insn_refs_verify (collection_rec, bb, insn, bool)
4333 df_bb_refs_verify (bb, refs, bool)
4335 df_exit_block_bitmap_verify (bool)
4336 df_entry_block_bitmap_verify (bool)
4338 ----------------------------------------------------------------------------*/
4341 /* Mark all refs in the reg chain. Verify that all of the registers
4342 are in the correct chain. */
4345 df_reg_chain_mark (df_ref refs, unsigned int regno,
4346 bool is_def, bool is_eq_use)
4348 unsigned int count = 0;
4350 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4352 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4354 /* If there are no def-use or use-def chains, make sure that all
4355 of the chains are clear. */
4357 gcc_assert (!DF_REF_CHAIN (ref));
4359 /* Check to make sure the ref is in the correct chain. */
4360 gcc_assert (DF_REF_REGNO (ref) == regno);
4362 gcc_assert (DF_REF_REG_DEF_P (ref));
4364 gcc_assert (!DF_REF_REG_DEF_P (ref));
4367 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4369 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4371 if (DF_REF_NEXT_REG (ref))
4372 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4374 DF_REF_REG_MARK (ref);
4380 /* Verify that all of the registers in the chain are unmarked. */
4383 df_reg_chain_verify_unmarked (df_ref refs)
4386 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4387 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4391 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4394 df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
4400 for (ix = 0; VEC_iterate (df_ref, new_rec, ix, new_ref); ++ix)
4402 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4410 /* Abort if fail is called from the function level verifier. If
4411 that is the context, mark this reg as being seem. */
4414 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4415 DF_REF_REG_UNMARK (*old_rec);
4422 gcc_assert (*old_rec == NULL);
4424 return *old_rec == NULL;
4429 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4432 df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
4433 struct df_mw_hardreg **old_rec,
4437 struct df_mw_hardreg *new_reg;
4439 for (ix = 0; VEC_iterate (df_mw_hardreg_ptr, new_rec, ix, new_reg); ++ix)
4441 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4452 gcc_assert (*old_rec == NULL);
4454 return *old_rec == NULL;
4459 /* Return true if the existing insn refs information is complete and
4460 correct. Otherwise (i.e. if there's any missing or extra refs),
4461 return the correct df_ref chain in REFS_RETURN.
4463 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4464 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4465 verification mode instead of the whole function, so unmark
4468 If ABORT_IF_FAIL is set, this function never returns false. */
4471 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4476 bool ret1, ret2, ret3, ret4;
4477 unsigned int uid = INSN_UID (insn);
4478 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4480 df_insn_refs_collect (collection_rec, bb, insn_info);
4482 if (!DF_INSN_UID_DEFS (uid))
4484 /* The insn_rec was created but it was never filled out. */
4491 /* Unfortunately we cannot opt out early if one of these is not
4492 right because the marks will not get cleared. */
4493 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4495 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4497 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4499 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4501 return (ret1 && ret2 && ret3 && ret4);
4505 /* Return true if all refs in the basic block are correct and complete.
4506 Due to df_ref_chain_verify, it will cause all refs
4507 that are verified to have DF_REF_MARK bit set. */
4510 df_bb_verify (basic_block bb)
4513 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4514 struct df_collection_rec collection_rec;
4516 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4517 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
4518 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
4519 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
4520 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
4522 gcc_assert (bb_info);
4524 /* Scan the block, one insn at a time, from beginning to end. */
4525 FOR_BB_INSNS_REVERSE (bb, insn)
4529 df_insn_refs_verify (&collection_rec, bb, insn, true);
4530 df_free_collection_rec (&collection_rec);
4533 /* Do the artificial defs and uses. */
4534 df_bb_refs_collect (&collection_rec, bb);
4535 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4536 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4537 df_free_collection_rec (&collection_rec);
4543 /* Returns true if the entry block has correct and complete df_ref set.
4544 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4547 df_entry_block_bitmap_verify (bool abort_if_fail)
4549 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4552 df_get_entry_block_def_set (entry_block_defs);
4554 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4556 if (!is_eq && abort_if_fail)
4558 print_current_pass (stderr);
4559 fprintf (stderr, "entry_block_defs = ");
4560 df_print_regset (stderr, entry_block_defs);
4561 fprintf (stderr, "df->entry_block_defs = ");
4562 df_print_regset (stderr, df->entry_block_defs);
4566 BITMAP_FREE (entry_block_defs);
4572 /* Returns true if the exit block has correct and complete df_ref set.
4573 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4576 df_exit_block_bitmap_verify (bool abort_if_fail)
4578 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4581 df_get_exit_block_use_set (exit_block_uses);
4583 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4585 if (!is_eq && abort_if_fail)
4587 print_current_pass (stderr);
4588 fprintf (stderr, "exit_block_uses = ");
4589 df_print_regset (stderr, exit_block_uses);
4590 fprintf (stderr, "df->exit_block_uses = ");
4591 df_print_regset (stderr, df->exit_block_uses);
4595 BITMAP_FREE (exit_block_uses);
4601 /* Return true if df_ref information for all insns in all blocks are
4602 correct and complete. */
4605 df_scan_verify (void)
4609 bitmap regular_block_artificial_uses;
4610 bitmap eh_block_artificial_uses;
4615 /* Verification is a 4 step process. */
4617 /* (1) All of the refs are marked by going thru the reg chains. */
4618 for (i = 0; i < DF_REG_SIZE (df); i++)
4620 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4621 == DF_REG_DEF_COUNT(i));
4622 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4623 == DF_REG_USE_COUNT(i));
4624 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4625 == DF_REG_EQ_USE_COUNT(i));
4628 /* (2) There are various bitmaps whose value may change over the
4629 course of the compilation. This step recomputes them to make
4630 sure that they have not slipped out of date. */
4631 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4632 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4634 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4635 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4637 bitmap_ior_into (eh_block_artificial_uses,
4638 regular_block_artificial_uses);
4640 /* Check artificial_uses bitmaps didn't change. */
4641 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4642 df->regular_block_artificial_uses));
4643 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4644 df->eh_block_artificial_uses));
4646 BITMAP_FREE (regular_block_artificial_uses);
4647 BITMAP_FREE (eh_block_artificial_uses);
4649 /* Verify entry block and exit block. These only verify the bitmaps,
4650 the refs are verified in df_bb_verify. */
4651 df_entry_block_bitmap_verify (true);
4652 df_exit_block_bitmap_verify (true);
4654 /* (3) All of the insns in all of the blocks are traversed and the
4655 marks are cleared both in the artificial refs attached to the
4656 blocks and the real refs inside the insns. It is a failure to
4657 clear a mark that has not been set as this means that the ref in
4658 the block or insn was not in the reg chain. */
4663 /* (4) See if all reg chains are traversed a second time. This time
4664 a check is made that the marks are clear. A set mark would be a
4665 from a reg that is not in any insn or basic block. */
4667 for (i = 0; i < DF_REG_SIZE (df); i++)
4669 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4670 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4671 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));