1 /* Write the GIMPLE representation to a file stream.
3 Copyright (C) 2009-2013 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
28 #include "stor-layout.h"
29 #include "stringpool.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-expr.h"
41 #include "gimple-iterator.h"
42 #include "gimple-ssa.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
46 #include "diagnostic-core.h"
48 #include "lto-symtab.h"
49 #include "lto-streamer.h"
50 #include "data-streamer.h"
51 #include "gimple-streamer.h"
52 #include "tree-streamer.h"
53 #include "streamer-hooks.h"
57 /* Clear the line info stored in DATA_IN. */
60 clear_line_info (struct output_block *ob)
62 ob->current_file = NULL;
68 /* Create the output block and return it. SECTION_TYPE is
69 LTO_section_function_body or LTO_static_initializer. */
72 create_output_block (enum lto_section_type section_type)
74 struct output_block *ob = XCNEW (struct output_block);
76 ob->section_type = section_type;
77 ob->decl_state = lto_get_out_decl_state ();
78 ob->main_stream = XCNEW (struct lto_output_stream);
79 ob->string_stream = XCNEW (struct lto_output_stream);
80 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true);
82 if (section_type == LTO_section_function_body)
83 ob->cfg_stream = XCNEW (struct lto_output_stream);
87 ob->string_hash_table.create (37);
88 gcc_obstack_init (&ob->obstack);
94 /* Destroy the output block OB. */
97 destroy_output_block (struct output_block *ob)
99 enum lto_section_type section_type = ob->section_type;
101 ob->string_hash_table.dispose ();
103 free (ob->main_stream);
104 free (ob->string_stream);
105 if (section_type == LTO_section_function_body)
106 free (ob->cfg_stream);
108 streamer_tree_cache_delete (ob->writer_cache);
109 obstack_free (&ob->obstack, NULL);
115 /* Look up NODE in the type table and write the index for it to OB. */
118 output_type_ref (struct output_block *ob, tree node)
120 streamer_write_record_start (ob, LTO_type_ref);
121 lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
125 /* Return true if tree node T is written to various tables. For these
126 nodes, we sometimes want to write their phyiscal representation
127 (via lto_output_tree), and sometimes we need to emit an index
128 reference into a table (via lto_output_tree_ref). */
131 tree_is_indexable (tree t)
133 /* Parameters and return values of functions of variably modified types
134 must go to global stream, because they may be used in the type
136 if (TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
137 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
138 else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
139 || TREE_CODE (t) == TYPE_DECL
140 || TREE_CODE (t) == CONST_DECL)
141 && decl_function_context (t))
143 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
145 /* Variably modified types need to be streamed alongside function
146 bodies because they can refer to local entities. Together with
147 them we have to localize their members as well.
148 ??? In theory that includes non-FIELD_DECLs as well. */
150 && variably_modified_type_p (t, NULL_TREE))
152 else if (TREE_CODE (t) == FIELD_DECL
153 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
156 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
160 /* Output info about new location into bitpack BP.
161 After outputting bitpack, lto_output_location_data has
162 to be done to output actual data. */
165 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
168 expanded_location xloc;
170 loc = LOCATION_LOCUS (loc);
171 bp_pack_value (bp, loc == UNKNOWN_LOCATION, 1);
172 if (loc == UNKNOWN_LOCATION)
175 xloc = expand_location (loc);
177 bp_pack_value (bp, ob->current_file != xloc.file, 1);
178 bp_pack_value (bp, ob->current_line != xloc.line, 1);
179 bp_pack_value (bp, ob->current_col != xloc.column, 1);
181 if (ob->current_file != xloc.file)
182 bp_pack_var_len_unsigned (bp,
183 streamer_string_index (ob, xloc.file,
184 strlen (xloc.file) + 1,
186 ob->current_file = xloc.file;
188 if (ob->current_line != xloc.line)
189 bp_pack_var_len_unsigned (bp, xloc.line);
190 ob->current_line = xloc.line;
192 if (ob->current_col != xloc.column)
193 bp_pack_var_len_unsigned (bp, xloc.column);
194 ob->current_col = xloc.column;
198 /* If EXPR is an indexable tree node, output a reference to it to
199 output block OB. Otherwise, output the physical representation of
203 lto_output_tree_ref (struct output_block *ob, tree expr)
209 output_type_ref (ob, expr);
213 code = TREE_CODE (expr);
217 streamer_write_record_start (ob, LTO_ssa_name_ref);
218 streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
222 streamer_write_record_start (ob, LTO_field_decl_ref);
223 lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
227 streamer_write_record_start (ob, LTO_function_decl_ref);
228 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
232 case DEBUG_EXPR_DECL:
233 gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
235 streamer_write_record_start (ob, LTO_global_decl_ref);
236 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
240 streamer_write_record_start (ob, LTO_const_decl_ref);
241 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
245 gcc_assert (decl_function_context (expr) == NULL);
246 streamer_write_record_start (ob, LTO_imported_decl_ref);
247 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
251 streamer_write_record_start (ob, LTO_type_decl_ref);
252 lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
256 streamer_write_record_start (ob, LTO_namespace_decl_ref);
257 lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
261 streamer_write_record_start (ob, LTO_label_decl_ref);
262 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
266 streamer_write_record_start (ob, LTO_result_decl_ref);
267 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
270 case TRANSLATION_UNIT_DECL:
271 streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
272 lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
276 /* No other node is indexable, so it should have been handled by
283 /* Return true if EXPR is a tree node that can be written to disk. */
286 lto_is_streamable (tree expr)
288 enum tree_code code = TREE_CODE (expr);
290 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
291 name version in lto_output_tree_ref (see output_ssa_names). */
292 return !is_lang_specific (expr)
296 && code != MODIFY_EXPR
298 && code != TARGET_EXPR
300 && code != WITH_CLEANUP_EXPR
301 && code != STATEMENT_LIST
302 && (code == CASE_LABEL_EXPR
304 || TREE_CODE_CLASS (code) != tcc_statement);
308 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
311 get_symbol_initial_value (struct output_block *ob, tree expr)
313 gcc_checking_assert (DECL_P (expr)
314 && TREE_CODE (expr) != FUNCTION_DECL
315 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
317 /* Handle DECL_INITIAL for symbols. */
318 tree initial = DECL_INITIAL (expr);
319 if (TREE_CODE (expr) == VAR_DECL
320 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
321 && !DECL_IN_CONSTANT_POOL (expr)
324 lto_symtab_encoder_t encoder;
325 struct varpool_node *vnode;
327 encoder = ob->decl_state->symtab_node_encoder;
328 vnode = varpool_get_node (expr);
330 || !lto_symtab_encoder_encode_initializer_p (encoder,
332 initial = error_mark_node;
339 /* Write a physical representation of tree node EXPR to output block
340 OB. If REF_P is true, the leaves of EXPR are emitted as references
341 via lto_output_tree_ref. IX is the index into the streamer cache
342 where EXPR is stored. */
345 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
347 /* Pack all the non-pointer fields in EXPR into a bitpack and write
348 the resulting bitpack. */
349 bitpack_d bp = bitpack_create (ob->main_stream);
350 streamer_pack_tree_bitfields (ob, &bp, expr);
351 streamer_write_bitpack (&bp);
353 /* Write all the pointer fields in EXPR. */
354 streamer_write_tree_body (ob, expr, ref_p);
356 /* Write any LTO-specific data to OB. */
358 && TREE_CODE (expr) != FUNCTION_DECL
359 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
361 /* Handle DECL_INITIAL for symbols. */
362 tree initial = get_symbol_initial_value (ob, expr);
363 stream_write_tree (ob, initial, ref_p);
367 /* Write a physical representation of tree node EXPR to output block
368 OB. If REF_P is true, the leaves of EXPR are emitted as references
369 via lto_output_tree_ref. IX is the index into the streamer cache
370 where EXPR is stored. */
373 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
375 if (!lto_is_streamable (expr))
376 internal_error ("tree code %qs is not supported in LTO streams",
377 get_tree_code_name (TREE_CODE (expr)));
379 /* Write the header, containing everything needed to materialize
380 EXPR on the reading side. */
381 streamer_write_tree_header (ob, expr);
383 lto_write_tree_1 (ob, expr, ref_p);
385 /* Mark the end of EXPR. */
386 streamer_write_zero (ob);
389 /* Emit the physical representation of tree node EXPR to output block
390 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
391 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
394 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
395 bool ref_p, bool this_ref_p)
399 gcc_checking_assert (expr != NULL_TREE
400 && !(this_ref_p && tree_is_indexable (expr)));
402 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
404 gcc_assert (!exists_p);
405 if (streamer_handle_as_builtin_p (expr))
407 /* MD and NORMAL builtins do not need to be written out
408 completely as they are always instantiated by the
409 compiler on startup. The only builtins that need to
410 be written out are BUILT_IN_FRONTEND. For all other
411 builtins, we simply write the class and code. */
412 streamer_write_builtin (ob, expr);
414 else if (TREE_CODE (expr) == INTEGER_CST
415 && !TREE_OVERFLOW (expr))
417 /* Shared INTEGER_CST nodes are special because they need their
418 original type to be materialized by the reader (to implement
419 TYPE_CACHED_VALUES). */
420 streamer_write_integer_cst (ob, expr, ref_p);
424 /* This is the first time we see EXPR, write its fields
426 lto_write_tree (ob, expr, ref_p);
442 static unsigned int next_dfs_num;
443 static vec<scc_entry> sccstack;
444 static struct pointer_map_t *sccstate;
445 static struct obstack sccstate_obstack;
448 DFS_write_tree (struct output_block *ob, sccs *from_state,
449 tree expr, bool ref_p, bool this_ref_p);
/* NOTE(review): this region appears truncated by text extraction -- every
   line carries a stray leading number and structural lines (return type,
   braces, some statements) seem to be missing.  The code below is kept
   byte-identical; restore it from upstream GCC before compiling.  */
451 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
452 DFS recurse for all tree edges originating from it. */
455 DFS_write_tree_body (struct output_block *ob,
456 tree expr, sccs *expr_state, bool ref_p)
/* Every outgoing tree edge is visited through this macro, which recurses
   into DFS_write_tree carrying the current SCC state.  */
458 #define DFS_follow_tree_edge(DEST) \
459 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
463 code = TREE_CODE (expr);
/* The walk below is structured by the TS_* structure kinds the code
   contains, mirroring the field layout streamed in streamer_write_tree_body.  */
465 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
467 if (TREE_CODE (expr) != IDENTIFIER_NODE)
468 DFS_follow_tree_edge (TREE_TYPE (expr));
471 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
473 for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
474 DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
477 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
479 DFS_follow_tree_edge (TREE_REALPART (expr));
480 DFS_follow_tree_edge (TREE_IMAGPART (expr));
483 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
485 /* Drop names that were created for anonymous entities. */
487 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
488 && ANON_AGGRNAME_P (DECL_NAME (expr)))
491 DFS_follow_tree_edge (DECL_NAME (expr));
492 DFS_follow_tree_edge (DECL_CONTEXT (expr));
495 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
497 DFS_follow_tree_edge (DECL_SIZE (expr));
498 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
500 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
501 special handling in LTO, it must be handled by streamer hooks. */
503 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
505 /* Do not follow DECL_ABSTRACT_ORIGIN. We cannot handle debug information
506 for early inlining so drop it on the floor instead of ICEing in
509 if ((TREE_CODE (expr) == VAR_DECL
510 || TREE_CODE (expr) == PARM_DECL)
511 && DECL_HAS_VALUE_EXPR_P (expr))
512 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
513 if (TREE_CODE (expr) == VAR_DECL)
514 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
517 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
519 if (TREE_CODE (expr) == TYPE_DECL)
520 DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
521 DFS_follow_tree_edge (DECL_VINDEX (expr));
524 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
526 /* Make sure we don't inadvertently set the assembler name. */
527 if (DECL_ASSEMBLER_NAME_SET_P (expr))
528 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
529 DFS_follow_tree_edge (DECL_SECTION_NAME (expr));
530 DFS_follow_tree_edge (DECL_COMDAT_GROUP (expr));
533 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
535 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
536 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
537 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
538 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
539 DFS_follow_tree_edge (DECL_FCONTEXT (expr));
542 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
544 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
545 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
546 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
549 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
551 DFS_follow_tree_edge (TYPE_SIZE (expr));
552 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
553 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
554 DFS_follow_tree_edge (TYPE_NAME (expr));
555 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
556 reconstructed during fixup. */
557 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
559 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
560 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
561 /* TYPE_CANONICAL is re-computed during type merging, so no need
562 to follow it here. */
563 DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
566 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
568 if (TREE_CODE (expr) == ENUMERAL_TYPE)
569 DFS_follow_tree_edge (TYPE_VALUES (expr));
570 else if (TREE_CODE (expr) == ARRAY_TYPE)
571 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
572 else if (RECORD_OR_UNION_TYPE_P (expr))
573 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
574 DFS_follow_tree_edge (t);
575 else if (TREE_CODE (expr) == FUNCTION_TYPE
576 || TREE_CODE (expr) == METHOD_TYPE)
577 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
579 if (!POINTER_TYPE_P (expr))
580 DFS_follow_tree_edge (TYPE_MINVAL (expr));
581 DFS_follow_tree_edge (TYPE_MAXVAL (expr));
582 if (RECORD_OR_UNION_TYPE_P (expr))
583 DFS_follow_tree_edge (TYPE_BINFO (expr));
586 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
588 DFS_follow_tree_edge (TREE_PURPOSE (expr));
589 DFS_follow_tree_edge (TREE_VALUE (expr));
590 DFS_follow_tree_edge (TREE_CHAIN (expr));
593 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
595 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
596 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
599 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
601 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
602 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
603 DFS_follow_tree_edge (TREE_BLOCK (expr));
606 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
608 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
609 /* ??? FIXME. See also streamer_write_chain. */
610 if (!(VAR_OR_FUNCTION_DECL_P (t)
611 && DECL_EXTERNAL (t)))
612 DFS_follow_tree_edge (t);
614 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
616 /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
617 handle - those that represent inlined function scopes.
618 For the drop rest them on the floor instead of ICEing
620 if (inlined_function_outer_scope_p (expr))
622 tree ultimate_origin = block_ultimate_origin (expr);
623 DFS_follow_tree_edge (ultimate_origin);
625 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
626 information for early inlined BLOCKs so drop it on the floor instead
627 of ICEing in dwarf2out.c. */
629 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
632 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
633 list is re-constructed from BLOCK_SUPERCONTEXT. */
636 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
641 /* Note that the number of BINFO slots has already been emitted in
642 EXPR's header (see streamer_write_tree_header) because this length
643 is needed to build the empty BINFO node on the reader side. */
644 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
645 DFS_follow_tree_edge (t);
646 DFS_follow_tree_edge (BINFO_OFFSET (expr));
647 DFS_follow_tree_edge (BINFO_VTABLE (expr));
648 DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
650 /* The number of BINFO_BASE_ACCESSES has already been emitted in
651 EXPR's bitfield section. */
652 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
653 DFS_follow_tree_edge (t);
655 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
656 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
659 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
664 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
666 DFS_follow_tree_edge (index);
667 DFS_follow_tree_edge (value);
671 if (code == OMP_CLAUSE)
674 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
675 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
676 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
679 #undef DFS_follow_tree_edge
/* NOTE(review): this region appears truncated by text extraction -- every
   line carries a stray leading number and structural lines (braces,
   `break;` statements, some operands) seem to be missing.  Code is kept
   byte-identical; restore from upstream GCC before compiling.  */
682 /* Return a hash value for the tree T. */
685 hash_tree (struct streamer_tree_cache_d *cache, tree t)
/* Mix in the cached hash of an already-visited sibling; nodes not yet in
   CACHE are skipped here (they are in the same SCC and handled by the
   SCC-order-independent mixing in hash_scc).  */
687 #define visit(SIBLING) \
690 if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
691 v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
695 enum tree_code code = TREE_CODE (t);
696 hashval_t v = iterative_hash_host_wide_int (code, 0);
/* First mix in the non-pointer bitfields, grouped by the TS_* structures
   the code contains (parallels streamer_pack_tree_bitfields).  */
699 v = iterative_hash_host_wide_int (TREE_SIDE_EFFECTS (t)
700 | (TREE_CONSTANT (t) << 1)
701 | (TREE_READONLY (t) << 2)
702 | (TREE_PUBLIC (t) << 3), v);
704 v = iterative_hash_host_wide_int (TREE_ADDRESSABLE (t)
705 | (TREE_THIS_VOLATILE (t) << 1), v);
707 v = iterative_hash_host_wide_int (DECL_UNSIGNED (t), v);
709 v = iterative_hash_host_wide_int (TYPE_UNSIGNED (t), v);
711 v = iterative_hash_host_wide_int (TYPE_ARTIFICIAL (t), v);
713 v = iterative_hash_host_wide_int (TREE_NO_WARNING (t), v);
714 v = iterative_hash_host_wide_int (TREE_NOTHROW (t)
715 | (TREE_STATIC (t) << 1)
716 | (TREE_PROTECTED (t) << 2)
717 | (TREE_DEPRECATED (t) << 3), v);
718 if (code != TREE_BINFO)
719 v = iterative_hash_host_wide_int (TREE_PRIVATE (t), v);
721 v = iterative_hash_host_wide_int (TYPE_SATURATING (t)
722 | (TYPE_ADDR_SPACE (t) << 1), v);
723 else if (code == SSA_NAME)
724 v = iterative_hash_host_wide_int (SSA_NAME_IS_DEFAULT_DEF (t), v);
726 if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
728 v = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), v);
729 v = iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), v);
732 if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
734 REAL_VALUE_TYPE r = TREE_REAL_CST (t);
735 v = iterative_hash_host_wide_int (r.cl, v);
736 v = iterative_hash_host_wide_int (r.decimal
738 | (r.signalling << 2)
739 | (r.canonical << 3), v);
740 v = iterative_hash_host_wide_int (r.uexp, v);
741 for (unsigned i = 0; i < SIGSZ; ++i)
742 v = iterative_hash_host_wide_int (r.sig[i], v);
745 if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
747 FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
748 v = iterative_hash_host_wide_int (f.mode, v);
749 v = iterative_hash_host_wide_int (f.data.low, v);
750 v = iterative_hash_host_wide_int (f.data.high, v);
753 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
755 v = iterative_hash_host_wide_int (DECL_MODE (t), v);
756 v = iterative_hash_host_wide_int (DECL_NONLOCAL (t)
757 | (DECL_VIRTUAL_P (t) << 1)
758 | (DECL_IGNORED_P (t) << 2)
759 | (DECL_ABSTRACT (t) << 3)
760 | (DECL_ARTIFICIAL (t) << 4)
761 | (DECL_USER_ALIGN (t) << 5)
762 | (DECL_PRESERVE_P (t) << 6)
763 | (DECL_EXTERNAL (t) << 7)
764 | (DECL_GIMPLE_REG_P (t) << 8), v);
765 v = iterative_hash_host_wide_int (DECL_ALIGN (t), v);
766 if (code == LABEL_DECL)
768 v = iterative_hash_host_wide_int (EH_LANDING_PAD_NR (t), v);
769 v = iterative_hash_host_wide_int (LABEL_DECL_UID (t), v);
771 else if (code == FIELD_DECL)
773 v = iterative_hash_host_wide_int (DECL_PACKED (t)
774 | (DECL_NONADDRESSABLE_P (t) << 1),
776 v = iterative_hash_host_wide_int (DECL_OFFSET_ALIGN (t), v);
778 else if (code == VAR_DECL)
780 v = iterative_hash_host_wide_int (DECL_HAS_DEBUG_EXPR_P (t)
781 | (DECL_NONLOCAL_FRAME (t) << 1),
784 if (code == RESULT_DECL
788 v = iterative_hash_host_wide_int (DECL_BY_REFERENCE (t), v);
790 || code == PARM_DECL)
791 v = iterative_hash_host_wide_int (DECL_HAS_VALUE_EXPR_P (t), v);
795 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
796 v = iterative_hash_host_wide_int (DECL_REGISTER (t), v);
798 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
800 v = iterative_hash_host_wide_int ((DECL_COMMON (t))
801 | (DECL_DLLIMPORT_P (t) << 1)
802 | (DECL_WEAK (t) << 2)
803 | (DECL_SEEN_IN_BIND_EXPR_P (t) << 3)
804 | (DECL_COMDAT (t) << 4)
805 | (DECL_VISIBILITY_SPECIFIED (t) << 6),
807 v = iterative_hash_host_wide_int (DECL_VISIBILITY (t), v);
808 if (code == VAR_DECL)
810 /* DECL_IN_TEXT_SECTION is set during final asm output only. */
811 v = iterative_hash_host_wide_int (DECL_HARD_REGISTER (t)
812 | (DECL_IN_CONSTANT_POOL (t) << 1),
814 v = iterative_hash_host_wide_int (DECL_TLS_MODEL (t), v);
816 if (TREE_CODE (t) == FUNCTION_DECL)
817 v = iterative_hash_host_wide_int (DECL_FINAL_P (t)
818 | (DECL_CXX_CONSTRUCTOR_P (t) << 1)
819 | (DECL_CXX_DESTRUCTOR_P (t) << 2),
821 if (VAR_OR_FUNCTION_DECL_P (t))
822 v = iterative_hash_host_wide_int (DECL_INIT_PRIORITY (t), v);
825 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
827 v = iterative_hash_host_wide_int (DECL_BUILT_IN_CLASS (t), v);
828 v = iterative_hash_host_wide_int (DECL_STATIC_CONSTRUCTOR (t)
829 | (DECL_STATIC_DESTRUCTOR (t) << 1)
830 | (DECL_UNINLINABLE (t) << 2)
831 | (DECL_POSSIBLY_INLINED (t) << 3)
832 | (DECL_IS_NOVOPS (t) << 4)
833 | (DECL_IS_RETURNS_TWICE (t) << 5)
834 | (DECL_IS_MALLOC (t) << 6)
835 | (DECL_IS_OPERATOR_NEW (t) << 7)
836 | (DECL_DECLARED_INLINE_P (t) << 8)
837 | (DECL_STATIC_CHAIN (t) << 9)
838 | (DECL_NO_INLINE_WARNING_P (t) << 10)
839 | (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t) << 11)
840 | (DECL_NO_LIMIT_STACK (t) << 12)
841 | (DECL_DISREGARD_INLINE_LIMITS (t) << 13)
842 | (DECL_PURE_P (t) << 14)
843 | (DECL_LOOPING_CONST_OR_PURE_P (t) << 15), v);
844 if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
845 v = iterative_hash_host_wide_int (DECL_FUNCTION_CODE (t), v);
846 if (DECL_STATIC_DESTRUCTOR (t))
847 v = iterative_hash_host_wide_int (DECL_FINI_PRIORITY (t), v);
850 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
852 v = iterative_hash_host_wide_int (TYPE_MODE (t), v);
853 v = iterative_hash_host_wide_int (TYPE_STRING_FLAG (t)
854 | (TYPE_NO_FORCE_BLK (t) << 1)
855 | (TYPE_NEEDS_CONSTRUCTING (t) << 2)
856 | (TYPE_PACKED (t) << 3)
857 | (TYPE_RESTRICT (t) << 4)
858 | (TYPE_USER_ALIGN (t) << 5)
859 | (TYPE_READONLY (t) << 6), v);
860 if (RECORD_OR_UNION_TYPE_P (t))
862 v = iterative_hash_host_wide_int (TYPE_TRANSPARENT_AGGR (t)
863 | (TYPE_FINAL_P (t) << 1), v);
865 else if (code == ARRAY_TYPE)
866 v = iterative_hash_host_wide_int (TYPE_NONALIASED_COMPONENT (t), v);
867 v = iterative_hash_host_wide_int (TYPE_PRECISION (t), v);
868 v = iterative_hash_host_wide_int (TYPE_ALIGN (t), v);
869 v = iterative_hash_host_wide_int ((TYPE_ALIAS_SET (t) == 0
871 && get_alias_set (t) == 0))
875 if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
876 v = iterative_hash (TRANSLATION_UNIT_LANGUAGE (t),
877 strlen (TRANSLATION_UNIT_LANGUAGE (t)), v);
879 if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
880 v = iterative_hash (t, sizeof (struct cl_target_option), v);
882 if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
883 v = iterative_hash (t, sizeof (struct cl_optimization), v);
885 if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
886 v = iterative_hash_host_wide_int (IDENTIFIER_HASH_VALUE (t), v);
888 if (CODE_CONTAINS_STRUCT (code, TS_STRING))
889 v = iterative_hash (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t), v);
/* Second phase: mix in the hashes of pointed-to nodes via visit().  */
891 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
893 if (POINTER_TYPE_P (t))
895 /* For pointers factor in the pointed-to type recursively as
896 we cannot recurse through only pointers.
897 ??? We can generalize this by keeping track of the
898 in-SCC edges for each tree (or arbitrarily the first
899 such edge) and hashing that in in a second stage
900 (instead of the quadratic mixing of the SCC we do now). */
903 if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
904 x = streamer_tree_cache_get_hash (cache, ix);
906 x = hash_tree (cache, TREE_TYPE (t));
907 v = iterative_hash_hashval_t (x, v);
909 else if (code != IDENTIFIER_NODE)
910 visit (TREE_TYPE (t));
913 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
914 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
915 visit (VECTOR_CST_ELT (t, i));
917 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
919 visit (TREE_REALPART (t));
920 visit (TREE_IMAGPART (t));
923 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
925 /* Drop names that were created for anonymous entities. */
927 && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
928 && ANON_AGGRNAME_P (DECL_NAME (t)))
931 visit (DECL_NAME (t));
932 if (DECL_FILE_SCOPE_P (t))
935 visit (DECL_CONTEXT (t));
938 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
940 visit (DECL_SIZE (t));
941 visit (DECL_SIZE_UNIT (t));
942 visit (DECL_ATTRIBUTES (t));
943 if ((code == VAR_DECL
944 || code == PARM_DECL)
945 && DECL_HAS_VALUE_EXPR_P (t))
946 visit (DECL_VALUE_EXPR (t));
948 && DECL_HAS_DEBUG_EXPR_P (t))
949 visit (DECL_DEBUG_EXPR (t));
950 /* ??? Hash DECL_INITIAL as streamed. Needs the output-block to
951 be able to call get_symbol_initial_value. */
954 if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
956 if (code == TYPE_DECL)
957 visit (DECL_ORIGINAL_TYPE (t));
958 visit (DECL_VINDEX (t));
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
963 if (DECL_ASSEMBLER_NAME_SET_P (t))
964 visit (DECL_ASSEMBLER_NAME (t));
965 visit (DECL_SECTION_NAME (t));
966 visit (DECL_COMDAT_GROUP (t));
969 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
971 visit (DECL_FIELD_OFFSET (t));
972 visit (DECL_BIT_FIELD_TYPE (t));
973 visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
974 visit (DECL_FIELD_BIT_OFFSET (t));
975 visit (DECL_FCONTEXT (t));
978 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
980 visit (DECL_FUNCTION_PERSONALITY (t));
981 visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
982 visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
985 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
987 visit (TYPE_SIZE (t));
988 visit (TYPE_SIZE_UNIT (t));
989 visit (TYPE_ATTRIBUTES (t));
990 visit (TYPE_NAME (t));
991 visit (TYPE_MAIN_VARIANT (t));
992 if (TYPE_FILE_SCOPE_P (t))
995 visit (TYPE_CONTEXT (t));
996 visit (TYPE_STUB_DECL (t));
999 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1001 if (code == ENUMERAL_TYPE)
1002 visit (TYPE_VALUES (t));
1003 else if (code == ARRAY_TYPE)
1004 visit (TYPE_DOMAIN (t));
1005 else if (RECORD_OR_UNION_TYPE_P (t))
1006 for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1008 else if (code == FUNCTION_TYPE
1009 || code == METHOD_TYPE)
1010 visit (TYPE_ARG_TYPES (t));
1011 if (!POINTER_TYPE_P (t))
1012 visit (TYPE_MINVAL (t));
1013 visit (TYPE_MAXVAL (t));
1014 if (RECORD_OR_UNION_TYPE_P (t))
1015 visit (TYPE_BINFO (t));
1018 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1020 visit (TREE_PURPOSE (t));
1021 visit (TREE_VALUE (t));
1022 visit (TREE_CHAIN (t));
1025 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1026 for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1027 visit (TREE_VEC_ELT (t, i));
1029 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1031 v = iterative_hash_host_wide_int (TREE_OPERAND_LENGTH (t), v);
1032 for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1033 visit (TREE_OPERAND (t, i));
1036 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1040 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1042 visit (BINFO_OFFSET (t));
1043 visit (BINFO_VTABLE (t));
1044 visit (BINFO_VPTR_FIELD (t));
1045 FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1047 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1048 and BINFO_VPTR_INDEX; these are used by C++ FE only. */
1051 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1055 v = iterative_hash_host_wide_int (CONSTRUCTOR_NELTS (t), v);
1056 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1063 if (code == OMP_CLAUSE)
1067 v = iterative_hash_host_wide_int (OMP_CLAUSE_CODE (t), v);
1068 switch (OMP_CLAUSE_CODE (t))
1070 case OMP_CLAUSE_DEFAULT:
1071 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEFAULT_KIND (t), v);
1073 case OMP_CLAUSE_SCHEDULE:
1074 v = iterative_hash_host_wide_int (OMP_CLAUSE_SCHEDULE_KIND (t), v);
1076 case OMP_CLAUSE_DEPEND:
1077 v = iterative_hash_host_wide_int (OMP_CLAUSE_DEPEND_KIND (t), v);
1079 case OMP_CLAUSE_MAP:
1080 v = iterative_hash_host_wide_int (OMP_CLAUSE_MAP_KIND (t), v);
1082 case OMP_CLAUSE_PROC_BIND:
1083 v = iterative_hash_host_wide_int (OMP_CLAUSE_PROC_BIND_KIND (t), v);
1085 case OMP_CLAUSE_REDUCTION:
1086 v = iterative_hash_host_wide_int (OMP_CLAUSE_REDUCTION_CODE (t), v);
1091 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1092 visit (OMP_CLAUSE_OPERAND (t, i));
1093 visit (OMP_CLAUSE_CHAIN (t));
1101 /* Compare two SCC entries by their hash value for qsorting them. */
1104 scc_entry_compare (const void *p1_, const void *p2_)
1106 const scc_entry *p1 = (const scc_entry *) p1_;
1107 const scc_entry *p2 = (const scc_entry *) p2_;
1108 if (p1->hash < p2->hash)
1110 else if (p1->hash > p2->hash)
1115 /* Return a hash value for the SCC on the SCC stack from FIRST with
1119 hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
1121 /* Compute hash values for the SCC members. */
1122 for (unsigned i = 0; i < size; ++i)
1123 sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
/* Singleton SCC: its hash is simply the single member's hash.
   NOTE(review): the size == 1 guard is elided in this excerpt.  */
1126 return sccstack[first].hash;
1128 /* Sort the SCC of type, hash pairs so that when we mix in
1129 all members of the SCC the hash value becomes independent on
1130 the order we visited the SCC. Disregard hashes equal to
1131 the hash of the tree we mix into because we cannot guarantee
1132 a stable sort for those across different TUs. */
1133 qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1134 hashval_t *tem = XALLOCAVEC (hashval_t, size);
1135 for (unsigned i = 0; i < size;)
1137 hashval_t hash = sccstack[first+i].hash;
1138 hashval_t orig_hash = hash;
1140 /* Skip same hashes. */
1142 j < size && sccstack[first+j].hash == orig_hash; ++j)
1144 for (; j < size; ++j)
1145 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
1146 for (j = 0; sccstack[first+j].hash != orig_hash; ++j)
1147 hash = iterative_hash_hashval_t (sccstack[first+j].hash, hash);
/* Commit the order-independent per-member hashes (presumably stored
   into tem[] above; the store is elided in this excerpt) and fold
   them into the final SCC hash.  */
1150 hashval_t scc_hash = 0;
1151 for (unsigned i = 0; i < size; ++i)
1153 sccstack[first+i].hash = tem[i];
1154 scc_hash = iterative_hash_hashval_t (tem[i], scc_hash);
1159 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1160 already in the streamer cache. Main routine called for
1161 each visit of EXPR. */
1164 DFS_write_tree (struct output_block *ob, sccs *from_state,
1165 tree expr, bool ref_p, bool this_ref_p)
/* Tarjan-style SCC detection over tree edges: each node gets a
   dfsnum/low pair in the sccstate map and is pushed on sccstack;
   an SCC is complete when low == dfsnum for its root.  */
1170 /* Handle special cases. */
1171 if (expr == NULL_TREE)
1174 /* Do not DFS walk into indexable trees. */
1175 if (this_ref_p && tree_is_indexable (expr))
1178 /* Check if we already streamed EXPR. */
1179 if (streamer_tree_cache_lookup (ob->writer_cache, expr, &ix))
1182 slot = (sccs **)pointer_map_insert (sccstate, expr);
1183 sccs *cstate = *slot;
1186 scc_entry e = { expr, 0 };
1187 /* Not yet visited. DFS recurse and push it onto the stack. */
1188 *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
1189 sccstack.safe_push (e);
1190 cstate->dfsnum = next_dfs_num++;
1191 cstate->low = cstate->dfsnum;
/* Builtins are streamed by reference; INTEGER_CSTs only need their
   type edge walked; everything else gets a full body walk.  */
1193 if (streamer_handle_as_builtin_p (expr))
1195 else if (TREE_CODE (expr) == INTEGER_CST
1196 && !TREE_OVERFLOW (expr))
1197 DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p)
1200 DFS_write_tree_body (ob, expr, cstate, ref_p);
1202 /* Walk any LTO-specific edges. */
1204 && TREE_CODE (expr) != FUNCTION_DECL
1205 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1207 /* Handle DECL_INITIAL for symbols. */
1208 tree initial = get_symbol_initial_value (ob, expr);
1209 DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
1213 /* See if we found an SCC. */
1214 if (cstate->low == cstate->dfsnum)
1216 unsigned first, size;
1219 /* Pop the SCC and compute its size. */
1220 first = sccstack.length ();
1223 x = sccstack[--first].t;
1226 size = sccstack.length () - first;
1228 /* No need to compute hashes for LTRANS units, we don't perform
1229 any merging there. */
1230 hashval_t scc_hash = 0;
1231 unsigned scc_entry_len = 0;
1234 scc_hash = hash_scc (ob->writer_cache, first, size);
1236 /* Put the entries with the least number of collisions first. */
1237 unsigned entry_start = 0;
1238 scc_entry_len = size + 1;
1239 for (unsigned i = 0; i < size;)
1242 for (i = i + 1; i < size
1243 && (sccstack[first + i].hash
1244 == sccstack[first + from].hash); ++i)
1246 if (i - from < scc_entry_len)
1248 scc_entry_len = i - from;
/* Rotate the least-colliding run of entries to the front of the SCC
   so the reader can use them as merge candidates.  */
1252 for (unsigned i = 0; i < scc_entry_len; ++i)
1254 scc_entry tem = sccstack[first + i];
1255 sccstack[first + i] = sccstack[first + entry_start + i];
1256 sccstack[first + entry_start + i] = tem;
1260 /* Write LTO_tree_scc. */
1261 streamer_write_record_start (ob, LTO_tree_scc);
1262 streamer_write_uhwi (ob, size);
1263 streamer_write_uhwi (ob, scc_hash);
1265 /* Write size-1 SCCs without wrapping them inside SCC bundles.
1266 All INTEGER_CSTs need to be handled this way as we need
1267 their type to materialize them. Also builtins are handled
1269 ??? We still wrap these in LTO_tree_scc so at the
1270 input side we can properly identify the tree we want
1271 to ultimatively return. */
1272 size_t old_len = ob->writer_cache->nodes.length ();
1274 lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
1277 /* Write the size of the SCC entry candidates. */
1278 streamer_write_uhwi (ob, scc_entry_len);
1280 /* Write all headers and populate the streamer cache. */
1281 for (unsigned i = 0; i < size; ++i)
1283 hashval_t hash = sccstack[first+i].hash;
1284 tree t = sccstack[first+i].t;
1285 bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
1287 gcc_assert (!exists_p);
1289 if (!lto_is_streamable (t))
1290 internal_error ("tree code %qs is not supported "
1292 get_tree_code_name (TREE_CODE (t)));
1294 gcc_checking_assert (!streamer_handle_as_builtin_p (t));
1296 /* Write the header, containing everything needed to
1297 materialize EXPR on the reading side. */
1298 streamer_write_tree_header (ob, t);
1301 /* Write the bitpacks and tree references. */
1302 for (unsigned i = 0; i < size; ++i)
1304 lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
1306 /* Mark the end of the tree. */
1307 streamer_write_zero (ob);
1310 gcc_assert (old_len + size == ob->writer_cache->nodes.length ());
1312 /* Finally truncate the vector. */
1313 sccstack.truncate (first);
/* Not an SCC root: propagate the low link back to the caller's state.  */
1316 from_state->low = MIN (from_state->low, cstate->low);
1321 from_state->low = MIN (from_state->low, cstate->low);
/* Already-visited node: update the caller's low link with a back edge.  */
1323 gcc_checking_assert (from_state);
1324 if (cstate->dfsnum < from_state->dfsnum)
1325 from_state->low = MIN (cstate->dfsnum, from_state->low);
1329 /* Emit the physical representation of tree node EXPR to output block
1330 OB. If THIS_REF_P is true, the leaves of EXPR are emitted as references
1331 via lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1334 lto_output_tree (struct output_block *ob, tree expr,
1335 bool ref_p, bool this_ref_p)
/* Fast paths: NULL, indexable trees, and already-streamed trees are
   emitted as small records; otherwise a full DFS SCC walk is done.  */
1340 if (expr == NULL_TREE)
1342 streamer_write_record_start (ob, LTO_null);
1346 if (this_ref_p && tree_is_indexable (expr))
1348 lto_output_tree_ref (ob, expr);
1352 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1355 /* If a node has already been streamed out, make sure that
1356 we don't write it more than once. Otherwise, the reader
1357 will instantiate two different nodes for the same object. */
1358 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1359 streamer_write_uhwi (ob, ix);
1360 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1361 lto_tree_code_to_tag (TREE_CODE (expr)));
1362 lto_stats.num_pickle_refs_output++;
1366 /* This is the first time we see EXPR, write all reachable
1368 static bool in_dfs_walk;
1370 /* Protect against recursion which means disconnect between
1371 what tree edges we walk in the DFS walk and what edges
1373 gcc_assert (!in_dfs_walk);
1375 /* Start the DFS walk. */
1376 /* Save ob state ... */
/* Set up per-walk globals (SCC state map, obstack), run the DFS, and
   tear everything down again.  */
1379 sccstate = pointer_map_create ();
1380 gcc_obstack_init (&sccstate_obstack);
1382 DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
1383 sccstack.release ();
1384 pointer_map_destroy (sccstate);
1385 obstack_free (&sccstate_obstack, NULL);
1386 in_dfs_walk = false;
1388 /* Finally append a reference to the tree we were writing.
1389 ??? If expr ended up as a singleton we could have
1390 inlined it here and avoid outputting a reference. */
1391 existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1392 gcc_assert (existed_p);
1393 streamer_write_record_start (ob, LTO_tree_pickle_reference);
1394 streamer_write_uhwi (ob, ix);
1395 streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1396 lto_tree_code_to_tag (TREE_CODE (expr)));
1397 lto_stats.num_pickle_refs_output++;
1402 /* Output to OB a list of try/catch handlers starting with FIRST. */
1405 output_eh_try_list (struct output_block *ob, eh_catch first)
/* Each handler becomes an LTO_eh_catch record; the chain is terminated
   with LTO_null.  */
1409 for (n = first; n; n = n->next_catch)
1411 streamer_write_record_start (ob, LTO_eh_catch);
1412 stream_write_tree (ob, n->type_list, true);
1413 stream_write_tree (ob, n->filter_list, true);
1414 stream_write_tree (ob, n->label, true);
1417 streamer_write_record_start (ob, LTO_null);
1421 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1422 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1423 detect EH region sharing. */
1426 output_eh_region (struct output_block *ob, eh_region r)
/* A NULL region (presumably; the guard is elided in this excerpt) is
   streamed as LTO_null.  */
1432 streamer_write_record_start (ob, LTO_null);
/* Map the region type to its LTO tag.  */
1436 if (r->type == ERT_CLEANUP)
1437 tag = LTO_ert_cleanup;
1438 else if (r->type == ERT_TRY)
1440 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1441 tag = LTO_ert_allowed_exceptions;
1442 else if (r->type == ERT_MUST_NOT_THROW)
1443 tag = LTO_ert_must_not_throw;
1447 streamer_write_record_start (ob, tag);
1448 streamer_write_hwi (ob, r->index);
/* Tree links (outer/inner/next_peer) are emitted as region indices,
   with zero standing in for a missing link.  */
1451 streamer_write_hwi (ob, r->outer->index);
1453 streamer_write_zero (ob);
1456 streamer_write_hwi (ob, r->inner->index);
1458 streamer_write_zero (ob);
1461 streamer_write_hwi (ob, r->next_peer->index);
1463 streamer_write_zero (ob);
/* Type-specific payload.  */
1465 if (r->type == ERT_TRY)
1467 output_eh_try_list (ob, r->u.eh_try.first_catch);
1469 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1471 stream_write_tree (ob, r->u.allowed.type_list, true);
1472 stream_write_tree (ob, r->u.allowed.label, true);
1473 streamer_write_uhwi (ob, r->u.allowed.filter);
1475 else if (r->type == ERT_MUST_NOT_THROW)
1477 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1478 bitpack_d bp = bitpack_create (ob->main_stream);
1479 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1480 streamer_write_bitpack (&bp);
1483 if (r->landing_pads)
1484 streamer_write_hwi (ob, r->landing_pads->index);
1486 streamer_write_zero (ob);
1490 /* Output landing pad LP to OB. */
1493 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
/* NULL landing pad (guard elided in this excerpt) streams as LTO_null;
   links to the next pad and owning region go out as indices, zero
   meaning "none".  */
1497 streamer_write_record_start (ob, LTO_null);
1501 streamer_write_record_start (ob, LTO_eh_landing_pad);
1502 streamer_write_hwi (ob, lp->index);
1504 streamer_write_hwi (ob, lp->next_lp->index);
1506 streamer_write_zero (ob);
1509 streamer_write_hwi (ob, lp->region->index);
1511 streamer_write_zero (ob);
1513 stream_write_tree (ob, lp->post_landing_pad, true);
1517 /* Output the existing eh_table to OB. */
1520 output_eh_regions (struct output_block *ob, struct function *fn)
/* Only emit an EH table when the function actually has one; the record
   always ends with an LTO_null terminator (see below).  */
1522 if (fn->eh && fn->eh->region_tree)
1529 streamer_write_record_start (ob, LTO_eh_table);
1531 /* Emit the index of the root of the EH region tree. */
1532 streamer_write_hwi (ob, fn->eh->region_tree->index);
1534 /* Emit all the EH regions in the region array. */
1535 streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1536 FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1537 output_eh_region (ob, eh);
1539 /* Emit all landing pads. */
1540 streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1541 FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1542 output_eh_lp (ob, lp);
1544 /* Emit all the runtime type data. */
1545 streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1546 FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1547 stream_write_tree (ob, ttype, true);
1549 /* Emit the table of action chains. */
1550 if (targetm.arm_eabi_unwinder)
/* ARM EABI unwinders store trees; everything else stores raw uchars.  */
1553 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1554 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1555 stream_write_tree (ob, t, true);
1560 streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1561 FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1562 streamer_write_char_stream (ob->main_stream, c);
1566 /* The LTO_null either terminates the record or indicates that there
1567 are no eh_records at all. */
1568 streamer_write_record_start (ob, LTO_null);
1572 /* Output all of the active ssa names to the ssa_names stream. */
1575 output_ssa_names (struct output_block *ob, struct function *fn)
1577 unsigned int i, len;
1579 len = vec_safe_length (SSANAMES (fn));
1580 streamer_write_uhwi (ob, len);
/* Index 0 is skipped — SSA name versions start at 1 here; freed and
   virtual names are also not streamed.  */
1582 for (i = 1; i < len; i++)
1584 tree ptr = (*SSANAMES (fn))[i];
1586 if (ptr == NULL_TREE
1587 || SSA_NAME_IN_FREE_LIST (ptr)
1588 || virtual_operand_p (ptr))
1591 streamer_write_uhwi (ob, i);
1592 streamer_write_char_stream (ob->main_stream,
1593 SSA_NAME_IS_DEFAULT_DEF (ptr));
/* Stream either the underlying VAR_DECL or, failing that, just the
   type of the SSA name.  */
1594 if (SSA_NAME_VAR (ptr))
1595 stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1597 /* ??? This drops SSA_NAME_IDENTIFIER on the floor. */
1598 stream_write_tree (ob, TREE_TYPE (ptr), true);
/* Zero terminates the SSA-name list.  */
1601 streamer_write_zero (ob);
1605 /* Output the cfg. */
1608 output_cfg (struct output_block *ob, struct function *fn)
/* Temporarily redirect the main stream to the dedicated CFG stream;
   it is restored at the bottom of the function.  */
1610 struct lto_output_stream *tmp_stream = ob->main_stream;
1613 ob->main_stream = ob->cfg_stream;
1615 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1616 profile_status_for_function (fn));
1618 /* Output the number of the highest basic block. */
1619 streamer_write_uhwi (ob, last_basic_block_for_function (fn));
1621 FOR_ALL_BB_FN (bb, fn)
1626 streamer_write_hwi (ob, bb->index);
1628 /* Output the successors and the edge flags. */
1629 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1630 FOR_EACH_EDGE (e, ei, bb->succs)
1632 streamer_write_uhwi (ob, e->dest->index);
1633 streamer_write_hwi (ob, e->probability);
1634 streamer_write_gcov_count (ob, e->count);
1635 streamer_write_uhwi (ob, e->flags);
/* -1 terminates the per-block records.  */
1639 streamer_write_hwi (ob, -1);
/* Emit the chain of basic blocks starting at the entry block;
   terminated with -1 as well.  */
1641 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1644 streamer_write_hwi (ob, bb->next_bb->index);
1648 streamer_write_hwi (ob, -1);
1650 /* ??? The cfgloop interface is tied to cfun. */
1651 gcc_assert (cfun == fn);
1653 /* Output the number of loops. */
1654 streamer_write_uhwi (ob, number_of_loops (fn));
1656 /* Output each loop, skipping the tree root which has number zero. */
1657 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1659 struct loop *loop = get_loop (fn, i);
1661 /* Write the index of the loop header. That's enough to rebuild
1662 the loop tree on the reader side. Stream -1 for an unused
1666 streamer_write_hwi (ob, -1);
1670 streamer_write_hwi (ob, loop->header->index);
1672 /* Write everything copy_loop_info copies. */
1673 streamer_write_enum (ob->main_stream,
1674 loop_estimation, EST_LAST, loop->estimate_state);
/* Iteration bounds are double_ints: stream low/high words separately.  */
1675 streamer_write_hwi (ob, loop->any_upper_bound);
1676 if (loop->any_upper_bound)
1678 streamer_write_uhwi (ob, loop->nb_iterations_upper_bound.low);
1679 streamer_write_hwi (ob, loop->nb_iterations_upper_bound.high);
1681 streamer_write_hwi (ob, loop->any_estimate);
1682 if (loop->any_estimate)
1684 streamer_write_uhwi (ob, loop->nb_iterations_estimate.low);
1685 streamer_write_hwi (ob, loop->nb_iterations_estimate.high);
1688 /* Write OMP SIMD related info. */
1689 streamer_write_hwi (ob, loop->safelen);
1690 streamer_write_hwi (ob, loop->force_vect);
1691 stream_write_tree (ob, loop->simduid, true);
/* Restore the real main stream.  */
1694 ob->main_stream = tmp_stream;
1698 /* Create the header in the file using OB. If the section type is for
1699 a function, set FN to the decl for that function. */
1702 produce_asm (struct output_block *ob, tree fn)
1704 enum lto_section_type section_type = ob->section_type;
1705 struct lto_function_header header;
1707 struct lto_output_stream *header_stream;
/* Function-body sections are named after the function's assembler
   name; other section types get a generic name.  */
1709 if (section_type == LTO_section_function_body)
1711 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1712 section_name = lto_get_section_name (section_type, name, NULL);
1715 section_name = lto_get_section_name (section_type, NULL, NULL);
1717 lto_begin_section (section_name, !flag_wpa);
1718 free (section_name);
1720 /* The entire header is stream computed here. */
1721 memset (&header, 0, sizeof (struct lto_function_header));
1723 /* Write the header. */
1724 header.lto_header.major_version = LTO_major_version;
1725 header.lto_header.minor_version = LTO_minor_version;
1727 header.compressed_size = 0;
/* Record sub-stream sizes so the reader can slice the section.  */
1729 if (section_type == LTO_section_function_body)
1730 header.cfg_size = ob->cfg_stream->total_size;
1731 header.main_size = ob->main_stream->total_size;
1732 header.string_size = ob->string_stream->total_size;
1734 header_stream = XCNEW (struct lto_output_stream);
1735 lto_output_data_stream (header_stream, &header, sizeof header);
1736 lto_write_stream (header_stream);
1737 free (header_stream);
1739 /* Put all of the gimple and the string table out the asm file as a
1741 if (section_type == LTO_section_function_body)
1742 lto_write_stream (ob->cfg_stream);
1743 lto_write_stream (ob->main_stream);
1744 lto_write_stream (ob->string_stream);
1750 /* Output the base body of struct function FN using output block OB. */
1753 output_struct_function_base (struct output_block *ob, struct function *fn)
1755 struct bitpack_d bp;
1759 /* Output the static chain and non-local goto save area. */
1760 stream_write_tree (ob, fn->static_chain_decl, true);
1761 stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1763 /* Output all the local variables in the function. */
1764 streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1765 FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
1766 stream_write_tree (ob, t, true);
1768 /* Output current IL state of the function. */
1769 streamer_write_uhwi (ob, fn->curr_properties);
1771 /* Write all the attributes for FN. */
/* Single-bit function flags plus the two 8-bit va_list sizes, packed
   into one bitpack together with the start/end locations.  */
1772 bp = bitpack_create (ob->main_stream);
1773 bp_pack_value (&bp, fn->is_thunk, 1);
1774 bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
1775 bp_pack_value (&bp, fn->returns_pcc_struct, 1);
1776 bp_pack_value (&bp, fn->returns_struct, 1);
1777 bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
1778 bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
1779 bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
1780 bp_pack_value (&bp, fn->after_inlining, 1);
1781 bp_pack_value (&bp, fn->stdarg, 1);
1782 bp_pack_value (&bp, fn->has_nonlocal_label, 1);
1783 bp_pack_value (&bp, fn->calls_alloca, 1);
1784 bp_pack_value (&bp, fn->calls_setjmp, 1);
1785 bp_pack_value (&bp, fn->has_force_vect_loops, 1);
1786 bp_pack_value (&bp, fn->has_simduid_loops, 1);
1787 bp_pack_value (&bp, fn->va_list_fpr_size, 8);
1788 bp_pack_value (&bp, fn->va_list_gpr_size, 8);
1790 /* Output the function start and end loci. */
1791 stream_output_location (ob, &bp, fn->function_start_locus);
1792 stream_output_location (ob, &bp, fn->function_end_locus);
1794 streamer_write_bitpack (&bp);
1798 /* Output the body of function NODE->DECL. */
1801 output_function (struct cgraph_node *node)
1804 struct function *fn;
1806 struct output_block *ob;
1808 function = node->decl;
1809 fn = DECL_STRUCT_FUNCTION (function);
1810 ob = create_output_block (LTO_section_function_body);
1812 clear_line_info (ob);
1813 ob->cgraph_node = node;
1815 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
1817 /* Set current_function_decl and cfun. */
1820 /* Make string 0 be a NULL string. */
1821 streamer_write_char_stream (ob->string_stream, 0);
1823 streamer_write_record_start (ob, LTO_function);
1825 /* Output decls for parameters and args. */
1826 stream_write_tree (ob, DECL_RESULT (function), true);
1827 streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
1829 /* Output DECL_INITIAL for the function, which contains the tree of
1831 stream_write_tree (ob, DECL_INITIAL (function), true);
1833 /* We also stream abstract functions where we stream only stuff needed for
1835 if (gimple_has_body_p (function))
/* The uhwi 1 flags a real body following; abstract functions write 0
   (see the streamer_write_uhwi (ob, 0) below).  */
1837 streamer_write_uhwi (ob, 1);
1838 output_struct_function_base (ob, fn);
1840 /* Output all the SSA names used in the function. */
1841 output_ssa_names (ob, fn);
1843 /* Output any exception handling regions. */
1844 output_eh_regions (ob, fn);
1847 /* We will renumber the statements. The code that does this uses
1848 the same ordering that we use for serializing them so we can use
1849 the same code on the other end and not have to write out the
1850 statement numbers. We do not assign UIDs to PHIs here because
1851 virtual PHIs get re-computed on-the-fly which would make numbers
1853 set_gimple_stmt_max_uid (cfun, 0);
1856 gimple_stmt_iterator gsi;
1857 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1859 gimple stmt = gsi_stmt (gsi);
1861 /* Virtual PHIs are not going to be streamed. */
1862 if (!virtual_operand_p (gimple_phi_result (stmt)))
1863 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1865 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1867 gimple stmt = gsi_stmt (gsi);
1868 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1871 /* To avoid keeping duplicate gimple IDs in the statements, renumber
1872 virtual phis now. */
1875 gimple_stmt_iterator gsi;
1876 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1878 gimple stmt = gsi_stmt (gsi);
1879 if (virtual_operand_p (gimple_phi_result (stmt)))
1880 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1884 /* Output the code for the function. */
1885 FOR_ALL_BB_FN (bb, fn)
1886 output_bb (ob, bb, fn);
1888 /* The terminator for this function. */
1889 streamer_write_record_start (ob, LTO_null);
1891 output_cfg (ob, fn);
/* Abstract-function path: no body, write 0.  */
1896 streamer_write_uhwi (ob, 0);
1898 /* Create a section to hold the pickled output of this function. */
1899 produce_asm (ob, function);
1901 destroy_output_block (ob);
1905 /* Emit toplevel asms. */
1908 lto_output_toplevel_asms (void)
1910 struct output_block *ob;
1911 struct asm_node *can;
1913 struct lto_output_stream *header_stream;
1914 struct lto_asm_header header;
1919 ob = create_output_block (LTO_section_asm);
1921 /* Make string 0 be a NULL string. */
1922 streamer_write_char_stream (ob->string_stream, 0);
/* Stream each toplevel asm string with its original ordering;
   a NULL_TREE string terminates the list.  */
1924 for (can = asm_nodes; can; can = can->next)
1926 streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
1927 streamer_write_hwi (ob, can->order);
1930 streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
1932 section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
1933 lto_begin_section (section_name, !flag_wpa);
1934 free (section_name);
1936 /* The entire header stream is computed here. */
1937 memset (&header, 0, sizeof (header));
1939 /* Write the header. */
1940 header.lto_header.major_version = LTO_major_version;
1941 header.lto_header.minor_version = LTO_minor_version;
1943 header.main_size = ob->main_stream->total_size;
1944 header.string_size = ob->string_stream->total_size;
1946 header_stream = XCNEW (struct lto_output_stream);
1947 lto_output_data_stream (header_stream, &header, sizeof (header));
1948 lto_write_stream (header_stream);
1949 free (header_stream);
1951 /* Put all of the gimple and the string table out the asm file as a
1953 lto_write_stream (ob->main_stream);
1954 lto_write_stream (ob->string_stream);
1958 destroy_output_block (ob);
1962 /* Copy the function body of NODE without deserializing. */
1965 copy_function (struct cgraph_node *node)
1967 tree function = node->decl;
1968 struct lto_file_decl_data *file_data = node->lto_file_data;
1969 struct lto_output_stream *output_stream = XCNEW (struct lto_output_stream);
1972 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
1973 char *section_name =
1974 lto_get_section_name (LTO_section_function_body, name, NULL);
1976 struct lto_in_decl_state *in_state;
1977 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
1979 lto_begin_section (section_name, !flag_wpa);
1980 free (section_name);
1982 /* We may have renamed the declaration, e.g., a static function. */
1983 name = lto_get_decl_name_mapping (file_data, name);
1985 data = lto_get_section_data (file_data, LTO_section_function_body,
1989 /* Do a bit copy of the function body. */
1990 lto_output_data_stream (output_stream, data, len);
1991 lto_write_stream (output_stream);
/* Replicate the input decl-state streams into the output state so the
   copied body's tree references resolve to the same indices.  */
1995 lto_get_function_in_decl_state (node->lto_file_data, function);
1996 gcc_assert (in_state);
1998 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2000 size_t n = in_state->streams[i].size;
2001 tree *trees = in_state->streams[i].trees;
2002 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2004 /* The out state must have the same indices and the in state.
2005 So just copy the vector. All the encoders in the in state
2006 must be empty where we reach here. */
2007 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2008 encoder->trees.reserve_exact (n);
2009 for (j = 0; j < n; j++)
2010 encoder->trees.safe_push (trees[j]);
2013 lto_free_section_data (file_data, LTO_section_function_body, name,
2015 free (output_stream);
2020 /* Main entry point from the pass manager. */
/* NOTE(review): the function signature line is elided in this excerpt;
   this is the body of the LTO output driver (lto_output).  */
2025 struct lto_out_decl_state *decl_state;
2026 #ifdef ENABLE_CHECKING
/* Bitmap of DECL_UIDs already output — used only to assert each
   function body is written at most once.  */
2027 bitmap output = lto_bitmap_alloc ();
2030 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2032 /* Initialize the streamer. */
2033 lto_streamer_init ();
2035 n_nodes = lto_symtab_encoder_size (encoder);
2036 /* Process only the functions with bodies. */
2037 for (i = 0; i < n_nodes; i++)
2039 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2040 cgraph_node *node = dyn_cast <cgraph_node> (snode);
2042 && lto_symtab_encoder_encode_body_p (encoder, node)
2045 #ifdef ENABLE_CHECKING
2046 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2047 bitmap_set_bit (output, DECL_UID (node->decl));
/* Each function body gets its own out-decl-state pushed around the
   streaming call; bodies present in memory are streamed, otherwise
   the already-serialized section is copied verbatim.  */
2049 decl_state = lto_new_out_decl_state ();
2050 lto_push_out_decl_state (decl_state);
2051 if (gimple_has_body_p (node->decl) || !flag_wpa)
2052 output_function (node);
2054 copy_function (node);
2055 gcc_assert (lto_get_out_decl_state () == decl_state);
2056 lto_pop_out_decl_state ();
2057 lto_record_function_out_decl_state (node->decl, decl_state);
2061 /* Emit the callgraph after emitting function bodies. This needs to
2062 be done now to make sure that all the statements in every function
2063 have been renumbered so that edges can be associated with call
2064 statements using the statement UIDs. */
2067 #ifdef ENABLE_CHECKING
2068 lto_bitmap_free (output);
2072 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2073 from it and required for correct representation of its semantics.
2074 Each node in ENCODER must be a global declaration or a type. A node
2075 is written only once, even if it appears multiple times in the
2076 vector. Certain transitively-reachable nodes, such as those
2077 representing expressions, may be duplicated, but such nodes
2078 must not appear in ENCODER itself. */
2081 write_global_stream (struct output_block *ob,
2082 struct lto_tree_ref_encoder *encoder)
2086 const size_t size = lto_tree_ref_encoder_size (encoder);
2088 for (index = 0; index < size; index++)
2090 t = lto_tree_ref_encoder_get_tree (encoder, index);
/* The writer cache lookup provides the write-once guarantee.  */
2091 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2092 stream_write_tree (ob, t, false);
2097 /* Write a sequence of indices into the globals vector corresponding
2098 to the trees in ENCODER. These are used by the reader to map the
2099 indices used to refer to global entities within function bodies to
2103 write_global_references (struct output_block *ob,
2104 struct lto_output_stream *ref_stream,
2105 struct lto_tree_ref_encoder *encoder)
2109 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2111 /* Write size as 32-bit unsigned. */
2112 lto_output_data_stream (ref_stream, &size, sizeof (int32_t));
2114 for (index = 0; index < size; index++)
2118 t = lto_tree_ref_encoder_get_tree (encoder, index);
/* Every encoded tree must already be in the writer cache; its cache
   slot is the index the reader will use.  */
2119 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2120 gcc_assert (slot_num != (unsigned)-1);
2121 lto_output_data_stream (ref_stream, &slot_num, sizeof slot_num);
2126 /* Write all the streams in an lto_out_decl_state STATE using
2127 output block OB and output stream OUT_STREAM. */
2130 lto_output_decl_state_streams (struct output_block *ob,
2131 struct lto_out_decl_state *state)
/* Simply stream each of the LTO_N_DECL_STREAMS global-decl streams.  */
2135 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2136 write_global_stream (ob, &state->streams[i]);
2140 /* Write all the references in an lto_out_decl_state STATE using
2141 output block OB and output stream OUT_STREAM. */
2144 lto_output_decl_state_refs (struct output_block *ob,
2145 struct lto_output_stream *out_stream,
2146 struct lto_out_decl_state *state)
2152 /* Write reference to FUNCTION_DECL. If there is not function,
2153 write reference to void_type_node. */
2154 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2155 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2156 gcc_assert (ref != (unsigned)-1);
2157 lto_output_data_stream (out_stream, &ref, sizeof (uint32_t));
/* Then the per-stream reference index vectors.  */
2159 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2160 write_global_references (ob, out_stream, &state->streams[i]);
2164 /* Return the written size of STATE. */
2167 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
/* Mirrors the layout written by lto_output_decl_state_refs: one
   int32 fn reference, then per stream an int32 size plus one int32
   per encoded tree.  */
2172 size = sizeof (int32_t); /* fn_ref. */
2173 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2175 size += sizeof (int32_t); /* vector size. */
2176 size += (lto_tree_ref_encoder_size (&state->streams[i])
2177 * sizeof (int32_t));
2183 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2187 write_symbol (struct streamer_tree_cache_d *cache,
2188 struct lto_output_stream *stream,
2189 tree t, struct pointer_set_t *seen, bool alias)
2192 enum gcc_plugin_symbol_kind kind;
2193 enum gcc_plugin_symbol_visibility visibility;
2195 unsigned HOST_WIDEST_INT size;
2199 /* None of the following kinds of symbols are needed in the
2201 if (!TREE_PUBLIC (t)
2202 || is_builtin_fn (t)
2203 || DECL_ABSTRACT (t)
2204 || (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t)))
2206 gcc_assert (TREE_CODE (t) != RESULT_DECL);
2208 gcc_assert (TREE_CODE (t) == VAR_DECL
2209 || TREE_CODE (t) == FUNCTION_DECL);
2211 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2213 /* This behaves like assemble_name_raw in varasm.c, performing the
2214 same name manipulations that ASM_OUTPUT_LABELREF does. */
2215 name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
/* Each assembler name is emitted at most once per symbol table.  */
2217 if (pointer_set_contains (seen, name))
2219 pointer_set_insert (seen, name);
2221 streamer_tree_cache_lookup (cache, t, &slot_num);
2222 gcc_assert (slot_num != (unsigned)-1);
/* Classify the symbol for the linker plugin (DEF/WEAKDEF/COMMON/
   UNDEF/WEAKUNDEF).  */
2224 if (DECL_EXTERNAL (t))
2227 kind = GCCPK_WEAKUNDEF;
2234 kind = GCCPK_WEAKDEF;
2235 else if (DECL_COMMON (t))
2236 kind = GCCPK_COMMON;
2240 /* When something is defined, it should have node attached. */
2241 gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2242 || varpool_get_node (t)->definition);
2243 gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2244 || (cgraph_get_node (t)
2245 && cgraph_get_node (t)->definition));
2248 /* Imitate what default_elf_asm_output_external do.
2249 When symbol is external, we need to output it with DEFAULT visibility
2250 when compiling with -fvisibility=default, while with HIDDEN visibility
2251 when symbol has attribute (visibility("hidden")) specified.
2252 targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
2255 if (DECL_EXTERNAL (t)
2256 && !targetm.binds_local_p (t))
2257 visibility = GCCPV_DEFAULT;
2259 switch (DECL_VISIBILITY (t))
2261 case VISIBILITY_DEFAULT:
2262 visibility = GCCPV_DEFAULT;
2264 case VISIBILITY_PROTECTED:
2265 visibility = GCCPV_PROTECTED;
2267 case VISIBILITY_HIDDEN:
2268 visibility = GCCPV_HIDDEN;
2270 case VISIBILITY_INTERNAL:
2271 visibility = GCCPV_INTERNAL;
/* Only common symbols report a size to the plugin.  */
2275 if (kind == GCCPK_COMMON
2276 && DECL_SIZE_UNIT (t)
2277 && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2278 size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2282 if (DECL_ONE_ONLY (t))
2283 comdat = IDENTIFIER_POINTER (DECL_COMDAT_GROUP (t));
/* Fixed plugin record layout: NUL-terminated name and comdat, one byte
   each for kind and visibility, 8 bytes of size, 4 bytes of slot.  */
2287 lto_output_data_stream (stream, name, strlen (name) + 1);
2288 lto_output_data_stream (stream, comdat, strlen (comdat) + 1);
2289 c = (unsigned char) kind;
2290 lto_output_data_stream (stream, &c, 1);
2291 c = (unsigned char) visibility;
2292 lto_output_data_stream (stream, &c, 1);
2293 lto_output_data_stream (stream, &size, 8);
2294 lto_output_data_stream (stream, &slot_num, 4);
2297 /* Return true if NODE should appear in the plugin symbol table. */
2300 output_symbol_p (symtab_node *node)
2302 struct cgraph_node *cnode;
2303 if (!symtab_real_symbol_p (node))
2305 /* We keep external functions in symtab for sake of inlining
2306 and devirtualization. We do not want to see them in symbol table as
2307 references unless they are really used. */
2308 cnode = dyn_cast <cgraph_node> (node);
2309 if (cnode && (!node->definition || DECL_EXTERNAL (cnode->decl))
2313 /* Ignore all references from external vars initializers - they are not really
2314 part of the compilation unit until they are used by folding. Some symbols,
2315 like references to external construction vtables can not be referred to at all.
2316 We decide this at can_refer_decl_in_current_unit_p. */
2317 if (!node->definition || DECL_EXTERNAL (node->decl))
/* Scan the referring list: a non-external, non-alias reference from
   real code makes the undefined symbol worth emitting.  */
2320 struct ipa_ref *ref;
2321 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
2324 if (ref->use == IPA_REF_ALIAS)
2326 if (is_a <cgraph_node> (ref->referring))
2328 if (!DECL_EXTERNAL (ref->referring->decl))
2337 /* Write an IL symbol table to OB.
2338 SET and VSET are cgraph/varpool node sets we are outputting. */
2341 produce_symtab (struct output_block *ob)
2343 struct streamer_tree_cache_d *cache = ob->writer_cache;
2344 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2345 struct pointer_set_t *seen;
2346 struct lto_output_stream stream;
2347 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2348 lto_symtab_encoder_iterator lsei;
2350 lto_begin_section (section_name, false);
2351 free (section_name);
2353 seen = pointer_set_create ();
2354 memset (&stream, 0, sizeof (stream));
2356 /* Write the symbol table.
2357 First write everything defined and then all declarations.
2358 This is necessary to handle cases where we have duplicated symbols. */
2359 for (lsei = lsei_start (encoder);
2360 !lsei_end_p (lsei); lsei_next (&lsei))
2362 symtab_node *node = lsei_node (lsei);
/* Pass 1: defined symbols only (external ones are skipped here).  */
2364 if (!output_symbol_p (node) || DECL_EXTERNAL (node->decl))
2366 write_symbol (cache, &stream, node->decl, seen, false);
2368 for (lsei = lsei_start (encoder);
2369 !lsei_end_p (lsei); lsei_next (&lsei))
2371 symtab_node *node = lsei_node (lsei);
/* Pass 2: external declarations only.  */
2373 if (!output_symbol_p (node) || !DECL_EXTERNAL (node->decl))
2375 write_symbol (cache, &stream, node->decl, seen, false);
2378 lto_write_stream (&stream);
2379 pointer_set_destroy (seen);
2385 /* This pass is run after all of the functions are serialized and all
2386 of the IPA passes have written their serialized forms. This pass
2387 causes the vector of all of the global decls and types used from
2388 this file to be written in to a section that can then be read in to
2389 recover these on other side. */
2392 produce_asm_for_decls (void)
2394 struct lto_out_decl_state *out_state;
2395 struct lto_out_decl_state *fn_out_state;
2396 struct lto_decl_header header;
2398 struct output_block *ob;
2399 struct lto_output_stream *header_stream, *decl_state_stream;
2400 unsigned idx, num_fns;
2401 size_t decl_state_size;
2402 int32_t num_decl_states;
2404 ob = create_output_block (LTO_section_decls);
2407 memset (&header, 0, sizeof (struct lto_decl_header));
2409 section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2410 lto_begin_section (section_name, !flag_wpa);
2411 free (section_name);
2413 /* Make string 0 be a NULL string. */
2414 streamer_write_char_stream (ob->string_stream, 0);
2416 gcc_assert (!alias_pairs);
2418 /* Write the global symbols. */
2419 out_state = lto_get_out_decl_state ();
2420 num_fns = lto_function_decl_states.length ();
2421 lto_output_decl_state_streams (ob, out_state);
2422 for (idx = 0; idx < num_fns; idx++)
2425 lto_function_decl_states[idx];
2426 lto_output_decl_state_streams (ob, fn_out_state);
2429 header.lto_header.major_version = LTO_major_version;
2430 header.lto_header.minor_version = LTO_minor_version;
2432 /* Currently not used. This field would allow us to preallocate
2433 the globals vector, so that it need not be resized as it is extended. */
2434 header.num_nodes = -1;
2436 /* Compute the total size of all decl out states. */
2437 decl_state_size = sizeof (int32_t);
2438 decl_state_size += lto_out_decl_state_written_size (out_state);
2439 for (idx = 0; idx < num_fns; idx++)
2442 lto_function_decl_states[idx];
2443 decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2445 header.decl_state_size = decl_state_size;
2447 header.main_size = ob->main_stream->total_size;
2448 header.string_size = ob->string_stream->total_size;
2450 header_stream = XCNEW (struct lto_output_stream);
2451 lto_output_data_stream (header_stream, &header, sizeof header);
2452 lto_write_stream (header_stream);
2453 free (header_stream);
2455 /* Write the main out-decl state, followed by out-decl states of
2457 decl_state_stream = XCNEW (struct lto_output_stream);
2458 num_decl_states = num_fns + 1;
2459 lto_output_data_stream (decl_state_stream, &num_decl_states,
2460 sizeof (num_decl_states));
2461 lto_output_decl_state_refs (ob, decl_state_stream, out_state);
2462 for (idx = 0; idx < num_fns; idx++)
2465 lto_function_decl_states[idx];
2466 lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
2468 lto_write_stream (decl_state_stream);
2469 free (decl_state_stream);
2471 lto_write_stream (ob->main_stream);
2472 lto_write_stream (ob->string_stream);
2476 /* Write the symbol table. It is used by linker to determine dependencies
2477 and thus we can skip it for WPA. */
2479 produce_symtab (ob);
2481 /* Write command line opts. */
2482 lto_write_options ();
2484 /* Deallocate memory and clean up. */
2485 for (idx = 0; idx < num_fns; idx++)
2488 lto_function_decl_states[idx];
2489 lto_delete_out_decl_state (fn_out_state);
2491 lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2492 lto_function_decl_states.release ();
2493 destroy_output_block (ob);