Daily bump.
[gcc.git] / gcc / lto-cgraph.c
1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2021 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42 #include "alloc-pool.h"
43 #include "symbol-summary.h"
44 #include "symtab-thunks.h"
45 #include "symtab-clones.h"
46
/* True when toplevel asm nodes have been output (they are streamed only
   once; see output_symtab).  */
bool asm_nodes_output = false;

/* Forward declarations for the optimization-summary streamers defined
   later in this file.  */
static void output_cgraph_opt_summary (void);
static void input_cgraph_opt_summary (vec<symtab_node *> nodes);

/* Number of LDPR values known to GCC.  */
#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
55
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  /* Edge with a callee unknown at compile time (indirect call).  */
  LTO_symtab_indirect_edge,
  /* Varpool node record.  */
  LTO_symtab_variable,
  /* Sentinel; must stay last.  */
  LTO_symtab_last_tag
};
72
73 /* Create a new symtab encoder.
74 if FOR_INPUT, the encoder allocate only datastructures needed
75 to read the symtab. */
76
77 lto_symtab_encoder_t
78 lto_symtab_encoder_new (bool for_input)
79 {
80 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
81
82 if (!for_input)
83 encoder->map = new hash_map<symtab_node *, size_t>;
84 encoder->nodes.create (0);
85 return encoder;
86 }
87
88
89 /* Delete ENCODER and its components. */
90
91 void
92 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
93 {
94 encoder->nodes.release ();
95 if (encoder->map)
96 delete encoder->map;
97 free (encoder);
98 }
99
100
101 /* Return the existing reference number of NODE in the symtab encoder in
102 output block OB. Assign a new reference if this is the first time
103 NODE is encoded. */
104
105 int
106 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
107 symtab_node *node)
108 {
109 int ref;
110
111 if (!encoder->map)
112 {
113 lto_encoder_entry entry = {node, false, false, false};
114
115 ref = encoder->nodes.length ();
116 encoder->nodes.safe_push (entry);
117 return ref;
118 }
119
120 size_t *slot = encoder->map->get (node);
121 if (!slot || !*slot)
122 {
123 lto_encoder_entry entry = {node, false, false, false};
124 ref = encoder->nodes.length ();
125 if (!slot)
126 encoder->map->put (node, ref + 1);
127 encoder->nodes.safe_push (entry);
128 }
129 else
130 ref = *slot - 1;
131
132 return ref;
133 }
134
/* Remove NODE from encoder.  Return true if it was present.  */

bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
				symtab_node *node)
{
  int index;
  lto_encoder_entry last_node;

  size_t *slot = encoder->map->get (node);
  if (slot == NULL || !*slot)
    return false;

  /* Map values are biased by one; 0 means "not encoded".  */
  index = *slot - 1;
  gcc_checking_assert (encoder->nodes[index].node == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = encoder->nodes.pop ();
  if (last_node.node != node)
    {
      /* Re-point the moved node's map entry at NODE's old slot.  The put
	 deliberately has a side effect inside gcc_assert; presumably put
	 returns true when overwriting an existing entry — verify against
	 hash_map's contract.  */
      gcc_assert (encoder->map->put (last_node.node, index + 1));

      /* Move the last element to the original spot of NODE.  */
      encoder->nodes[index] = last_node;
    }

  /* Remove element from hash table.  */
  encoder->map->remove (node);
  return true;
}
166
167
168 /* Return TRUE if we should encode the body of NODE (if any). */
169
170 bool
171 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
172 struct cgraph_node *node)
173 {
174 int index = lto_symtab_encoder_lookup (encoder, node);
175 return encoder->nodes[index].body;
176 }
177
178 /* Specify that we encode the body of NODE in this partition. */
179
180 static void
181 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
182 struct cgraph_node *node)
183 {
184 int index = lto_symtab_encoder_encode (encoder, node);
185 gcc_checking_assert (encoder->nodes[index].node == node);
186 encoder->nodes[index].body = true;
187 }
188
189 /* Return TRUE if we should encode initializer of NODE (if any). */
190
191 bool
192 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
193 varpool_node *node)
194 {
195 int index = lto_symtab_encoder_lookup (encoder, node);
196 if (index == LCC_NOT_FOUND)
197 return false;
198 return encoder->nodes[index].initializer;
199 }
200
201 /* Specify that we should encode initializer of NODE (if any). */
202
203 static void
204 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
205 varpool_node *node)
206 {
207 int index = lto_symtab_encoder_lookup (encoder, node);
208 encoder->nodes[index].initializer = true;
209 }
210
211 /* Return TRUE if NODE is in this partition. */
212
213 bool
214 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
215 symtab_node *node)
216 {
217 int index = lto_symtab_encoder_lookup (encoder, node);
218 if (index == LCC_NOT_FOUND)
219 return false;
220 return encoder->nodes[index].in_partition;
221 }
222
223 /* Specify that NODE is in this partition. */
224
225 void
226 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
227 symtab_node *node)
228 {
229 int index = lto_symtab_encoder_encode (encoder, node);
230 encoder->nodes[index].in_partition = true;
231 }
232
/* Output the cgraph EDGE to OB using ENCODER.  The field order here must
   match the reader exactly.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* The tag tells the reader whether a callee reference follows.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (ob->main_stream);
  /* Statement UIDs are biased by one so that 0 can mean "no statement";
     edges without a call statement carry lto_stmt_uid instead.  */
  uid = !edge->call_stmt ? edge->lto_stmt_uid
			 : gimple_uid (edge->call_stmt) + 1;
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  /* Only edges whose caller is a thunk may lack a statement UID.  */
  gcc_checking_assert (uid || edge->caller->thunk);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (&bp, edge->speculative_id, 16);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* ECF flags are streamed individually as single bits.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));

      bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
		     16);
    }
  streamer_write_bitpack (&bp);
}
299
300 /* Return if NODE contain references from other partitions. */
301
302 bool
303 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
304 {
305 int i;
306 struct ipa_ref *ref = NULL;
307
308 for (i = 0; node->iterate_referring (i, ref); i++)
309 {
310 /* Ignore references from non-offloadable nodes while streaming NODE into
311 offload LTO section. */
312 if (!ref->referring->need_lto_streaming)
313 continue;
314
315 if (ref->referring->in_other_partition
316 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
317 return true;
318 }
319 return false;
320 }
321
322 /* Return true when node is reachable from other partition. */
323
324 bool
325 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
326 {
327 struct cgraph_edge *e;
328 if (!node->definition)
329 return false;
330 if (node->inlined_to)
331 return false;
332 for (e = node->callers; e; e = e->next_caller)
333 {
334 /* Ignore references from non-offloadable nodes while streaming NODE into
335 offload LTO section. */
336 if (!e->caller->need_lto_streaming)
337 continue;
338
339 if (e->caller->in_other_partition
340 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
341 return true;
342 }
343 return false;
344 }
345
346 /* Return if NODE contain references from other partitions. */
347
348 bool
349 referenced_from_this_partition_p (symtab_node *node,
350 lto_symtab_encoder_t encoder)
351 {
352 int i;
353 struct ipa_ref *ref = NULL;
354
355 for (i = 0; node->iterate_referring (i, ref); i++)
356 if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
357 return true;
358 return false;
359 }
360
361 /* Return true when node is reachable from other partition. */
362
363 bool
364 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
365 {
366 struct cgraph_edge *e;
367 for (e = node->callers; e; e = e->next_caller)
368 if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
369 return true;
370 return false;
371 }
372
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Analyzed nodes get the "analyzed" tag; boundary nodes are demoted to
     unavailable unless they are aliases or non-inlined thunks.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promptly done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the nearest ancestor on the clone chain that is present in the
     encoder; clones of out-of-partition clones are re-parented there.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     a declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);

  lto_output_fn_decl_ref (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* Reference to the function this one is inlined into, or
	 LCC_NOT_FOUND for toplevel bodies.  */
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Stream the first other member of the comdat group that is
	     present in the encoder, if any.  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->versionable, 1);
  bp_pack_value (&bp, node->can_change_signature, 1);
  bp_pack_value (&bp, node->redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* used_from_other_partition: only meaningful for partitioned analyzed
     nodes that other partitions call or reference.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->merged_comdat, 1);
  bp_pack_value (&bp, node->merged_extern_inline, 1);
  bp_pack_value (&bp, node->thunk, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_value (&bp, node->declare_variant_alt, 1);
  bp_pack_value (&bp, node->calls_declare_variant_alt, 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  struct thunk_info *thunk = node->definition ? thunk_info::get (node) : NULL;

  bp_pack_value (&bp, thunk != NULL, 1);

  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  streamer_write_hwi_stream (ob->main_stream, node->unit_id);
  /* Ctor/dtor priorities are streamed only when the decl needs them.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());

  if (thunk)
    thunk_info::get (node)->stream_out (ob);
}
572
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
    = (node->definition
       && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_ref (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* body_removed is also forced when a (non-alias) definition's
     initializer is deliberately not streamed into this partition.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  /* definition is only streamed as true when the reader will actually
     have the initializer (or the node is an alias).  */
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      /* used_from_other_partition.  */
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
      /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->dynamically_initialized, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group is streamed as a nul-terminated string; "" means no
     group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  ref = LCC_NOT_FOUND;
	  /* Stream the first other member of the comdat group that is
	     present in the encoder, if any.  */
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
667
/* Output the IPA reference REF to OB using ENCODER.  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  /* Statement UIDs are biased by one so that 0 can mean "no statement".  */
  int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* Statement UID and speculative id are only streamed for references
     whose referring symbol is a function.  */
  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
      /* BP was flushed above; NOTE(review): this reuse assumes
	 streamer_write_bitpack resets the bitpack — confirm against
	 data-streamer.h.  */
      bp_pack_value (&bp, ref->speculative_id, 16);
      streamer_write_bitpack (&bp);
    }
}
698
/* Stream out profile_summary to OB.  */

static void
output_profile_summary (struct lto_simple_output_block *ob)
{
  if (profile_info)
    {
      /* We do not output num and run_max, they are not used by
	 GCC profile feedback and they are difficult to merge from multiple
	 units.  */
      unsigned runs = (profile_info->runs);
      streamer_write_uhwi_stream (ob->main_stream, runs);

      /* IPA-profile computes hot bb threshold based on cumulated
	 whole program profile.  We need to stream it down to ltrans.  */
      if (flag_wpa)
	streamer_write_gcov_count_stream (ob->main_stream,
					  get_hot_bb_threshold ());
    }
  else
    /* A zero run count marks the absence of profile data.  */
    streamer_write_uhwi_stream (ob->main_stream, 0);
}
721
722 /* Output all callees or indirect outgoing edges. EDGE must be the first such
723 edge. */
724
725 static void
726 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
727 struct lto_simple_output_block *ob,
728 lto_symtab_encoder_t encoder)
729 {
730 if (!edge)
731 return;
732
733 /* Output edges in backward direction, so the reconstructed callgraph match
734 and it is easy to associate call sites in the IPA pass summaries. */
735 while (edge->next_callee)
736 edge = edge->next_callee;
737 for (; edge; edge = edge->prev_callee)
738 lto_output_edge (ob, edge, encoder);
739 }
740
/* Output the IPA references of all nodes in ENCODER into the
   LTO_section_refs section.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);

      /* IPA_REF_ALIAS references are always preserved
	 in the boundary.  Alias node can't have other references and
	 can be always handled as if it's not in the boundary.  */
      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
	continue;

      count = node->ref_list.nreferences ();
      if (count)
	{
	  /* Each record is: reference count, node index, then the
	     individual references.  */
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	if (cnode->declare_variant_alt)
	  omp_lto_output_declare_variant_alt (ob, cnode, encoder);
    }

  /* A zero count terminates the list of records.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
780
/* Add NODE into ENCODER as well as the nodes it is cloned from, walking
   the clone_of chain first so that every clone's origin is encoded
   before the clone itself.  When INCLUDE_BODY is true, the body of the
   root of the clone chain is marked for streaming.  */

static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
	     bool include_body)
{
  if (node->clone_of)
    add_node_to (encoder, node->clone_of, include_body);
  else if (include_body)
    lto_set_symtab_encoder_encode_body (encoder, node);
  lto_symtab_encoder_encode (encoder, node);
}
794
795 /* Add all references in NODE to encoders. */
796
797 static void
798 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
799 {
800 int i;
801 struct ipa_ref *ref = NULL;
802 for (i = 0; node->iterate_reference (i, ref); i++)
803 if (is_a <cgraph_node *> (ref->referred))
804 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
805 else
806 lto_symtab_encoder_encode (encoder, ref->referred);
807 }
808
809 /* Select what needs to be streamed out. In regular lto mode stream everything.
810 In offload lto mode stream only nodes marked as offloadable. */
811 void
812 select_what_to_stream (void)
813 {
814 struct symtab_node *snode;
815 FOR_EACH_SYMBOL (snode)
816 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
817 }
818
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		    && (!DECL_VIRTUAL_P (vnode->decl)
			|| !flag_wpa
			|| flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* The cache token identifies the target set; process each
		 distinct set only once.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declaration into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
				 (encoder, callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias target and thunk callees.  These need
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk && !cnode->inlined_to)
	add_node_to (encoder, cnode->callees->callee, false);
      /* Chase transparent aliases down to their ultimate target.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (node))
	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
			 false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
954
/* Output the part of the symtab in SET and VSET.  */

void
output_symtab (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob;
  int i, n_nodes;
  lto_symtab_encoder_t encoder;

  if (flag_wpa)
    output_cgraph_opt_summary ();

  ob = lto_create_simple_output_block (LTO_section_symtab_nodes);

  output_profile_summary (ob);

  /* An encoder for cgraph nodes should have been created by
     ipa_write_summaries_1.  */
  gcc_assert (ob->decl_state->symtab_node_encoder);
  encoder = ob->decl_state->symtab_node_encoder;

  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at a time reading back the node there would be nothing to clone
     from.  */
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	lto_output_node (ob, cnode, encoder);
      else
	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
    }

  /* Go over the nodes in SET again to write edges.  Edges of thunks are
     streamed even for boundary nodes.  */
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
      if (node
	  && ((node->thunk && !node->inlined_to)
	      || lto_symtab_encoder_in_partition_p (encoder, node)))
	{
	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
	}
    }

  /* A zero tag terminates the node/edge stream.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);

  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not partition asm
     nodes at all, output them to first output.  This is kind of hack, but should work
     well.  */
  if (!asm_nodes_output)
    {
      asm_nodes_output = true;
      lto_output_toplevel_asms ();
    }

  output_refs (encoder);
}
1019
1020 /* Return identifier encoded in IB as a plain string. */
1021
1022 static tree
1023 read_identifier (class lto_input_block *ib)
1024 {
1025 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1026 tree id;
1027
1028 if (ib->data[ib->p + len])
1029 lto_section_overrun (ib);
1030 if (!len)
1031 {
1032 ib->p++;
1033 return NULL;
1034 }
1035 id = get_identifier (ib->data + ib->p);
1036 ib->p += len + 1;
1037 return id;
1038 }
1039
1040 /* Return string encoded in IB, NULL if string is empty. */
1041
1042 static const char *
1043 read_string (class lto_input_block *ib)
1044 {
1045 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1046 const char *str;
1047
1048 if (ib->data[ib->p + len])
1049 lto_section_overrun (ib);
1050 if (!len)
1051 {
1052 ib->p++;
1053 return NULL;
1054 }
1055 str = ib->data + ib->p;
1056 ib->p += len + 1;
1057 return str;
1058 }
1059
/* Output function/variable tables that will allow libgomp to look up offload
   target code.
   OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
   varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
   both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */

void
output_offload_tables (void)
{
  /* Nothing to stream when no offloaded entities were recorded.  */
  if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
    return;

  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_offload_table);

  for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
    {
      symtab_node *node = symtab_node::get ((*offload_funcs)[i]);
      /* The decl may have no symtab node, e.g. when it was optimized away
	 earlier; skip it rather than streaming a dangling reference.  */
      if (!node)
	continue;
      /* Keep the function alive; the host table references it.  */
      node->force_output = true;
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
      lto_output_fn_decl_ref (ob->decl_state, ob->main_stream,
			      (*offload_funcs)[i]);
    }

  for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
    {
      symtab_node *node = symtab_node::get ((*offload_vars)[i]);
      if (!node)
	continue;
      node->force_output = true;
      streamer_write_enum (ob->main_stream, LTO_symtab_tags,
			   LTO_symtab_last_tag, LTO_symtab_variable);
      lto_output_var_decl_ref (ob->decl_state, ob->main_stream,
			       (*offload_vars)[i]);
    }

  /* A zero tag terminates the table.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);
  lto_destroy_simple_output_block (ob);

  /* In WHOPR mode during the WPA stage the joint offload tables need to be
     streamed to one partition only.  That's why we free offload_funcs and
     offload_vars after the first call of output_offload_tables.  */
  if (flag_wpa)
    {
      vec_free (offload_funcs);
      vec_free (offload_vars);
    }
}
1111
/* Verify the partitioning of NODE.  During LTRANS partition flags are
   expected, so there is nothing to check.  On the accel (offload) compiler a
   node marked in_other_partition means it was referenced from offloaded code
   without being marked for offloading — report that as a user error.  On the
   host compiler no partition flags may be set at all at this point.  */

static inline void
verify_node_partition (symtab_node *node)
{
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "function %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else
	gcc_unreachable ();
    }
#else
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1141
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.  Initialize HAS_THUNK_INFO to indicate if thunk info should
   be streamed in.

   NOTE: the bp_unpack_value calls below must stay in exactly the order the
   writer packed them; reordering silently corrupts every flag that
   follows.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp, bool *has_thunk_info)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it to detect duplicates and
     clears it during fixup.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->versionable = bp_unpack_value (bp, 1);
  node->can_change_signature = bp_unpack_value (bp, 1);
  node->redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* ANALYZED is derived from the tag, not streamed as a bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives in another partition; make the decl external here.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->symver = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->merged_comdat = bp_unpack_value (bp, 1);
  node->merged_extern_inline = bp_unpack_value (bp, 1);
  node->thunk = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->declare_variant_alt = bp_unpack_value (bp, 1);
  node->calls_declare_variant_alt = bp_unpack_value (bp, 1);
  *has_thunk_info = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, 1);
  verify_node_partition (node);
}
1213
1214 /* Return string alias is alias of. */
1215
1216 static tree
1217 get_alias_symbol (tree decl)
1218 {
1219 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1220 return get_identifier (TREE_STRING_POINTER
1221 (TREE_VALUE (TREE_VALUE (alias))));
1222 }
1223
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  NODES is the vector of nodes read
   so far, used to resolve the clone-of reference.

   NOTE: the streamer_read_* calls must stay in exactly the order the writer
   emitted them.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);
  bool has_thunk_info;

  fn_decl = lto_input_fn_decl_ref (ib, file_data);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* This node is a clone of an earlier node in the stream; materialize
	 it as such so the clone machinery links them up.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	profile_count::uninitialized (), false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcibly create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied to this
     node's body at materialization time.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp, &has_thunk_info);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
  if (symtab->max_unit < node->unit_id)
    symtab->max_unit = node->unit_id;
  /* Ctor/dtor priorities are streamed only for decls that have them.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  if (has_thunk_info)
    thunk_info::get_create (node)->stream_in (ib);

  return node;
}
1338
1339 /* Read a node from input_block IB. TAG is the node's tag just read.
1340 Return the node read or overwriten. */
1341
1342 static varpool_node *
1343 input_varpool_node (struct lto_file_decl_data *file_data,
1344 class lto_input_block *ib)
1345 {
1346 tree var_decl;
1347 varpool_node *node;
1348 struct bitpack_d bp;
1349 int ref = LCC_NOT_FOUND;
1350 int order;
1351 tree group;
1352 const char *section;
1353
1354 order = streamer_read_hwi (ib) + file_data->order_base;
1355 var_decl = lto_input_var_decl_ref (ib, file_data);
1356
1357 /* Declaration of functions can be already merged with a declaration
1358 from other input file. We keep cgraph unmerged until after streaming
1359 of ipa passes is done. Alays forcingly create a fresh node. */
1360 node = varpool_node::create_empty ();
1361 node->decl = var_decl;
1362 node->register_symbol ();
1363
1364 node->order = order;
1365 if (order >= symtab->order)
1366 symtab->order = order + 1;
1367 node->lto_file_data = file_data;
1368
1369 bp = streamer_read_bitpack (ib);
1370 node->externally_visible = bp_unpack_value (&bp, 1);
1371 node->no_reorder = bp_unpack_value (&bp, 1);
1372 node->force_output = bp_unpack_value (&bp, 1);
1373 node->forced_by_abi = bp_unpack_value (&bp, 1);
1374 node->unique_name = bp_unpack_value (&bp, 1);
1375 node->body_removed = bp_unpack_value (&bp, 1);
1376 node->implicit_section = bp_unpack_value (&bp, 1);
1377 node->writeonly = bp_unpack_value (&bp, 1);
1378 node->definition = bp_unpack_value (&bp, 1);
1379 node->alias = bp_unpack_value (&bp, 1);
1380 node->transparent_alias = bp_unpack_value (&bp, 1);
1381 node->weakref = bp_unpack_value (&bp, 1);
1382 node->symver = bp_unpack_value (&bp, 1);
1383 node->analyzed = bp_unpack_value (&bp, 1);
1384 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1385 node->in_other_partition = bp_unpack_value (&bp, 1);
1386 if (node->in_other_partition)
1387 {
1388 DECL_EXTERNAL (node->decl) = 1;
1389 TREE_STATIC (node->decl) = 0;
1390 }
1391 if (node->alias && !node->analyzed && node->weakref)
1392 node->alias_target = get_alias_symbol (node->decl);
1393 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1394 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1395 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1396 group = read_identifier (ib);
1397 if (group)
1398 {
1399 node->set_comdat_group (group);
1400 ref = streamer_read_hwi (ib);
1401 /* Store a reference for now, and fix up later to be a pointer. */
1402 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1403 }
1404 else
1405 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1406 section = read_string (ib);
1407 if (section)
1408 node->set_section_for_node (section);
1409 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1410 LDPR_NUM_KNOWN);
1411 verify_node_partition (node);
1412 return node;
1413 }
1414
1415 /* Read a node from input_block IB. TAG is the node's tag just read.
1416 Return the node read or overwriten. */
1417
1418 static void
1419 input_ref (class lto_input_block *ib,
1420 symtab_node *referring_node,
1421 vec<symtab_node *> nodes)
1422 {
1423 symtab_node *node = NULL;
1424 struct bitpack_d bp;
1425 enum ipa_ref_use use;
1426 bool speculative;
1427 struct ipa_ref *ref;
1428
1429 bp = streamer_read_bitpack (ib);
1430 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1431 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1432 node = nodes[streamer_read_hwi (ib)];
1433 ref = referring_node->create_reference (node, use);
1434 ref->speculative = speculative;
1435 if (is_a <cgraph_node *> (referring_node))
1436 {
1437 ref->lto_stmt_uid = streamer_read_hwi (ib);
1438 bp = streamer_read_bitpack (ib);
1439 ref->speculative_id = bp_unpack_value (&bp, 16);
1440 }
1441 }
1442
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).

   NOTE: the bp_unpack_value calls must stay in the exact order the writer
   packed them.  */

static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id, speculative_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  speculative_id = bp_unpack_value (&bp, 16);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  edge->lto_stmt_uid = stmt_id;
  edge->speculative_id = speculative_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  /* For indirect edges ECF flags of the (unknown) callee are streamed as
     individual bits, followed by the speculative-target count.  */
  if (indirect)
    {
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;

      edge->indirect_info->num_speculative_call_targets
	= bp_unpack_value (&bp, 16);
    }
}
1513
1514
1515 /* Read a cgraph from IB using the info in FILE_DATA. */
1516
1517 static vec<symtab_node *>
1518 input_cgraph_1 (struct lto_file_decl_data *file_data,
1519 class lto_input_block *ib)
1520 {
1521 enum LTO_symtab_tags tag;
1522 vec<symtab_node *> nodes = vNULL;
1523 symtab_node *node;
1524 unsigned i;
1525
1526 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1527 file_data->order_base = symtab->order;
1528 file_data->unit_base = symtab->max_unit + 1;
1529 while (tag)
1530 {
1531 if (tag == LTO_symtab_edge)
1532 input_edge (ib, nodes, false);
1533 else if (tag == LTO_symtab_indirect_edge)
1534 input_edge (ib, nodes, true);
1535 else if (tag == LTO_symtab_variable)
1536 {
1537 node = input_varpool_node (file_data, ib);
1538 nodes.safe_push (node);
1539 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1540 }
1541 else
1542 {
1543 node = input_node (file_data, ib, tag, nodes);
1544 if (node == NULL || node->decl == NULL_TREE)
1545 internal_error ("bytecode stream: found empty cgraph node");
1546 nodes.safe_push (node);
1547 lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1548 }
1549
1550 tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1551 }
1552
1553 lto_input_toplevel_asms (file_data, file_data->order_base);
1554
1555 /* AUX pointers should be all non-zero for function nodes read from the stream. */
1556 if (flag_checking)
1557 {
1558 FOR_EACH_VEC_ELT (nodes, i, node)
1559 gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1560 }
1561 FOR_EACH_VEC_ELT (nodes, i, node)
1562 {
1563 int ref;
1564 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1565 {
1566 ref = (int) (intptr_t) cnode->inlined_to;
1567
1568 /* We share declaration of builtins, so we may read same node twice. */
1569 if (!node->aux)
1570 continue;
1571 node->aux = NULL;
1572
1573 /* Fixup inlined_to from reference to pointer. */
1574 if (ref != LCC_NOT_FOUND)
1575 dyn_cast<cgraph_node *> (node)->inlined_to
1576 = dyn_cast<cgraph_node *> (nodes[ref]);
1577 else
1578 cnode->inlined_to = NULL;
1579 }
1580
1581 ref = (int) (intptr_t) node->same_comdat_group;
1582
1583 /* Fixup same_comdat_group from reference to pointer. */
1584 if (ref != LCC_NOT_FOUND)
1585 node->same_comdat_group = nodes[ref];
1586 else
1587 node->same_comdat_group = NULL;
1588 }
1589 FOR_EACH_VEC_ELT (nodes, i, node)
1590 node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1591 return nodes;
1592 }
1593
1594 /* Input ipa_refs. */
1595
1596 static void
1597 input_refs (class lto_input_block *ib,
1598 vec<symtab_node *> nodes)
1599 {
1600 int count;
1601 int idx;
1602 while (true)
1603 {
1604 symtab_node *node;
1605 count = streamer_read_uhwi (ib);
1606 if (!count)
1607 break;
1608 idx = streamer_read_uhwi (ib);
1609 node = nodes[idx];
1610 while (count)
1611 {
1612 input_ref (ib, node, nodes);
1613 count--;
1614 }
1615 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1616 if (cnode->declare_variant_alt)
1617 omp_lto_input_declare_variant_alt (ib, cnode, nodes);
1618 }
1619 }
1620
1621 /* Input profile_info from IB. */
1622 static void
1623 input_profile_summary (class lto_input_block *ib,
1624 struct lto_file_decl_data *file_data)
1625 {
1626 unsigned int runs = streamer_read_uhwi (ib);
1627 if (runs)
1628 {
1629 file_data->profile_info.runs = runs;
1630
1631 /* IPA-profile computes hot bb threshold based on cumulated
1632 whole program profile. We need to stream it down to ltrans. */
1633 if (flag_ltrans)
1634 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1635 }
1636
1637 }
1638
1639 /* Rescale profile summaries to the same number of runs in the whole unit. */
1640
1641 static void
1642 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1643 {
1644 struct lto_file_decl_data *file_data;
1645 unsigned int j;
1646 gcov_unsigned_t max_runs = 0;
1647 struct cgraph_node *node;
1648 struct cgraph_edge *edge;
1649
1650 /* Find unit with maximal number of runs. If we ever get serious about
1651 roundoff errors, we might also consider computing smallest common
1652 multiply. */
1653 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1654 if (max_runs < file_data->profile_info.runs)
1655 max_runs = file_data->profile_info.runs;
1656
1657 if (!max_runs)
1658 return;
1659
1660 /* Simple overflow check. We probably don't need to support that many train
1661 runs. Such a large value probably imply data corruption anyway. */
1662 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1663 {
1664 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1665 INT_MAX / REG_BR_PROB_BASE);
1666 return;
1667 }
1668
1669 profile_info = XCNEW (gcov_summary);
1670 profile_info->runs = max_runs;
1671
1672 /* If merging already happent at WPA time, we are done. */
1673 if (flag_ltrans)
1674 return;
1675
1676 /* Now compute count_materialization_scale of each node.
1677 During LTRANS we already have values of count_materialization_scale
1678 computed, so just update them. */
1679 FOR_EACH_FUNCTION (node)
1680 if (node->lto_file_data
1681 && node->lto_file_data->profile_info.runs)
1682 {
1683 int scale;
1684
1685 scale = RDIV (node->count_materialization_scale * max_runs,
1686 node->lto_file_data->profile_info.runs);
1687 node->count_materialization_scale = scale;
1688 if (scale < 0)
1689 fatal_error (input_location, "Profile information in %s corrupted",
1690 file_data->file_name);
1691
1692 if (scale == REG_BR_PROB_BASE)
1693 continue;
1694 for (edge = node->callees; edge; edge = edge->next_callee)
1695 if (edge->count.ipa ().nonzero_p ())
1696 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1697 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1698 if (edge->count.ipa ().nonzero_p ())
1699 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1700 if (node->count.ipa ().nonzero_p ())
1701 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1702 }
1703 }
1704
/* Input and merge the symtab from each of the .o files passed to
   lto1.  For each file: profile summary, node/edge records, references and
   (during LTRANS) the optimization summary.  Finally the per-file profile
   summaries are merged and AUX markers cleared.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib;
      vec<symtab_node *> nodes;

      /* Symtab nodes section.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* References section; indices refer into NODES just read.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1762
/* Input function/variable tables that will allow libgomp to look up offload
   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.
   When DO_FORCE_OUTPUT is true, mark each entry force-output so IPA does not
   remove it as unreachable/unused.  Files without an offload table section
   are silently skipped.  */

void
input_offload_tables (bool do_force_output)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
					 &data, &len);
      /* Not every input file carries an offload table.  */
      if (!ib)
	continue;

      enum LTO_symtab_tags tag
	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      while (tag)
	{
	  if (tag == LTO_symtab_unavail_node)
	    {
	      tree fn_decl
		= lto_input_fn_decl_ref (ib, file_data);
	      vec_safe_push (offload_funcs, fn_decl);

	      /* Prevent IPA from removing fn_decl as unreachable, since there
		 may be no refs from the parent function to child_fn in offload
		 LTO mode.  */
	      if (do_force_output)
		cgraph_node::get (fn_decl)->mark_force_output ();
	    }
	  else if (tag == LTO_symtab_variable)
	    {
	      tree var_decl
		= lto_input_var_decl_ref (ib, file_data);
	      vec_safe_push (offload_vars, var_decl);

	      /* Prevent IPA from removing var_decl as unused, since there
		 may be no refs to var_decl in offload LTO mode.  */
	      if (do_force_output)
		varpool_node::get (var_decl)->force_output = 1;
	    }
	  else
	    fatal_error (input_location,
			 "invalid offload table in %s", file_data->file_name);

	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
	}

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
				      ib, data, len);
    }
}
1821
1822 /* True when we need optimization summary for NODE. */
1823
1824 static int
1825 output_cgraph_opt_summary_p (struct cgraph_node *node)
1826 {
1827 if (node->clone_of || node->former_clone_of)
1828 return true;
1829 clone_info *info = clone_info::get (node);
1830 return info && (info->tree_map || info->param_adjustments);
1831 }
1832
/* Output optimization summary for EDGE to OB.  Currently an intentional
   no-op: no per-edge optimization data is streamed, but the hook is kept so
   the call sites mirror the input side (input_edge_opt_summary).  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1839
/* Output optimization summary for NODE to OB: its clone info (parameter
   adjustments and replacement tree map) followed, for in-partition nodes,
   by per-edge summaries.  The input side must read in the same order.  */

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  struct ipa_replace_map *map;
  int i;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation?  */
  struct bitpack_d bp;
  bp = bitpack_create (ob->main_stream);
  clone_info *info = clone_info::get (node);

  /* One bit: do parameter adjustments follow?  */
  bp_pack_value (&bp, (info && info->param_adjustments != NULL), 1);
  streamer_write_bitpack (&bp);
  if (ipa_param_adjustments *adjustments
	= info ? info->param_adjustments : NULL)
    {
      streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
      ipa_adjusted_param *adj;
      FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
	{
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
	  bp_pack_value (&bp, adj->op, 2);
	  bp_pack_value (&bp, adj->param_prefix_index, 2);
	  bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
	  bp_pack_value (&bp, adj->reverse, 1);
	  bp_pack_value (&bp, adj->user_flag, 1);
	  streamer_write_bitpack (&bp);
	  /* Type info is streamed only for split/new params; split params
	     additionally carry the alias pointer type and unit offset.  */
	  if (adj->op == IPA_PARAM_OP_SPLIT
	      || adj->op == IPA_PARAM_OP_NEW)
	    {
	      stream_write_tree (ob, adj->type, true);
	      if (adj->op == IPA_PARAM_OP_SPLIT)
		{
		  stream_write_tree (ob, adj->alias_ptr_type, true);
		  streamer_write_uhwi (ob, adj->unit_offset);
		}
	    }
	}
      streamer_write_hwi (ob, adjustments->m_always_copy_start);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, info->param_adjustments->m_skip_return, 1);
      streamer_write_bitpack (&bp);
    }

  /* Replacement tree map: count, then (parm index, new tree) pairs.  */
  streamer_write_uhwi (ob, info ? vec_safe_length (info->tree_map) : 0);
  if (info)
    FOR_EACH_VEC_SAFE_ELT (info->tree_map, i, map)
      {
	streamer_write_uhwi (ob, map->parm_num);
	gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
	stream_write_tree (ob, map->new_tree, true);
      }

  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}
1908
1909 /* Output optimization summaries stored in callgraph.
1910 At the moment it is the clone info structure. */
1911
1912 static void
1913 output_cgraph_opt_summary (void)
1914 {
1915 int i, n_nodes;
1916 lto_symtab_encoder_t encoder;
1917 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1918 unsigned count = 0;
1919
1920 ob->symbol = NULL;
1921 encoder = ob->decl_state->symtab_node_encoder;
1922 n_nodes = lto_symtab_encoder_size (encoder);
1923 for (i = 0; i < n_nodes; i++)
1924 {
1925 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1926 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1927 if (cnode && output_cgraph_opt_summary_p (cnode))
1928 count++;
1929 }
1930 streamer_write_uhwi (ob, count);
1931 for (i = 0; i < n_nodes; i++)
1932 {
1933 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1934 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1935 if (cnode && output_cgraph_opt_summary_p (cnode))
1936 {
1937 streamer_write_uhwi (ob, i);
1938 output_node_opt_summary (ob, cnode, encoder);
1939 }
1940 }
1941 produce_asm (ob, NULL);
1942 destroy_output_block (ob);
1943 }
1944
1945 /* Input optimisation summary of EDGE. */
1946
1947 static void
1948 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1949 class lto_input_block *ib_main ATTRIBUTE_UNUSED)
1950 {
1951 }
1952
/* Input optimization summary of NODE from IB_MAIN/DATA_IN.  The read
   sequence must mirror output_node_opt_summary exactly: an optional
   ipa_param_adjustments record, then the tree_map replacement list,
   then (empty) per-edge summaries.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			class lto_input_block *ib_main,
			class data_in *data_in)
{
  int i;
  int count;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation? */
  struct bitpack_d bp;
  bp = streamer_read_bitpack (ib_main);
  /* A single flag tells whether a param_adjustments record follows.  */
  bool have_adjustments = bp_unpack_value (&bp, 1);
  clone_info *info = clone_info::get_create (node);

  if (have_adjustments)
    {
      count = streamer_read_uhwi (ib_main);
      vec<ipa_adjusted_param, va_gc> *new_params = NULL;
      for (i = 0; i < count; i++)
	{
	  ipa_adjusted_param adj;
	  memset (&adj, 0, sizeof (adj));
	  bp = streamer_read_bitpack (ib_main);
	  adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.prev_clone_index
	    = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
	  adj.param_prefix_index = bp_unpack_value (&bp, 2);
	  adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
	  adj.reverse = bp_unpack_value (&bp, 1);
	  adj.user_flag = bp_unpack_value (&bp, 1);
	  /* A type was streamed only for SPLIT and NEW adjustments;
	     alias pointer type and offset only for SPLIT — matching the
	     conditional writes on the output side.  */
	  if (adj.op == IPA_PARAM_OP_SPLIT
	      || adj.op == IPA_PARAM_OP_NEW)
	    {
	      adj.type = stream_read_tree (ib_main, data_in);
	      if (adj.op == IPA_PARAM_OP_SPLIT)
		{
		  adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
		  adj.unit_offset = streamer_read_uhwi (ib_main);
		}
	    }
	  vec_safe_push (new_params, adj);
	}
      int always_copy_start = streamer_read_hwi (ib_main);
      bp = streamer_read_bitpack (ib_main);
      bool skip_return = bp_unpack_value (&bp, 1);
      /* Rebuild the adjustments object in GC-managed memory and attach it
	 to NODE's clone info.  */
      info->param_adjustments
	= (new (ggc_alloc <ipa_param_adjustments> ())
	   ipa_param_adjustments (new_params, always_copy_start, skip_return));
    }

  /* Read the parameter-replacement map: pairs of (param number, new
     value tree).  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (info->tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->new_tree = stream_read_tree (ib_main, data_in);
    }
  /* Consume (currently empty) edge summaries in the same order they were
     written: direct callees first, then indirect calls.  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
2021
2022 /* Read section in file FILE_DATA of length LEN with data DATA. */
2023
2024 static void
2025 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2026 const char *data, size_t len,
2027 vec<symtab_node *> nodes)
2028 {
2029 const struct lto_function_header *header =
2030 (const struct lto_function_header *) data;
2031 const int cfg_offset = sizeof (struct lto_function_header);
2032 const int main_offset = cfg_offset + header->cfg_size;
2033 const int string_offset = main_offset + header->main_size;
2034 class data_in *data_in;
2035 unsigned int i;
2036 unsigned int count;
2037
2038 lto_input_block ib_main ((const char *) data + main_offset,
2039 header->main_size, file_data->mode_table);
2040
2041 data_in =
2042 lto_data_in_create (file_data, (const char *) data + string_offset,
2043 header->string_size, vNULL);
2044 count = streamer_read_uhwi (&ib_main);
2045
2046 for (i = 0; i < count; i++)
2047 {
2048 int ref = streamer_read_uhwi (&ib_main);
2049 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2050 &ib_main, data_in);
2051 }
2052 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2053 len);
2054 lto_data_in_delete (data_in);
2055 }
2056
2057 /* Input optimization summary of cgraph. */
2058
2059 static void
2060 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2061 {
2062 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2063 struct lto_file_decl_data *file_data;
2064 unsigned int j = 0;
2065
2066 while ((file_data = file_data_vec[j++]))
2067 {
2068 size_t len;
2069 const char *data
2070 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2071 &len);
2072 if (data)
2073 input_cgraph_opt_section (file_data, data, len, nodes);
2074 }
2075 }