1 /* Interprocedural analyses.
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 #include "options.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58
59 /* Function summary where the parameter infos are actually stored. */
60 ipa_node_params_t *ipa_node_params_sum = NULL;
61
62 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
63
64 /* Edge summary for IPA-CP edge information. */
65 ipa_edge_args_sum_t *ipa_edge_args_sum;
66
67 /* Traits for a hash table for reusing already existing ipa_bits. */
68
69 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
70 {
71 typedef ipa_bits *value_type;
72 typedef ipa_bits *compare_type;
73 static hashval_t
74 hash (const ipa_bits *p)
75 {
76 hashval_t t = (hashval_t) p->value.to_shwi ();
77 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
78 }
79 static bool
80 equal (const ipa_bits *a, const ipa_bits *b)
81 {
82 return a->value == b->value && a->mask == b->mask;
83 }
84 static const bool empty_zero_p = true;
85 static void
86 mark_empty (ipa_bits *&p)
87 {
88 p = NULL;
89 }
90 static bool
91 is_empty (const ipa_bits *p)
92 {
93 return p == NULL;
94 }
95 static bool
96 is_deleted (const ipa_bits *p)
97 {
98 return p == reinterpret_cast<const ipa_bits *> (1);
99 }
100 static void
101 mark_deleted (ipa_bits *&p)
102 {
103 p = reinterpret_cast<ipa_bits *> (1);
104 }
105 };
106
107 /* Hash table for avoiding repeated allocations of equal ipa_bits. */
108 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
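/* Entries in the table are meant to be canonicalized: every value/mask pair
   is allocated once and shared.  A minimal sketch of the lookup-or-insert
   pattern the table is intended for (illustrative only, not tied to a
   particular caller):

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (!*slot)
       {
	 ipa_bits *res = ggc_alloc<ipa_bits> ();
	 res->value = value;
	 res->mask = mask;
	 *slot = res;
       }
     return *slot;  */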
109
110 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
111 the equiv bitmap is not hashed and is expected to be NULL. */
112
113 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
114 {
115 typedef value_range *value_type;
116 typedef value_range *compare_type;
117 static hashval_t
118 hash (const value_range *p)
119 {
120 inchash::hash hstate (p->kind ());
121 inchash::add_expr (p->min (), hstate);
122 inchash::add_expr (p->max (), hstate);
123 return hstate.end ();
124 }
125 static bool
126 equal (const value_range *a, const value_range *b)
127 {
128 return (a->equal_p (*b)
129 && types_compatible_p (a->type (), b->type ()));
130 }
131 static const bool empty_zero_p = true;
132 static void
133 mark_empty (value_range *&p)
134 {
135 p = NULL;
136 }
137 static bool
138 is_empty (const value_range *p)
139 {
140 return p == NULL;
141 }
142 static bool
143 is_deleted (const value_range *p)
144 {
145 return p == reinterpret_cast<const value_range *> (1);
146 }
147 static void
148 mark_deleted (value_range *&p)
149 {
150 p = reinterpret_cast<value_range *> (1);
151 }
152 };
153
154 /* Hash table for avoiding repeated allocations of equal value_ranges. */
155 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
156
157 /* Holders of ipa cgraph hooks: */
158 static struct cgraph_node_hook_list *function_insertion_hook_holder;
159
160 /* Description of a reference to an IPA constant. */
161 struct ipa_cst_ref_desc
162 {
163 /* Edge that corresponds to the statement which took the reference. */
164 struct cgraph_edge *cs;
165 /* Linked list of duplicates created when call graph edges are cloned. */
166 struct ipa_cst_ref_desc *next_duplicate;
167 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
168 is out of control. */
169 int refcount;
170 };
171
172 /* Allocation pool for reference descriptions. */
173
174 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
175 ("IPA-PROP ref descriptions");
176
177 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
178 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
179
180 static bool
181 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
182 {
183 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
184
185 if (!fs_opts)
186 return false;
187 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
188 }
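/* As an illustration (hypothetical example), a function declared as

     __attribute__ ((optimize ("O0"))) int keep_as_is (int x) { return x; }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize == 0, so the
   predicate above returns true and IPA-CP leaves the function alone.  The
   same holds when only -fipa-cp is disabled for the function. */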
189
190 /* Return the index of the formal parameter whose tree is PTREE among the
191 parameter DESCRIPTORS. Return -1 if it is not found. */
192
193 static int
194 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
195 tree ptree)
196 {
197 int i, count;
198
199 count = vec_safe_length (descriptors);
200 for (i = 0; i < count; i++)
201 if ((*descriptors)[i].decl_or_type == ptree)
202 return i;
203
204 return -1;
205 }
206
207 /* Return the index of the formal parameter whose tree is PTREE in the function
208 which corresponds to INFO. Return -1 if it is not found. */
209
210 int
211 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
212 {
213 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
214 }
215
216 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond to
217 NODE. */
218
219 static void
220 ipa_populate_param_decls (struct cgraph_node *node,
221 vec<ipa_param_descriptor, va_gc> &descriptors)
222 {
223 tree fndecl;
224 tree fnargs;
225 tree parm;
226 int param_num;
227
228 fndecl = node->decl;
229 gcc_assert (gimple_has_body_p (fndecl));
230 fnargs = DECL_ARGUMENTS (fndecl);
231 param_num = 0;
232 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
233 {
234 descriptors[param_num].decl_or_type = parm;
235 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
236 descriptors[param_num].move_cost = cost;
237 /* Watch overflow, move_cost is a bitfield. */
238 gcc_checking_assert (cost == descriptors[param_num].move_cost);
239 param_num++;
240 }
241 }
242
243 /* Return how many formal parameters FNDECL has. */
244
245 int
246 count_formal_params (tree fndecl)
247 {
248 tree parm;
249 int count = 0;
250 gcc_assert (gimple_has_body_p (fndecl));
251
252 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
253 count++;
254
255 return count;
256 }
257
258 /* Dump a textual representation of the Ith formal parameter of the function
259 corresponding to INFO to FILE. Note there is no setter function as the
260 descriptor array is built just once using ipa_initialize_node_params. */
261
262 void
263 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
264 {
265 fprintf (file, "param #%i", i);
266 if ((*info->descriptors)[i].decl_or_type)
267 {
268 fprintf (file, " ");
269 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
270 }
271 }
272
273 /* If necessary, allocate vector of parameter descriptors in info of NODE.
274 Return true if they were allocated, false if not. */
275
276 static bool
277 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
278 {
279 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
280
281 if (!info->descriptors && param_count)
282 {
283 vec_safe_grow_cleared (info->descriptors, param_count, true);
284 return true;
285 }
286 else
287 return false;
288 }
289
290 /* Initialize the ipa_node_params structure associated with NODE by counting
291 the function parameters, creating the descriptors and populating their
292 param_decls. */
293
294 void
295 ipa_initialize_node_params (struct cgraph_node *node)
296 {
297 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
298
299 if (!info->descriptors
300 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
301 ipa_populate_param_decls (node, *info->descriptors);
302 }
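/* A typical (illustrative) way analysis code uses the descriptors built
   above is

     ipa_initialize_node_params (node);
     class ipa_node_params *info = IPA_NODE_REF (node);
     int idx = ipa_get_param_decl_index (info, parm);

   where IDX ends up being -1 if PARM is not among the formal parameters
   of NODE. */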
303
304 /* Print the jump functions associated with call graph edge CS to file F. */
305
306 static void
307 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
308 {
309 int i, count;
310
311 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
312 for (i = 0; i < count; i++)
313 {
314 struct ipa_jump_func *jump_func;
315 enum jump_func_type type;
316
317 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
318 type = jump_func->type;
319
320 fprintf (f, " param %d: ", i);
321 if (type == IPA_JF_UNKNOWN)
322 fprintf (f, "UNKNOWN\n");
323 else if (type == IPA_JF_CONST)
324 {
325 tree val = jump_func->value.constant.value;
326 fprintf (f, "CONST: ");
327 print_generic_expr (f, val);
328 if (TREE_CODE (val) == ADDR_EXPR
329 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
330 {
331 fprintf (f, " -> ");
332 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
333 }
334 fprintf (f, "\n");
335 }
336 else if (type == IPA_JF_PASS_THROUGH)
337 {
338 fprintf (f, "PASS THROUGH: ");
339 fprintf (f, "%d, op %s",
340 jump_func->value.pass_through.formal_id,
341 get_tree_code_name(jump_func->value.pass_through.operation));
342 if (jump_func->value.pass_through.operation != NOP_EXPR)
343 {
344 fprintf (f, " ");
345 print_generic_expr (f, jump_func->value.pass_through.operand);
346 }
347 if (jump_func->value.pass_through.agg_preserved)
348 fprintf (f, ", agg_preserved");
349 fprintf (f, "\n");
350 }
351 else if (type == IPA_JF_ANCESTOR)
352 {
353 fprintf (f, "ANCESTOR: ");
354 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
355 jump_func->value.ancestor.formal_id,
356 jump_func->value.ancestor.offset);
357 if (jump_func->value.ancestor.agg_preserved)
358 fprintf (f, ", agg_preserved");
359 fprintf (f, "\n");
360 }
361
362 if (jump_func->agg.items)
363 {
364 struct ipa_agg_jf_item *item;
365 int j;
366
367 fprintf (f, " Aggregate passed by %s:\n",
368 jump_func->agg.by_ref ? "reference" : "value");
369 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
370 {
371 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
372 item->offset);
373 fprintf (f, "type: ");
374 print_generic_expr (f, item->type);
375 fprintf (f, ", ");
376 if (item->jftype == IPA_JF_PASS_THROUGH)
377 fprintf (f, "PASS THROUGH: %d,",
378 item->value.pass_through.formal_id);
379 else if (item->jftype == IPA_JF_LOAD_AGG)
380 {
381 fprintf (f, "LOAD AGG: %d",
382 item->value.pass_through.formal_id);
383 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
384 item->value.load_agg.offset,
385 item->value.load_agg.by_ref ? "reference"
386 : "value");
387 }
388
389 if (item->jftype == IPA_JF_PASS_THROUGH
390 || item->jftype == IPA_JF_LOAD_AGG)
391 {
392 fprintf (f, " op %s",
393 get_tree_code_name (item->value.pass_through.operation));
394 if (item->value.pass_through.operation != NOP_EXPR)
395 {
396 fprintf (f, " ");
397 print_generic_expr (f, item->value.pass_through.operand);
398 }
399 }
400 else if (item->jftype == IPA_JF_CONST)
401 {
402 fprintf (f, "CONST: ");
403 print_generic_expr (f, item->value.constant);
404 }
405 else if (item->jftype == IPA_JF_UNKNOWN)
406 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
407 tree_to_uhwi (TYPE_SIZE (item->type)));
408 fprintf (f, "\n");
409 }
410 }
411
412 class ipa_polymorphic_call_context *ctx
413 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
414 if (ctx && !ctx->useless_p ())
415 {
416 fprintf (f, " Context: ");
417 ctx->dump (f);
418 }
419
420 if (jump_func->bits)
421 {
422 fprintf (f, " value: ");
423 print_hex (jump_func->bits->value, f);
424 fprintf (f, ", mask: ");
425 print_hex (jump_func->bits->mask, f);
426 fprintf (f, "\n");
427 }
428 else
429 fprintf (f, " Unknown bits\n");
430
431 if (jump_func->m_vr)
432 {
433 fprintf (f, " VR ");
434 fprintf (f, "%s[",
435 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
436 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
437 fprintf (f, ", ");
438 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
439 fprintf (f, "]\n");
440 }
441 else
442 fprintf (f, " Unknown VR\n");
443 }
444 }
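/* For orientation, with the format strings above a dumped argument that is a
   plain pass-through of the caller's first parameter looks roughly like

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
         Unknown bits
         Unknown VR

   (an approximate illustration; the exact lines depend on which jump
   function information is available for the edge). */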
445
446
447 /* Print the jump functions of all arguments on all call graph edges going from
448 NODE to file F. */
449
450 void
451 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
452 {
453 struct cgraph_edge *cs;
454
455 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
456 for (cs = node->callees; cs; cs = cs->next_callee)
457 {
458
459 fprintf (f, " callsite %s -> %s : \n",
460 node->dump_name (),
461 cs->callee->dump_name ());
462 if (!ipa_edge_args_info_available_for_edge_p (cs))
463 fprintf (f, " no arg info\n");
464 else
465 ipa_print_node_jump_functions_for_edge (f, cs);
466 }
467
468 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
469 {
470 class cgraph_indirect_call_info *ii;
471
472 ii = cs->indirect_info;
473 if (ii->agg_contents)
474 fprintf (f, " indirect %s callsite, calling param %i, "
475 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
476 ii->member_ptr ? "member ptr" : "aggregate",
477 ii->param_index, ii->offset,
478 ii->by_ref ? "by reference" : "by value");
479 else
480 fprintf (f, " indirect %s callsite, calling param %i, "
481 "offset " HOST_WIDE_INT_PRINT_DEC,
482 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
483 ii->offset);
484
485 if (cs->call_stmt)
486 {
487 fprintf (f, ", for stmt ");
488 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
489 }
490 else
491 fprintf (f, "\n");
492 if (ii->polymorphic)
493 ii->context.dump (f);
494 if (!ipa_edge_args_info_available_for_edge_p (cs))
495 fprintf (f, " no arg info\n");
496 else
497 ipa_print_node_jump_functions_for_edge (f, cs);
498 }
499 }
500
501 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
502
503 void
504 ipa_print_all_jump_functions (FILE *f)
505 {
506 struct cgraph_node *node;
507
508 fprintf (f, "\nJump functions:\n");
509 FOR_EACH_FUNCTION (node)
510 {
511 ipa_print_node_jump_functions (f, node);
512 }
513 }
514
515 /* Set JFUNC to be an unknown jump function that carries no information at all. */
516
517 static void
518 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
519 {
520 jfunc->type = IPA_JF_UNKNOWN;
521 }
522
523 /* Set DST to be a copy of the constant jump function SRC (to be used by the
524 jump function combination code). The two functions will share their rdesc. */
525
526 static void
527 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
528 struct ipa_jump_func *src)
529
530 {
531 gcc_checking_assert (src->type == IPA_JF_CONST);
532 dst->type = IPA_JF_CONST;
533 dst->value.constant = src->value.constant;
534 }
535
536 /* Set JFUNC to be a constant jump function. */
537
538 static void
539 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
540 struct cgraph_edge *cs)
541 {
542 jfunc->type = IPA_JF_CONST;
543 jfunc->value.constant.value = unshare_expr_without_location (constant);
544
545 if (TREE_CODE (constant) == ADDR_EXPR
546 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
547 {
548 struct ipa_cst_ref_desc *rdesc;
549
550 rdesc = ipa_refdesc_pool.allocate ();
551 rdesc->cs = cs;
552 rdesc->next_duplicate = NULL;
553 rdesc->refcount = 1;
554 jfunc->value.constant.rdesc = rdesc;
555 }
556 else
557 jfunc->value.constant.rdesc = NULL;
558 }
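/* For example (illustrative), in a caller containing

     use_callback (&compare);

   the argument becomes an IPA_JF_CONST jump function holding the ADDR_EXPR
   of compare, and because the operand is the address of a FUNCTION_DECL, a
   reference description is allocated so that the reference can be tracked
   and dropped if the jump function is ever removed. */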
559
560 /* Set JFUNC to be a simple pass-through jump function. */
561 static void
562 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
563 bool agg_preserved)
564 {
565 jfunc->type = IPA_JF_PASS_THROUGH;
566 jfunc->value.pass_through.operand = NULL_TREE;
567 jfunc->value.pass_through.formal_id = formal_id;
568 jfunc->value.pass_through.operation = NOP_EXPR;
569 jfunc->value.pass_through.agg_preserved = agg_preserved;
570 }
571
572 /* Set JFUNC to be a unary pass-through jump function. */
573
574 static void
575 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
576 enum tree_code operation)
577 {
578 jfunc->type = IPA_JF_PASS_THROUGH;
579 jfunc->value.pass_through.operand = NULL_TREE;
580 jfunc->value.pass_through.formal_id = formal_id;
581 jfunc->value.pass_through.operation = operation;
582 jfunc->value.pass_through.agg_preserved = false;
583 }
584 /* Set JFUNC to be an arithmetic pass through jump function. */
585
586 static void
587 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
588 tree operand, enum tree_code operation)
589 {
590 jfunc->type = IPA_JF_PASS_THROUGH;
591 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
592 jfunc->value.pass_through.formal_id = formal_id;
593 jfunc->value.pass_through.operation = operation;
594 jfunc->value.pass_through.agg_preserved = false;
595 }
596
597 /* Set JFUNC to be an ancestor jump function. */
598
599 static void
600 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
601 int formal_id, bool agg_preserved)
602 {
603 jfunc->type = IPA_JF_ANCESTOR;
604 jfunc->value.ancestor.formal_id = formal_id;
605 jfunc->value.ancestor.offset = offset;
606 jfunc->value.ancestor.agg_preserved = agg_preserved;
607 }
608
609 /* Get IPA BB information about the given BB. FBI is the context of analysis
610 of this function body. */
611
612 static struct ipa_bb_info *
613 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
614 {
615 gcc_checking_assert (fbi);
616 return &fbi->bb_infos[bb->index];
617 }
618
619 /* Structure to be passed in between detect_type_change and
620 check_stmt_for_type_change. */
621
622 struct prop_type_change_info
623 {
624 /* Offset into the object where there is the virtual method pointer we are
625 looking for. */
626 HOST_WIDE_INT offset;
627 /* The declaration or SSA_NAME pointer of the base that we are checking for
628 type change. */
629 tree object;
630 /* Set to true if dynamic type change has been detected. */
631 bool type_maybe_changed;
632 };
633
634 /* Return true if STMT can modify a virtual method table pointer.
635
636 This function makes special assumptions about both constructors and
637 destructors which are all the functions that are allowed to alter the VMT
638 pointers. It assumes that destructors begin with assignment into all VMT
639 pointers and that constructors essentially look in the following way:
640
641 1) The very first thing they do is that they call constructors of ancestor
642 sub-objects that have them.
643
644 2) Then VMT pointers of this and all its ancestors are set to new values
645 corresponding to the type corresponding to the constructor.
646
647 3) Only afterwards, other stuff such as constructor of member sub-objects
648 and the code written by the user is run. Only this may include calling
649 virtual functions, directly or indirectly.
650
651 There is no way to call a constructor of an ancestor sub-object in any
652 other way.
653
654 This means that we do not have to care whether constructors get the correct
655 type information because they will always change it (in fact, if we define
656 the type to be given by the VMT pointer, it is undefined).
657
658 The most important fact to derive from the above is that if, for some
659 statement in the section 3, we try to detect whether the dynamic type has
660 changed, we can safely ignore all calls as we examine the function body
661 backwards until we reach statements in section 2 because these calls cannot
662 be ancestor constructors or destructors (if the input is not bogus) and so
663 do not change the dynamic type (this holds true only for automatically
664 allocated objects but at the moment we devirtualize only these). We then
665 must detect that statements in section 2 change the dynamic type and can try
666 to derive the new type. That is enough and we can stop, we will never see
667 the calls into constructors of sub-objects in this code. Therefore we can
668 safely ignore all call statements that we traverse.
669 */
670
671 static bool
672 stmt_may_be_vtbl_ptr_store (gimple *stmt)
673 {
674 if (is_gimple_call (stmt))
675 return false;
676 if (gimple_clobber_p (stmt))
677 return false;
678 else if (is_gimple_assign (stmt))
679 {
680 tree lhs = gimple_assign_lhs (stmt);
681
682 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
683 {
684 if (flag_strict_aliasing
685 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
686 return false;
687
688 if (TREE_CODE (lhs) == COMPONENT_REF
689 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
690 return false;
691 /* In the future we might want to use get_ref_base_and_extent to find
692 if there is a field corresponding to the offset and if so, proceed
693 almost like if it was a component ref. */
694 }
695 }
696 return true;
697 }
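/* A statement that this predicate is meant to flag is the vtable pointer
   initialization emitted in constructors, which in GIMPLE dumps looks
   roughly like

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   the LHS is a non-aggregate, pointer-typed COMPONENT_REF whose FIELD_DECL
   is DECL_VIRTUAL_P, so the function returns true for it (an illustrative
   example; the exact form depends on the class and the target). */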
698
699 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
700 to check whether a particular statement may modify the virtual table
701 pointer. It stores its result into DATA, which points to a
702 prop_type_change_info structure. */
703
704 static bool
705 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
706 {
707 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
708 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
709
710 if (stmt_may_be_vtbl_ptr_store (stmt))
711 {
712 tci->type_maybe_changed = true;
713 return true;
714 }
715 else
716 return false;
717 }
718
719 /* See if ARG is a PARM_DECL describing an instance passed by pointer
720 or reference in FUNCTION. Return true if the dynamic type may change
721 between the beginning of the function and the point where CALL is invoked.
722 
723 Generally functions are not allowed to change the dynamic type of such
724 instances, but they may call destructors. We assume that methods cannot
725 destroy the THIS pointer. Also, as a special case, constructors and
726 destructors may change the type of the THIS pointer. */
727
728 static bool
729 param_type_may_change_p (tree function, tree arg, gimple *call)
730 {
731 /* Pure functions cannot do any changes on the dynamic type;
732 that would require writing to memory. */
733 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
734 return false;
735 /* We need to check if we are within an inlined constructor
736 or destructor (ideally we would have a way to check that the
737 inlined cdtor is actually working on ARG, but we do not have
738 an easy way to do that, so we punt on all non-pure cdtors).
739 We may also record the types of cdtors and, once we know the type
740 of the instance, match them.
741
742 Also code unification optimizations may merge calls from
743 different blocks making return values unreliable. So
744 do nothing during late optimization. */
745 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
746 return true;
747 if (TREE_CODE (arg) == SSA_NAME
748 && SSA_NAME_IS_DEFAULT_DEF (arg)
749 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
750 {
751 /* Normal (non-THIS) argument. */
752 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
753 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
754 /* THIS pointer of a method - here we want to watch constructors
755 and destructors as those definitely may change the dynamic
756 type. */
757 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
758 && !DECL_CXX_CONSTRUCTOR_P (function)
759 && !DECL_CXX_DESTRUCTOR_P (function)
760 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
761 {
762 /* Walk the inline stack and watch out for ctors/dtors. */
763 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
764 block = BLOCK_SUPERCONTEXT (block))
765 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
766 return true;
767 return false;
768 }
769 }
770 return true;
771 }
772
773 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
774 callsite CALL) by looking for assignments to its virtual table pointer. If
775 it is, return true. ARG is the object itself (not a pointer
776 to it, unless dereferenced). BASE is the base of the memory access as
777 returned by get_ref_base_and_extent, as is the offset.
778
779 This is a helper function for detect_type_change and detect_type_change_ssa
780 that does the heavy work, which is usually unnecessary. */
781
782 static bool
783 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
784 tree base, tree comp_type, gcall *call,
785 HOST_WIDE_INT offset)
786 {
787 struct prop_type_change_info tci;
788 ao_ref ao;
789
790 gcc_checking_assert (DECL_P (arg)
791 || TREE_CODE (arg) == MEM_REF
792 || handled_component_p (arg));
793
794 comp_type = TYPE_MAIN_VARIANT (comp_type);
795
796 /* Const calls cannot call virtual methods through VMT and so type changes do
797 not matter. */
798 if (!flag_devirtualize || !gimple_vuse (call)
799 /* Be sure expected_type is polymorphic. */
800 || !comp_type
801 || TREE_CODE (comp_type) != RECORD_TYPE
802 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
803 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
804 return true;
805
806 ao_ref_init (&ao, arg);
807 ao.base = base;
808 ao.offset = offset;
809 ao.size = POINTER_SIZE;
810 ao.max_size = ao.size;
811
812 tci.offset = offset;
813 tci.object = get_base_address (arg);
814 tci.type_maybe_changed = false;
815
816 int walked
817 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
818 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
819
820 if (walked >= 0 && !tci.type_maybe_changed)
821 return false;
822
823 return true;
824 }
825
826 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
827 If it is, return true. ARG is the object itself (not a pointer
828 to it, unless dereferenced). BASE is the base of the memory access as
829 returned by get_ref_base_and_extent, as is the offset. */
830
831 static bool
832 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
833 tree comp_type, gcall *call,
834 HOST_WIDE_INT offset)
835 {
836 if (!flag_devirtualize)
837 return false;
838
839 if (TREE_CODE (base) == MEM_REF
840 && !param_type_may_change_p (current_function_decl,
841 TREE_OPERAND (base, 0),
842 call))
843 return false;
844 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
845 call, offset);
846 }
847
848 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
849 SSA name (its dereference will become the base and the offset is assumed to
850 be zero). */
851
852 static bool
853 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
854 gcall *call)
855 {
856 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
857 if (!flag_devirtualize
858 || !POINTER_TYPE_P (TREE_TYPE (arg)))
859 return false;
860
861 if (!param_type_may_change_p (current_function_decl, arg, call))
862 return false;
863
864 arg = build2 (MEM_REF, ptr_type_node, arg,
865 build_int_cst (ptr_type_node, 0));
866
867 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
868 call, 0);
869 }
870
871 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
872 the boolean variable pointed to by DATA to true. */
873
874 static bool
875 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
876 void *data)
877 {
878 bool *b = (bool *) data;
879 *b = true;
880 return true;
881 }
882
883 /* Find the nearest valid aa status for parameter specified by INDEX that
884 dominates BB. */
885
886 static struct ipa_param_aa_status *
887 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
888 int index)
889 {
890 while (true)
891 {
892 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
893 if (!bb)
894 return NULL;
895 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
896 if (!bi->param_aa_statuses.is_empty ()
897 && bi->param_aa_statuses[index].valid)
898 return &bi->param_aa_statuses[index];
899 }
900 }
901
902 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
903 structures and/or initialize the result with a dominating description as
904 necessary. */
905
906 static struct ipa_param_aa_status *
907 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
908 int index)
909 {
910 gcc_checking_assert (fbi);
911 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
912 if (bi->param_aa_statuses.is_empty ())
913 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
914 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
915 if (!paa->valid)
916 {
917 gcc_checking_assert (!paa->parm_modified
918 && !paa->ref_modified
919 && !paa->pt_modified);
920 struct ipa_param_aa_status *dom_paa;
921 dom_paa = find_dominating_aa_status (fbi, bb, index);
922 if (dom_paa)
923 *paa = *dom_paa;
924 else
925 paa->valid = true;
926 }
927
928 return paa;
929 }
930
931 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
932 a value known not to be modified in this function before reaching the
933 statement STMT. FBI holds information about the function that we have
934 gathered so far but which does not survive the summary building stage. */
935
936 static bool
937 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
938 gimple *stmt, tree parm_load)
939 {
940 struct ipa_param_aa_status *paa;
941 bool modified = false;
942 ao_ref refd;
943
944 tree base = get_base_address (parm_load);
945 gcc_assert (TREE_CODE (base) == PARM_DECL);
946 if (TREE_READONLY (base))
947 return true;
948
949 gcc_checking_assert (fbi);
950 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
951 if (paa->parm_modified)
952 return false;
953
954 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
955 ao_ref_init (&refd, parm_load);
956 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
957 &modified, NULL, NULL,
958 fbi->aa_walk_budget + 1);
959 if (walked < 0)
960 {
961 modified = true;
962 if (fbi)
963 fbi->aa_walk_budget = 0;
964 }
965 else if (fbi)
966 fbi->aa_walk_budget -= walked;
967 if (paa && modified)
968 paa->parm_modified = true;
969 return !modified;
970 }
971
972 /* If STMT is an assignment that loads a value from a parameter declaration,
973 return the index of the parameter in ipa_node_params which has not been
974 modified. Otherwise return -1. */
975
976 static int
977 load_from_unmodified_param (struct ipa_func_body_info *fbi,
978 vec<ipa_param_descriptor, va_gc> *descriptors,
979 gimple *stmt)
980 {
981 int index;
982 tree op1;
983
984 if (!gimple_assign_single_p (stmt))
985 return -1;
986
987 op1 = gimple_assign_rhs1 (stmt);
988 if (TREE_CODE (op1) != PARM_DECL)
989 return -1;
990
991 index = ipa_get_param_decl_index_1 (descriptors, op1);
992 if (index < 0
993 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
994 return -1;
995
996 return index;
997 }
998
999 /* Return true if memory reference REF (which must be a load through parameter
1000 with INDEX) loads data that are known to be unmodified in this function
1001 before reaching statement STMT. */
1002
1003 static bool
1004 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1005 int index, gimple *stmt, tree ref)
1006 {
1007 struct ipa_param_aa_status *paa;
1008 bool modified = false;
1009 ao_ref refd;
1010
1011 gcc_checking_assert (fbi);
1012 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1013 if (paa->ref_modified)
1014 return false;
1015
1016 gcc_checking_assert (gimple_vuse (stmt));
1017 ao_ref_init (&refd, ref);
1018 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1019 &modified, NULL, NULL,
1020 fbi->aa_walk_budget + 1);
1021 if (walked < 0)
1022 {
1023 modified = true;
1024 fbi->aa_walk_budget = 0;
1025 }
1026 else
1027 fbi->aa_walk_budget -= walked;
1028 if (modified)
1029 paa->ref_modified = true;
1030 return !modified;
1031 }
1032
1033 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1034 is known to be unmodified in this function before reaching call statement
1035 CALL into which it is passed. FBI describes the function body. */
1036
1037 static bool
1038 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1039 gimple *call, tree parm)
1040 {
1041 bool modified = false;
1042 ao_ref refd;
1043
1044 /* It's unnecessary to calculate anything about memory contents for a const
1045 function because it is not going to use it. But do not cache the result
1046 either. Also, no such calculations for non-pointers. */
1047 if (!gimple_vuse (call)
1048 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1049 return false;
1050
1051 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1052 gimple_bb (call),
1053 index);
1054 if (paa->pt_modified)
1055 return false;
1056
1057 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1058 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1059 &modified, NULL, NULL,
1060 fbi->aa_walk_budget + 1);
1061 if (walked < 0)
1062 {
1063 fbi->aa_walk_budget = 0;
1064 modified = true;
1065 }
1066 else
1067 fbi->aa_walk_budget -= walked;
1068 if (modified)
1069 paa->pt_modified = true;
1070 return !modified;
1071 }
1072
1073 /* Return true if we can prove that OP is a memory reference loading
1074 data from an aggregate passed as a parameter.
1075
1076 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1077 false if it cannot prove that the value has not been modified before the
1078 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1079 if it cannot prove the value has not been modified; in that case it will
1080 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1081
1082 INFO and PARMS_AINFO describe parameters of the current function (but the
1083 latter can be NULL), STMT is the load statement. If function returns true,
1084 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1085 within the aggregate and whether it is a load from a value passed by
1086 reference respectively. */
1087
1088 bool
1089 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1090 vec<ipa_param_descriptor, va_gc> *descriptors,
1091 gimple *stmt, tree op, int *index_p,
1092 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1093 bool *by_ref_p, bool *guaranteed_unmodified)
1094 {
1095 int index;
1096 HOST_WIDE_INT size;
1097 bool reverse;
1098 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1099
1100 if (!base)
1101 return false;
1102
1103 if (DECL_P (base))
1104 {
1105 int index = ipa_get_param_decl_index_1 (descriptors, base);
1106 if (index >= 0
1107 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1108 {
1109 *index_p = index;
1110 *by_ref_p = false;
1111 if (size_p)
1112 *size_p = size;
1113 if (guaranteed_unmodified)
1114 *guaranteed_unmodified = true;
1115 return true;
1116 }
1117 return false;
1118 }
1119
1120 if (TREE_CODE (base) != MEM_REF
1121 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1122 || !integer_zerop (TREE_OPERAND (base, 1)))
1123 return false;
1124
1125 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1126 {
1127 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1128 index = ipa_get_param_decl_index_1 (descriptors, parm);
1129 }
1130 else
1131 {
1132 /* This branch catches situations where a pointer parameter is not a
1133 gimple register, for example:
1134
1135 void hip7(S*) (struct S * p)
1136 {
1137 void (*<T2e4>) (struct S *) D.1867;
1138 struct S * p.1;
1139
1140 <bb 2>:
1141 p.1_1 = p;
1142 D.1867_2 = p.1_1->f;
1143 D.1867_2 ();
1144 gdp = &p;
1145 */
1146
1147 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1148 index = load_from_unmodified_param (fbi, descriptors, def);
1149 }
1150
1151 if (index >= 0)
1152 {
1153 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1154 if (!data_preserved && !guaranteed_unmodified)
1155 return false;
1156
1157 *index_p = index;
1158 *by_ref_p = true;
1159 if (size_p)
1160 *size_p = size;
1161 if (guaranteed_unmodified)
1162 *guaranteed_unmodified = data_preserved;
1163 return true;
1164 }
1165 return false;
1166 }
1167
1168 /* If STMT is an assignment that loads a value from a parameter declaration,
1169 or from an aggregate passed as the parameter either by value or reference,
1170 return the index of the parameter in ipa_node_params. Otherwise return -1.
1171
1172 FBI holds gathered information about the function. INFO describes
1173 parameters of the function, STMT is the assignment statement. If it is a
1174 memory load from an aggregate, *OFFSET_P is filled with offset within the
1175 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1176 reference. */
1177
1178 static int
1179 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1180 class ipa_node_params *info,
1181 gimple *stmt,
1182 HOST_WIDE_INT *offset_p,
1183 bool *by_ref_p)
1184 {
1185 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1186 poly_int64 size;
1187
1188 /* Load value from a parameter declaration. */
1189 if (index >= 0)
1190 {
1191 *offset_p = -1;
1192 return index;
1193 }
1194
1195 if (!gimple_assign_load_p (stmt))
1196 return -1;
1197
1198 tree rhs = gimple_assign_rhs1 (stmt);
1199
1200 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1201 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1202 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1203 return -1;
1204
1205 /* Skip memory reference containing bit-field. */
1206 if (TREE_CODE (rhs) == BIT_FIELD_REF
1207 || contains_bitfld_component_ref_p (rhs))
1208 return -1;
1209
1210 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1211 offset_p, &size, by_ref_p))
1212 return -1;
1213
1214 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1215 size));
1216 if (!*by_ref_p)
1217 {
1218 tree param_type = ipa_get_type (info, index);
1219
1220 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1221 return -1;
1222 }
1223 else if (TREE_THIS_VOLATILE (rhs))
1224 return -1;
1225
1226 return index;
1227 }
1228
1229 /* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
1230 to find the original pointer. Initialize RET to the pointer which results
1231 from the walk.
1232 If the offset is known, return true and initialize OFFSET_RET. */
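/* For instance (an illustrative example), starting from r_5 in

     q_4 = p_2(D) + 16;
     r_5 = q_4 + 8;

   the walk ends at p_2(D) and the accumulated unit offset is 24, so the
   function returns true with *RET == p_2(D) and *OFFSET_RET == 24. */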
1233
1234 bool
1235 unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1236 {
1237 poly_int64 offset = 0;
1238 bool offset_known = true;
1239 int i;
1240
1241 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1242 {
1243 if (TREE_CODE (op) == ADDR_EXPR)
1244 {
1245 poly_int64 extra_offset = 0;
1246 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1247 &offset);
1248 if (!base)
1249 {
1250 base = get_base_address (TREE_OPERAND (op, 0));
1251 if (TREE_CODE (base) != MEM_REF)
1252 break;
1253 offset_known = false;
1254 }
1255 else
1256 {
1257 if (TREE_CODE (base) != MEM_REF)
1258 break;
1259 offset += extra_offset;
1260 }
1261 op = TREE_OPERAND (base, 0);
1262 if (mem_ref_offset (base).to_shwi (&extra_offset))
1263 offset += extra_offset;
1264 else
1265 offset_known = false;
1266 }
1267 else if (TREE_CODE (op) == SSA_NAME
1268 && !SSA_NAME_IS_DEFAULT_DEF (op))
1269 {
1270 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1271
1272 if (gimple_assign_single_p (pstmt))
1273 op = gimple_assign_rhs1 (pstmt);
1274 else if (is_gimple_assign (pstmt)
1275 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1276 {
1277 poly_int64 extra_offset = 0;
1278 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1279 &extra_offset))
1280 offset += extra_offset;
1281 else
1282 offset_known = false;
1283 op = gimple_assign_rhs1 (pstmt);
1284 }
1285 else
1286 break;
1287 }
1288 else
1289 break;
1290 }
1291 *ret = op;
1292 *offset_ret = offset;
1293 return offset_known;
1294 }
1295
1296 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1297 of an assignment statement STMT, try to determine whether we are actually
1298 handling any of the following cases and construct an appropriate jump
1299 function into JFUNC if so:
1300
1301 1) The passed value is loaded from a formal parameter which is not a gimple
1302 register (most probably because it is addressable, the value has to be
1303 scalar) and we can guarantee the value has not changed. This case can
1304 therefore be described by a simple pass-through jump function. For example:
1305
1306 foo (int a)
1307 {
1308 int a.0;
1309
1310 a.0_2 = a;
1311 bar (a.0_2);
1312
1313 2) The passed value can be described by a simple arithmetic pass-through
1314 jump function. E.g.
1315
1316 foo (int a)
1317 {
1318 int D.2064;
1319
1320 D.2064_4 = a.1(D) + 4;
1321 bar (D.2064_4);
1322
1323 This case can also occur in combination of the previous one, e.g.:
1324
1325 foo (int a, int z)
1326 {
1327 int a.0;
1328 int D.2064;
1329
1330 a.0_3 = a;
1331 D.2064_4 = a.0_3 + 4;
1332 foo (D.2064_4);
1333
1334 3) The passed value is an address of an object within another one (which
1335 also passed by reference). Such situations are described by an ancestor
1336 jump function and describe situations such as:
1337
1338 B::foo() (struct B * const this)
1339 {
1340 struct A * D.1845;
1341
1342 D.1845_2 = &this_1(D)->D.1748;
1343 A::bar (D.1845_2);
1344
1345 INFO is the structure describing individual parameters across different
1346 stages of IPA optimizations. PARMS_AINFO contains the information that is
1347 only needed for intraprocedural analysis. */
1348
1349 static void
1350 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1351 class ipa_node_params *info,
1352 struct ipa_jump_func *jfunc,
1353 gcall *call, gimple *stmt, tree name,
1354 tree param_type)
1355 {
1356 HOST_WIDE_INT offset, size;
1357 tree op1, tc_ssa, base, ssa;
1358 bool reverse;
1359 int index;
1360
1361 op1 = gimple_assign_rhs1 (stmt);
1362
1363 if (TREE_CODE (op1) == SSA_NAME)
1364 {
1365 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1366 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1367 else
1368 index = load_from_unmodified_param (fbi, info->descriptors,
1369 SSA_NAME_DEF_STMT (op1));
1370 tc_ssa = op1;
1371 }
1372 else
1373 {
1374 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1375 tc_ssa = gimple_assign_lhs (stmt);
1376 }
1377
1378 if (index >= 0)
1379 {
1380 switch (gimple_assign_rhs_class (stmt))
1381 {
1382 case GIMPLE_BINARY_RHS:
1383 {
1384 tree op2 = gimple_assign_rhs2 (stmt);
1385 if (!is_gimple_ip_invariant (op2)
1386 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1387 != tcc_comparison)
1388 && !useless_type_conversion_p (TREE_TYPE (name),
1389 TREE_TYPE (op1))))
1390 return;
1391
1392 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1393 gimple_assign_rhs_code (stmt));
1394 break;
1395 }
1396 case GIMPLE_SINGLE_RHS:
1397 {
1398 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1399 tc_ssa);
1400 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1401 break;
1402 }
1403 case GIMPLE_UNARY_RHS:
1404 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1405 ipa_set_jf_unary_pass_through (jfunc, index,
1406 gimple_assign_rhs_code (stmt));
1407 default:;
1408 }
1409 return;
1410 }
1411
1412 if (TREE_CODE (op1) != ADDR_EXPR)
1413 return;
1414 op1 = TREE_OPERAND (op1, 0);
1415 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1416 return;
1417 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1418 offset_int mem_offset;
1419 if (!base
1420 || TREE_CODE (base) != MEM_REF
1421 || !mem_ref_offset (base).is_constant (&mem_offset))
1422 return;
1423 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1424 ssa = TREE_OPERAND (base, 0);
1425 if (TREE_CODE (ssa) != SSA_NAME
1426 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1427 || offset < 0)
1428 return;
1429
1430 /* Dynamic types are changed in constructors and destructors. */
1431 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1432 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1433 ipa_set_ancestor_jf (jfunc, offset, index,
1434 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1435 }
1436
1437 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1438 it looks like:
1439
1440 iftmp.1_3 = &obj_2(D)->D.1762;
1441
1442 The base of the MEM_REF must be a default definition SSA NAME of a
1443 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1444 whole MEM_REF expression is returned and the offset calculated from any
1445 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1446 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1447
1448 static tree
1449 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1450 {
1451 HOST_WIDE_INT size;
1452 tree expr, parm, obj;
1453 bool reverse;
1454
1455 if (!gimple_assign_single_p (assign))
1456 return NULL_TREE;
1457 expr = gimple_assign_rhs1 (assign);
1458
1459 if (TREE_CODE (expr) != ADDR_EXPR)
1460 return NULL_TREE;
1461 expr = TREE_OPERAND (expr, 0);
1462 obj = expr;
1463 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1464
1465 offset_int mem_offset;
1466 if (!expr
1467 || TREE_CODE (expr) != MEM_REF
1468 || !mem_ref_offset (expr).is_constant (&mem_offset))
1469 return NULL_TREE;
1470 parm = TREE_OPERAND (expr, 0);
1471 if (TREE_CODE (parm) != SSA_NAME
1472 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1473 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1474 return NULL_TREE;
1475
1476 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1477 *obj_p = obj;
1478 return expr;
1479 }
1480
1481
1482 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1483 statement PHI, try to find out whether NAME is in fact a
1484 multiple-inheritance typecast from a descendant into an ancestor of a formal
1485 parameter and thus can be described by an ancestor jump function and if so,
1486 write the appropriate function into JFUNC.
1487
1488 Essentially we want to match the following pattern:
1489
1490 if (obj_2(D) != 0B)
1491 goto <bb 3>;
1492 else
1493 goto <bb 4>;
1494
1495 <bb 3>:
1496 iftmp.1_3 = &obj_2(D)->D.1762;
1497
1498 <bb 4>:
1499 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1500 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1501 return D.1879_6; */
1502
1503 static void
1504 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1505 class ipa_node_params *info,
1506 struct ipa_jump_func *jfunc,
1507 gcall *call, gphi *phi)
1508 {
1509 HOST_WIDE_INT offset;
1510 gimple *assign, *cond;
1511 basic_block phi_bb, assign_bb, cond_bb;
1512 tree tmp, parm, expr, obj;
1513 int index, i;
1514
1515 if (gimple_phi_num_args (phi) != 2)
1516 return;
1517
1518 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1519 tmp = PHI_ARG_DEF (phi, 0);
1520 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1521 tmp = PHI_ARG_DEF (phi, 1);
1522 else
1523 return;
1524 if (TREE_CODE (tmp) != SSA_NAME
1525 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1526 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1527 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1528 return;
1529
1530 assign = SSA_NAME_DEF_STMT (tmp);
1531 assign_bb = gimple_bb (assign);
1532 if (!single_pred_p (assign_bb))
1533 return;
1534 expr = get_ancestor_addr_info (assign, &obj, &offset);
1535 if (!expr)
1536 return;
1537 parm = TREE_OPERAND (expr, 0);
1538 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1539 if (index < 0)
1540 return;
1541
1542 cond_bb = single_pred (assign_bb);
1543 cond = last_stmt (cond_bb);
1544 if (!cond
1545 || gimple_code (cond) != GIMPLE_COND
1546 || gimple_cond_code (cond) != NE_EXPR
1547 || gimple_cond_lhs (cond) != parm
1548 || !integer_zerop (gimple_cond_rhs (cond)))
1549 return;
1550
1551 phi_bb = gimple_bb (phi);
1552 for (i = 0; i < 2; i++)
1553 {
1554 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1555 if (pred != assign_bb && pred != cond_bb)
1556 return;
1557 }
1558
1559 ipa_set_ancestor_jf (jfunc, offset, index,
1560 parm_ref_data_pass_through_p (fbi, index, call, parm));
1561 }
1562
1563 /* Inspect the given TYPE and return true iff it has the same structure (the
1564 same number of fields of the same types) as a C++ member pointer. If
1565 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1566 corresponding fields there. */
1567
1568 static bool
1569 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1570 {
1571 tree fld;
1572
1573 if (TREE_CODE (type) != RECORD_TYPE)
1574 return false;
1575
1576 fld = TYPE_FIELDS (type);
1577 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1578 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1579 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1580 return false;
1581
1582 if (method_ptr)
1583 *method_ptr = fld;
1584
1585 fld = DECL_CHAIN (fld);
1586 if (!fld || INTEGRAL_TYPE_P (fld)
1587 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1588 return false;
1589 if (delta)
1590 *delta = fld;
1591
1592 if (DECL_CHAIN (fld))
1593 return false;
1594
1595 return true;
1596 }
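/* The shape recognized above is the record the C++ front end builds for
   pointer-to-member-function values: a first field (conventionally __pfn)
   whose type is a pointer to a METHOD_TYPE, followed by a single integer
   adjustment field (conventionally __delta), and nothing else.  The field
   names here are only the usual convention, not something this code relies
   on. */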
1597
1598 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1599 return the rhs of its defining statement, and store that statement in
1600 *RHS_STMT. Otherwise return RHS as it is. */
1601
1602 static inline tree
1603 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1604 {
1605 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1606 {
1607 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1608
1609 if (gimple_assign_single_p (def_stmt))
1610 rhs = gimple_assign_rhs1 (def_stmt);
1611 else
1612 break;
1613 *rhs_stmt = def_stmt;
1614 }
1615 return rhs;
1616 }
1617
1618 /* Simple linked list, describing contents of an aggregate before call. */
1619
1620 struct ipa_known_agg_contents_list
1621 {
1622 /* Offset and size of the described part of the aggregate. */
1623 HOST_WIDE_INT offset, size;
1624
1625 /* Type of the described part of the aggregate. */
1626 tree type;
1627
1628 /* Known constant value or jump function data describing contents. */
1629 struct ipa_load_agg_data value;
1630
1631 /* Pointer to the next structure in the list. */
1632 struct ipa_known_agg_contents_list *next;
1633 };
1634
1635 /* Add an aggregate content item into a linked list of
1636 ipa_known_agg_contents_list structures, in which all elements
1637 are sorted in ascending order by offset. */
1638
1639 static inline void
1640 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1641 struct ipa_known_agg_contents_list *item)
1642 {
1643 struct ipa_known_agg_contents_list *list = *plist;
1644
1645 for (; list; list = list->next)
1646 {
1647 if (list->offset >= item->offset)
1648 break;
1649
1650 plist = &list->next;
1651 }
1652
1653 item->next = list;
1654 *plist = item;
1655 }
1656
1657 /* Check whether a given aggregate content is clobbered by some element in
1658 a linked list of ipa_known_agg_contents_list. */
1659
1660 static inline bool
1661 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1662 struct ipa_known_agg_contents_list *item)
1663 {
1664 for (; list; list = list->next)
1665 {
1666 if (list->offset >= item->offset)
1667 return list->offset < item->offset + item->size;
1668
1669 if (list->offset + list->size > item->offset)
1670 return true;
1671 }
1672
1673 return false;
1674 }
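/* For instance (a worked example), with a list describing parts at bit
   ranges [0, 32) and [64, 96), an item covering [32, 48) is not considered
   clobbered, whereas an item covering [48, 80) is, because it overlaps the
   entry starting at offset 64. */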
1675
1676 /* Build aggregate jump function from LIST, assuming there are exactly
1677 VALUE_COUNT entries there and that the offset of the passed argument
1678 is ARG_OFFSET, and store the result into JFUNC. */
1679
1680 static void
1681 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1682 int value_count, HOST_WIDE_INT arg_offset,
1683 struct ipa_jump_func *jfunc)
1684 {
1685 vec_safe_reserve (jfunc->agg.items, value_count, true);
1686 for (; list; list = list->next)
1687 {
1688 struct ipa_agg_jf_item item;
1689 tree operand = list->value.pass_through.operand;
1690
1691 if (list->value.pass_through.formal_id >= 0)
1692 {
1693 /* Content value is derived from some formal parameter. */
1694 if (list->value.offset >= 0)
1695 item.jftype = IPA_JF_LOAD_AGG;
1696 else
1697 item.jftype = IPA_JF_PASS_THROUGH;
1698
1699 item.value.load_agg = list->value;
1700 if (operand)
1701 item.value.pass_through.operand
1702 = unshare_expr_without_location (operand);
1703 }
1704 else if (operand)
1705 {
1706 /* Content value is known constant. */
1707 item.jftype = IPA_JF_CONST;
1708 item.value.constant = unshare_expr_without_location (operand);
1709 }
1710 else
1711 continue;
1712
1713 item.type = list->type;
1714 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1715
1716 item.offset = list->offset - arg_offset;
1717 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1718
1719 jfunc->agg.items->quick_push (item);
1720 }
1721 }
1722
1723 /* Given an assignment statement STMT, try to collect information into
1724 AGG_VALUE that will be used to construct a jump function for the RHS of the
1725 assignment, from which the content value of an aggregate part comes.
1726
1727 Besides constant and simple pass-through jump functions, also try to
1728 identify whether it matches the following pattern that can be described by
1729 a load-value-from-aggregate jump function, which is a derivative of simple
1730 pass-through jump function.
1731
1732 foo (int *p)
1733 {
1734 ...
1735
1736 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1737 bar (q_5);
1738 }
1739
1740 Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
1741 constant, simple pass-through and load-value-from-aggregate. If the value
1742 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1743 set to -1. For simple pass-through and load-value-from-aggregate, field
1744 FORMAL_ID specifies the related formal parameter index, and field
1745 OFFSET can be used to distinguish them, -1 means simple pass-through,
1746 otherwise means load-value-from-aggregate. */
1747
1748 static void
1749 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1750 struct ipa_load_agg_data *agg_value,
1751 gimple *stmt)
1752 {
1753 tree lhs = gimple_assign_lhs (stmt);
1754 tree rhs1 = gimple_assign_rhs1 (stmt);
1755 enum tree_code code;
1756 int index = -1;
1757
1758 /* Initialize jump function data for the aggregate part. */
1759 memset (agg_value, 0, sizeof (*agg_value));
1760 agg_value->pass_through.operation = NOP_EXPR;
1761 agg_value->pass_through.formal_id = -1;
1762 agg_value->offset = -1;
1763
1764 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1765 || TREE_THIS_VOLATILE (lhs)
1766 || TREE_CODE (lhs) == BIT_FIELD_REF
1767 || contains_bitfld_component_ref_p (lhs))
1768 return;
1769
1770 /* Skip SSA copies. */
1771 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1772 {
1773 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1774 break;
1775
1776 stmt = SSA_NAME_DEF_STMT (rhs1);
1777 if (!is_gimple_assign (stmt))
1778 break;
1779
1780 rhs1 = gimple_assign_rhs1 (stmt);
1781 }
1782
1783 if (gphi *phi = dyn_cast<gphi *> (stmt))
1784 {
1785 /* Also handle a special case like the following (a is a formal parameter):
1786
1787 _12 = *a_11(D).dim[0].stride;
1788 ...
1789 # iftmp.22_9 = PHI <_12(2), 1(3)>
1790 ...
1791 parm.6.dim[0].stride = iftmp.22_9;
1792 ...
1793 __x_MOD_foo (&parm.6, b_31(D));
1794
1795 The aggregate jump function describing parm.6.dim[0].stride is encoded as a
1796 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
1797 (the constant from the PHI node). */
1798
1799 if (gimple_phi_num_args (phi) != 2)
1800 return;
1801 tree arg0 = gimple_phi_arg_def (phi, 0);
1802 tree arg1 = gimple_phi_arg_def (phi, 1);
1803 tree operand;
1804
1805 if (is_gimple_ip_invariant (arg1))
1806 {
1807 operand = arg1;
1808 rhs1 = arg0;
1809 }
1810 else if (is_gimple_ip_invariant (arg0))
1811 {
1812 operand = arg0;
1813 rhs1 = arg1;
1814 }
1815 else
1816 return;
1817
1818 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1819 if (!is_gimple_assign (stmt))
1820 return;
1821
1822 code = ASSERT_EXPR;
1823 agg_value->pass_through.operand = operand;
1824 }
1825 else if (is_gimple_assign (stmt))
1826 {
1827 code = gimple_assign_rhs_code (stmt);
1828 switch (gimple_assign_rhs_class (stmt))
1829 {
1830 case GIMPLE_SINGLE_RHS:
1831 if (is_gimple_ip_invariant (rhs1))
1832 {
1833 agg_value->pass_through.operand = rhs1;
1834 return;
1835 }
1836 code = NOP_EXPR;
1837 break;
1838
1839 case GIMPLE_UNARY_RHS:
1840 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1841 (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
1842 tcc_binary; this subtlety is somewhat misleading.
1843
1844 Since tcc_unary is widely used in IPA-CP code to check an operation
1845 with one operand, here we only allow tcc_unary operations to avoid
1846 possible problems. Then (opclass == tcc_unary) can be used to
1847 distinguish unary from binary. */
1848 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1849 return;
1850
1851 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1852 break;
1853
1854 case GIMPLE_BINARY_RHS:
1855 {
1856 gimple *rhs1_stmt = stmt;
1857 gimple *rhs2_stmt = stmt;
1858 tree rhs2 = gimple_assign_rhs2 (stmt);
1859
1860 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1861 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1862
1863 if (is_gimple_ip_invariant (rhs2))
1864 {
1865 agg_value->pass_through.operand = rhs2;
1866 stmt = rhs1_stmt;
1867 }
1868 else if (is_gimple_ip_invariant (rhs1))
1869 {
1870 if (TREE_CODE_CLASS (code) == tcc_comparison)
1871 code = swap_tree_comparison (code);
1872 else if (!commutative_tree_code (code))
1873 return;
1874
1875 agg_value->pass_through.operand = rhs1;
1876 stmt = rhs2_stmt;
1877 rhs1 = rhs2;
1878 }
1879 else
1880 return;
1881
1882 if (TREE_CODE_CLASS (code) != tcc_comparison
1883 && !useless_type_conversion_p (TREE_TYPE (lhs),
1884 TREE_TYPE (rhs1)))
1885 return;
1886 }
1887 break;
1888
1889 default:
1890 return;
1891 }
1892 }
1893 else
1894 return;
1895
1896 if (TREE_CODE (rhs1) != SSA_NAME)
1897 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1898 &agg_value->offset,
1899 &agg_value->by_ref);
1900 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1901 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1902
1903 if (index >= 0)
1904 {
1905 if (agg_value->offset >= 0)
1906 agg_value->type = TREE_TYPE (rhs1);
1907 agg_value->pass_through.formal_id = index;
1908 agg_value->pass_through.operation = code;
1909 }
1910 else
1911 agg_value->pass_through.operand = NULL_TREE;
1912 }
1913
1914 /* If STMT is a memory store to the object whose address is BASE, extract
1915 information (offset, size, and value) into CONTENT, and return true,
1916 otherwise we conservatively assume the whole object is modified with
1917 unknown content, and return false. CHECK_REF means that access to the
1918 object is expected to be in the form of a MEM_REF expression. */
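/* For illustration (a hypothetical store, in the style of the example used
   for analyze_agg_content_value above), with BASE being q_5 and CHECK_REF
   true, the statement

     *q_5 = p_3(D) + 1;

   would set CONTENT->offset to 0, CONTENT->size to the size of the stored
   value, and let analyze_agg_content_value describe CONTENT->value as an
   arithmetic pass-through of parameter p with operation PLUS_EXPR and
   operand 1.  */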
1919
1920 static bool
1921 extract_mem_content (struct ipa_func_body_info *fbi,
1922 gimple *stmt, tree base, bool check_ref,
1923 struct ipa_known_agg_contents_list *content)
1924 {
1925 HOST_WIDE_INT lhs_offset, lhs_size;
1926 bool reverse;
1927
1928 if (!is_gimple_assign (stmt))
1929 return false;
1930
1931 tree lhs = gimple_assign_lhs (stmt);
1932 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1933 &reverse);
1934 if (!lhs_base)
1935 return false;
1936
1937 if (check_ref)
1938 {
1939 if (TREE_CODE (lhs_base) != MEM_REF
1940 || TREE_OPERAND (lhs_base, 0) != base
1941 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1942 return false;
1943 }
1944 else if (lhs_base != base)
1945 return false;
1946
1947 content->offset = lhs_offset;
1948 content->size = lhs_size;
1949 content->type = TREE_TYPE (lhs);
1950 content->next = NULL;
1951
1952 analyze_agg_content_value (fbi, &content->value, stmt);
1953 return true;
1954 }
1955
1956 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1957 in ARG is filled with constants or values that are derived from the caller's
1958 formal parameters in the way described by some kinds of jump functions. FBI
1959 is the context of the caller function for interprocedural analysis. ARG can
1960 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1961 the type of the aggregate, JFUNC is the jump function for the aggregate. */
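/* For illustration, a hypothetical caller such as

     struct S s;
     s.a = 123;
     s.b = parm_2(D);
     foo (&s);

   would (budget permitting, and with no intervening clobber of s) yield two
   aggregate items for the argument of foo: an IPA_JF_CONST item for s.a and
   a simple IPA_JF_PASS_THROUGH item for s.b referring to the caller's own
   parameter.  */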
1962
1963 static void
1964 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1965 gcall *call, tree arg,
1966 tree arg_type,
1967 struct ipa_jump_func *jfunc)
1968 {
1969 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1970 bitmap visited = NULL;
1971 int item_count = 0, value_count = 0;
1972 HOST_WIDE_INT arg_offset, arg_size;
1973 tree arg_base;
1974 bool check_ref, by_ref;
1975 ao_ref r;
1976 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1977
1978 if (max_agg_items == 0)
1979 return;
1980
1981 /* The function operates in three stages. First, we prepare check_ref, r,
1982 arg_base and arg_offset based on what is actually passed as an actual
1983 argument. */
1984
1985 if (POINTER_TYPE_P (arg_type))
1986 {
1987 by_ref = true;
1988 if (TREE_CODE (arg) == SSA_NAME)
1989 {
1990 tree type_size;
1991 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1992 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1993 return;
1994 check_ref = true;
1995 arg_base = arg;
1996 arg_offset = 0;
1997 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1998 arg_size = tree_to_uhwi (type_size);
1999 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2000 }
2001 else if (TREE_CODE (arg) == ADDR_EXPR)
2002 {
2003 bool reverse;
2004
2005 arg = TREE_OPERAND (arg, 0);
2006 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2007 &arg_size, &reverse);
2008 if (!arg_base)
2009 return;
2010 if (DECL_P (arg_base))
2011 {
2012 check_ref = false;
2013 ao_ref_init (&r, arg_base);
2014 }
2015 else
2016 return;
2017 }
2018 else
2019 return;
2020 }
2021 else
2022 {
2023 bool reverse;
2024
2025 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2026
2027 by_ref = false;
2028 check_ref = false;
2029 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2030 &arg_size, &reverse);
2031 if (!arg_base)
2032 return;
2033
2034 ao_ref_init (&r, arg);
2035 }
2036
2037 /* The second stage traverses the virtual SSA web backwards starting from the
2038 call statement and only looks at individual dominating virtual operands
2039 (whose definitions dominate the call). As long as it is confident that the
2040 content of the aggregate is affected by the definition of a virtual operand,
2041 it builds a sorted linked list of ipa_agg_jf_list describing that. */
2042
2043 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
2044 {
2045 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2046
2047 if (gimple_code (stmt) == GIMPLE_PHI)
2048 {
2049 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2050 fbi->aa_walk_budget,
2051 &visited, false, NULL, NULL);
2052 continue;
2053 }
2054
2055 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2056 {
2057 struct ipa_known_agg_contents_list *content
2058 = XALLOCA (struct ipa_known_agg_contents_list);
2059
2060 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2061 break;
2062
2063 /* Now we get a dominating virtual operand, and need to check
2064 whether its value is clobbered by any other dominating one. */
2065 if ((content->value.pass_through.formal_id >= 0
2066 || content->value.pass_through.operand)
2067 && !clobber_by_agg_contents_list_p (all_list, content))
2068 {
2069 struct ipa_known_agg_contents_list *copy
2070 = XALLOCA (struct ipa_known_agg_contents_list);
2071
2072 /* Add to the list consisting of only dominating virtual
2073 operands, whose definitions can finally reach the call. */
2074 add_to_agg_contents_list (&list, (*copy = *content, copy));
2075
2076 if (++value_count == max_agg_items)
2077 break;
2078 }
2079
2080 /* Add to the list consisting of all dominating virtual operands. */
2081 add_to_agg_contents_list (&all_list, content);
2082
2083 if (++item_count == 2 * max_agg_items)
2084 break;
2085 }
2086 dom_vuse = gimple_vuse (stmt);
2087 }
2088
2089 if (visited)
2090 BITMAP_FREE (visited);
2091
2092 /* The third stage just goes over the list and creates an appropriate vector
2093 of ipa_agg_jf_item structures out of it, of course only if there are
2094 any meaningful items to begin with. */
2095
2096 if (value_count)
2097 {
2098 jfunc->agg.by_ref = by_ref;
2099 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2100 }
2101 }
2102
2103
2104 /* Return the Ith param type of callee associated with call graph
2105 edge E. */
2106
2107 tree
2108 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2109 {
2110 int n;
2111 tree type = (e->callee
2112 ? TREE_TYPE (e->callee->decl)
2113 : gimple_call_fntype (e->call_stmt));
2114 tree t = TYPE_ARG_TYPES (type);
2115
2116 for (n = 0; n < i; n++)
2117 {
2118 if (!t)
2119 break;
2120 t = TREE_CHAIN (t);
2121 }
2122 if (t)
2123 return TREE_VALUE (t);
2124 if (!e->callee)
2125 return NULL;
2126 t = DECL_ARGUMENTS (e->callee->decl);
2127 for (n = 0; n < i; n++)
2128 {
2129 if (!t)
2130 return NULL;
2131 t = TREE_CHAIN (t);
2132 }
2133 if (t)
2134 return TREE_TYPE (t);
2135 return NULL;
2136 }
2137
2138 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2139 allocated structure or a previously existing one shared with other jump
2140 functions and/or transformation summaries. */
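/* As a hedged illustration of the encoding used by the callers below: a
   constant 4 would be represented with value 4 and mask 0 (all bits known),
   while a pointer known only to be 8-byte aligned gets the three least
   significant bits of the mask cleared, meaning just those bits of the value
   are known.  */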
2141
2142 ipa_bits *
2143 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2144 {
2145 ipa_bits tmp;
2146 tmp.value = value;
2147 tmp.mask = mask;
2148
2149 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2150 if (*slot)
2151 return *slot;
2152
2153 ipa_bits *res = ggc_alloc<ipa_bits> ();
2154 res->value = value;
2155 res->mask = mask;
2156 *slot = res;
2157
2158 return res;
2159 }
2160
2161 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2162 table in order to avoid creating multiple same ipa_bits structures. */
2163
2164 static void
2165 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2166 const widest_int &mask)
2167 {
2168 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2169 }
2170
2171 /* Return a pointer to a value_range just like *TMP, but either find it in
2172 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2173
2174 static value_range *
2175 ipa_get_value_range (value_range *tmp)
2176 {
2177 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2178 if (*slot)
2179 return *slot;
2180
2181 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2182 *vr = *tmp;
2183 *slot = vr;
2184
2185 return vr;
2186 }
2187
2188 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2189 equiv set. Use hash table in order to avoid creating multiple same copies of
2190 value_ranges. */
2191
2192 static value_range *
2193 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2194 {
2195 value_range tmp (min, max, kind);
2196 return ipa_get_value_range (&tmp);
2197 }
2198
2199 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2200 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2201 same value_range structures. */
2202
2203 static void
2204 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2205 tree min, tree max)
2206 {
2207 jf->m_vr = ipa_get_value_range (type, min, max);
2208 }
2209
2210 /* Assign to JF a pointer to a value_range just like TMP, but either fetch a
2211 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2212
2213 static void
2214 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2215 {
2216 jf->m_vr = ipa_get_value_range (tmp);
2217 }
2218
2219 /* Compute jump function for all arguments of callsite CS and insert the
2220 information in the jump_functions array in the ipa_edge_args corresponding
2221 to this callsite. */
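/* For example (a hypothetical call site in the caller),

     bar (7, x_3(D), &global_cst);

   would typically produce an IPA_JF_CONST jump function for the first
   argument, a simple IPA_JF_PASS_THROUGH for the second one if x is a
   parameter of the caller, and an IPA_JF_CONST for the third one whose known
   aggregate contents may additionally be filled in by
   determine_known_aggregate_parts.  */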
2222
2223 static void
2224 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2225 struct cgraph_edge *cs)
2226 {
2227 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2228 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2229 gcall *call = cs->call_stmt;
2230 int n, arg_num = gimple_call_num_args (call);
2231 bool useful_context = false;
2232
2233 if (arg_num == 0 || args->jump_functions)
2234 return;
2235 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2236 if (flag_devirtualize)
2237 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2238
2239 if (gimple_call_internal_p (call))
2240 return;
2241 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2242 return;
2243
2244 for (n = 0; n < arg_num; n++)
2245 {
2246 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2247 tree arg = gimple_call_arg (call, n);
2248 tree param_type = ipa_get_callee_param_type (cs, n);
2249 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2250 {
2251 tree instance;
2252 class ipa_polymorphic_call_context context (cs->caller->decl,
2253 arg, cs->call_stmt,
2254 &instance);
2255 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2256 &fbi->aa_walk_budget);
2257 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2258 if (!context.useless_p ())
2259 useful_context = true;
2260 }
2261
2262 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2263 {
2264 bool addr_nonzero = false;
2265 bool strict_overflow = false;
2266
2267 if (TREE_CODE (arg) == SSA_NAME
2268 && param_type
2269 && get_ptr_nonnull (arg))
2270 addr_nonzero = true;
2271 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2272 addr_nonzero = true;
2273
2274 if (addr_nonzero)
2275 {
2276 tree z = build_int_cst (TREE_TYPE (arg), 0);
2277 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2278 }
2279 else
2280 gcc_assert (!jfunc->m_vr);
2281 }
2282 else
2283 {
2284 wide_int min, max;
2285 value_range_kind kind;
2286 if (TREE_CODE (arg) == SSA_NAME
2287 && param_type
2288 && (kind = get_range_info (arg, &min, &max))
2289 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2290 {
2291 value_range resvr;
2292 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2293 wide_int_to_tree (TREE_TYPE (arg), max),
2294 kind);
2295 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2296 &tmpvr, TREE_TYPE (arg));
2297 if (!resvr.undefined_p () && !resvr.varying_p ())
2298 ipa_set_jfunc_vr (jfunc, &resvr);
2299 else
2300 gcc_assert (!jfunc->m_vr);
2301 }
2302 else
2303 gcc_assert (!jfunc->m_vr);
2304 }
2305
2306 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2307 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2308 {
2309 if (TREE_CODE (arg) == SSA_NAME)
2310 ipa_set_jfunc_bits (jfunc, 0,
2311 widest_int::from (get_nonzero_bits (arg),
2312 TYPE_SIGN (TREE_TYPE (arg))));
2313 else
2314 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2315 }
2316 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2317 {
2318 unsigned HOST_WIDE_INT bitpos;
2319 unsigned align;
2320
2321 get_pointer_alignment_1 (arg, &align, &bitpos);
2322 widest_int mask = wi::bit_and_not
2323 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2324 align / BITS_PER_UNIT - 1);
2325 widest_int value = bitpos / BITS_PER_UNIT;
2326 ipa_set_jfunc_bits (jfunc, value, mask);
2327 }
2328 else
2329 gcc_assert (!jfunc->bits);
2330
2331 if (is_gimple_ip_invariant (arg)
2332 || (VAR_P (arg)
2333 && is_global_var (arg)
2334 && TREE_READONLY (arg)))
2335 ipa_set_jf_constant (jfunc, arg, cs);
2336 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2337 && TREE_CODE (arg) == PARM_DECL)
2338 {
2339 int index = ipa_get_param_decl_index (info, arg);
2340
2341 gcc_assert (index >= 0);
2342 /* Aggregate passed by value, check for pass-through, otherwise we
2343 will attempt to fill in aggregate contents later in this
2344 for cycle. */
2345 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2346 {
2347 ipa_set_jf_simple_pass_through (jfunc, index, false);
2348 continue;
2349 }
2350 }
2351 else if (TREE_CODE (arg) == SSA_NAME)
2352 {
2353 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2354 {
2355 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2356 if (index >= 0)
2357 {
2358 bool agg_p;
2359 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2360 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2361 }
2362 }
2363 else
2364 {
2365 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2366 if (is_gimple_assign (stmt))
2367 compute_complex_assign_jump_func (fbi, info, jfunc,
2368 call, stmt, arg, param_type);
2369 else if (gimple_code (stmt) == GIMPLE_PHI)
2370 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2371 call,
2372 as_a <gphi *> (stmt));
2373 }
2374 }
2375
2376 /* If ARG is a pointer, we cannot use its type to determine the type of the
2377 aggregate passed (because type conversions are ignored in gimple). Usually we
2378 can safely get the type from the function declaration, but in case of K&R
2379 prototypes or variadic functions we can try our luck with the type of the
2380 pointer passed. TODO: Since we look for actual initialization of the memory
2381 object, we may do better by working out the type from the memory stores we find. */
2382 if (!param_type)
2383 param_type = TREE_TYPE (arg);
2384
2385 if ((jfunc->type != IPA_JF_PASS_THROUGH
2386 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2387 && (jfunc->type != IPA_JF_ANCESTOR
2388 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2389 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2390 || POINTER_TYPE_P (param_type)))
2391 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2392 }
2393 if (!useful_context)
2394 vec_free (args->polymorphic_call_contexts);
2395 }
2396
2397 /* Compute jump functions for all edges - both direct and indirect - outgoing
2398 from BB. */
2399
2400 static void
2401 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2402 {
2403 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2404 int i;
2405 struct cgraph_edge *cs;
2406
2407 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2408 {
2409 struct cgraph_node *callee = cs->callee;
2410
2411 if (callee)
2412 {
2413 callee = callee->ultimate_alias_target ();
2414 /* We do not need to bother analyzing calls to unknown functions
2415 unless they may become known during lto/whopr. */
2416 if (!callee->definition && !flag_lto
2417 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2418 continue;
2419 }
2420 ipa_compute_jump_functions_for_edge (fbi, cs);
2421 }
2422 }
2423
2424 /* If STMT looks like a statement loading a value from a member pointer formal
2425 parameter, return that parameter and store the offset of the field to
2426 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2427 might be clobbered). If USE_DELTA, then we look for a use of the delta
2428 field rather than the pfn. */
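/* For example, in the member pointer pattern shown before
   ipa_analyze_indirect_call_uses below, the statement

     f$__pfn_24 = f.__pfn;

   where f is a PARM_DECL of a member pointer type makes this function return
   f and store the bit position of the __pfn field to *OFFSET_P (with
   USE_DELTA, the __delta field would be expected instead).  */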
2429
2430 static tree
2431 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2432 HOST_WIDE_INT *offset_p)
2433 {
2434 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2435
2436 if (!gimple_assign_single_p (stmt))
2437 return NULL_TREE;
2438
2439 rhs = gimple_assign_rhs1 (stmt);
2440 if (TREE_CODE (rhs) == COMPONENT_REF)
2441 {
2442 ref_field = TREE_OPERAND (rhs, 1);
2443 rhs = TREE_OPERAND (rhs, 0);
2444 }
2445 else
2446 ref_field = NULL_TREE;
2447 if (TREE_CODE (rhs) != MEM_REF)
2448 return NULL_TREE;
2449 rec = TREE_OPERAND (rhs, 0);
2450 if (TREE_CODE (rec) != ADDR_EXPR)
2451 return NULL_TREE;
2452 rec = TREE_OPERAND (rec, 0);
2453 if (TREE_CODE (rec) != PARM_DECL
2454 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2455 return NULL_TREE;
2456 ref_offset = TREE_OPERAND (rhs, 1);
2457
2458 if (use_delta)
2459 fld = delta_field;
2460 else
2461 fld = ptr_field;
2462 if (offset_p)
2463 *offset_p = int_bit_position (fld);
2464
2465 if (ref_field)
2466 {
2467 if (integer_nonzerop (ref_offset))
2468 return NULL_TREE;
2469 return ref_field == fld ? rec : NULL_TREE;
2470 }
2471 else
2472 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2473 : NULL_TREE;
2474 }
2475
2476 /* Returns true iff T is an SSA_NAME defined by a statement. */
2477
2478 static bool
2479 ipa_is_ssa_with_stmt_def (tree t)
2480 {
2481 if (TREE_CODE (t) == SSA_NAME
2482 && !SSA_NAME_IS_DEFAULT_DEF (t))
2483 return true;
2484 else
2485 return false;
2486 }
2487
2488 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2489 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2490 indirect call graph edge.
2491 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2492
2493 static struct cgraph_edge *
2494 ipa_note_param_call (struct cgraph_node *node, int param_index,
2495 gcall *stmt, bool polymorphic)
2496 {
2497 struct cgraph_edge *cs;
2498
2499 cs = node->get_edge (stmt);
2500 cs->indirect_info->param_index = param_index;
2501 cs->indirect_info->agg_contents = 0;
2502 cs->indirect_info->member_ptr = 0;
2503 cs->indirect_info->guaranteed_unmodified = 0;
2504 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2505 param_index, true);
2506 if (cs->indirect_info->polymorphic || polymorphic)
2507 ipa_set_param_used_by_polymorphic_call
2508 (IPA_NODE_REF (node), param_index, true);
2509 return cs;
2510 }
2511
2512 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2513 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2514 intermediate information about each formal parameter. Currently it checks
2515 whether the call calls a pointer that is a formal parameter and if so, the
2516 parameter is marked with the called flag and an indirect call graph edge
2517 describing the call is created. This is very simple for ordinary pointers
2518 represented in SSA but not-so-nice when it comes to member pointers. The
2519 ugly part of this function does nothing more than trying to match the
2520 pattern of such a call. An example of such a pattern is the gimple dump
2521 below, the call is on the last line:
2522
2523 <bb 2>:
2524 f$__delta_5 = f.__delta;
2525 f$__pfn_24 = f.__pfn;
2526
2527 or
2528 <bb 2>:
2529 f$__delta_5 = MEM[(struct *)&f];
2530 f$__pfn_24 = MEM[(struct *)&f + 4B];
2531
2532 and a few lines below:
2533
2534 <bb 5>
2535 D.2496_3 = (int) f$__pfn_24;
2536 D.2497_4 = D.2496_3 & 1;
2537 if (D.2497_4 != 0)
2538 goto <bb 3>;
2539 else
2540 goto <bb 4>;
2541
2542 <bb 6>:
2543 D.2500_7 = (unsigned int) f$__delta_5;
2544 D.2501_8 = &S + D.2500_7;
2545 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2546 D.2503_10 = *D.2502_9;
2547 D.2504_12 = f$__pfn_24 + -1;
2548 D.2505_13 = (unsigned int) D.2504_12;
2549 D.2506_14 = D.2503_10 + D.2505_13;
2550 D.2507_15 = *D.2506_14;
2551 iftmp.11_16 = (String:: *) D.2507_15;
2552
2553 <bb 7>:
2554 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2555 D.2500_19 = (unsigned int) f$__delta_5;
2556 D.2508_20 = &S + D.2500_19;
2557 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2558
2559 Such patterns are results of simple calls to a member pointer:
2560
2561 int doprinting (int (MyString::* f)(int) const)
2562 {
2563 MyString S ("somestring");
2564
2565 return (S.*f)(4);
2566 }
2567
2568 Moreover, the function also looks for called pointers loaded from aggregates
2569 passed by value or reference. */
2570
2571 static void
2572 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2573 tree target)
2574 {
2575 class ipa_node_params *info = fbi->info;
2576 HOST_WIDE_INT offset;
2577 bool by_ref;
2578
2579 if (SSA_NAME_IS_DEFAULT_DEF (target))
2580 {
2581 tree var = SSA_NAME_VAR (target);
2582 int index = ipa_get_param_decl_index (info, var);
2583 if (index >= 0)
2584 ipa_note_param_call (fbi->node, index, call, false);
2585 return;
2586 }
2587
2588 int index;
2589 gimple *def = SSA_NAME_DEF_STMT (target);
2590 bool guaranteed_unmodified;
2591 if (gimple_assign_single_p (def)
2592 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2593 gimple_assign_rhs1 (def), &index, &offset,
2594 NULL, &by_ref, &guaranteed_unmodified))
2595 {
2596 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2597 call, false);
2598 cs->indirect_info->offset = offset;
2599 cs->indirect_info->agg_contents = 1;
2600 cs->indirect_info->by_ref = by_ref;
2601 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2602 return;
2603 }
2604
2605 /* Now we need to try to match the complex pattern of calling a member
2606 pointer. */
2607 if (gimple_code (def) != GIMPLE_PHI
2608 || gimple_phi_num_args (def) != 2
2609 || !POINTER_TYPE_P (TREE_TYPE (target))
2610 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2611 return;
2612
2613 /* First, we need to check whether one of these is a load from a member
2614 pointer that is a parameter to this function. */
2615 tree n1 = PHI_ARG_DEF (def, 0);
2616 tree n2 = PHI_ARG_DEF (def, 1);
2617 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2618 return;
2619 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2620 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2621
2622 tree rec;
2623 basic_block bb, virt_bb;
2624 basic_block join = gimple_bb (def);
2625 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2626 {
2627 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2628 return;
2629
2630 bb = EDGE_PRED (join, 0)->src;
2631 virt_bb = gimple_bb (d2);
2632 }
2633 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2634 {
2635 bb = EDGE_PRED (join, 1)->src;
2636 virt_bb = gimple_bb (d1);
2637 }
2638 else
2639 return;
2640
2641 /* Second, we need to check that the basic blocks are laid out in the way
2642 corresponding to the pattern. */
2643
2644 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2645 || single_pred (virt_bb) != bb
2646 || single_succ (virt_bb) != join)
2647 return;
2648
2649 /* Third, let's see that the branching is done depending on the least
2650 significant bit of the pfn. */
2651
2652 gimple *branch = last_stmt (bb);
2653 if (!branch || gimple_code (branch) != GIMPLE_COND)
2654 return;
2655
2656 if ((gimple_cond_code (branch) != NE_EXPR
2657 && gimple_cond_code (branch) != EQ_EXPR)
2658 || !integer_zerop (gimple_cond_rhs (branch)))
2659 return;
2660
2661 tree cond = gimple_cond_lhs (branch);
2662 if (!ipa_is_ssa_with_stmt_def (cond))
2663 return;
2664
2665 def = SSA_NAME_DEF_STMT (cond);
2666 if (!is_gimple_assign (def)
2667 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2668 || !integer_onep (gimple_assign_rhs2 (def)))
2669 return;
2670
2671 cond = gimple_assign_rhs1 (def);
2672 if (!ipa_is_ssa_with_stmt_def (cond))
2673 return;
2674
2675 def = SSA_NAME_DEF_STMT (cond);
2676
2677 if (is_gimple_assign (def)
2678 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2679 {
2680 cond = gimple_assign_rhs1 (def);
2681 if (!ipa_is_ssa_with_stmt_def (cond))
2682 return;
2683 def = SSA_NAME_DEF_STMT (cond);
2684 }
2685
2686 tree rec2;
2687 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2688 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2689 == ptrmemfunc_vbit_in_delta),
2690 NULL);
2691 if (rec != rec2)
2692 return;
2693
2694 index = ipa_get_param_decl_index (info, rec);
2695 if (index >= 0
2696 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2697 {
2698 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2699 call, false);
2700 cs->indirect_info->offset = offset;
2701 cs->indirect_info->agg_contents = 1;
2702 cs->indirect_info->member_ptr = 1;
2703 cs->indirect_info->guaranteed_unmodified = 1;
2704 }
2705
2706 return;
2707 }
2708
2709 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2710 object referenced in the expression is a formal parameter of the caller
2711 FBI->node (described by FBI->info), create a call note for the
2712 statement. */
2713
2714 static void
2715 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2716 gcall *call, tree target)
2717 {
2718 tree obj = OBJ_TYPE_REF_OBJECT (target);
2719 int index;
2720 HOST_WIDE_INT anc_offset;
2721
2722 if (!flag_devirtualize)
2723 return;
2724
2725 if (TREE_CODE (obj) != SSA_NAME)
2726 return;
2727
2728 class ipa_node_params *info = fbi->info;
2729 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2730 {
2731 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2732 return;
2733
2734 anc_offset = 0;
2735 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2736 gcc_assert (index >= 0);
2737 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2738 call))
2739 return;
2740 }
2741 else
2742 {
2743 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2744 tree expr;
2745
2746 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2747 if (!expr)
2748 return;
2749 index = ipa_get_param_decl_index (info,
2750 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2751 gcc_assert (index >= 0);
2752 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2753 call, anc_offset))
2754 return;
2755 }
2756
2757 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2758 call, true);
2759 class cgraph_indirect_call_info *ii = cs->indirect_info;
2760 ii->offset = anc_offset;
2761 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2762 ii->otr_type = obj_type_ref_class (target);
2763 ii->polymorphic = 1;
2764 }
2765
2766 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2767 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2768 containing intermediate information about each formal parameter. */
2769
2770 static void
2771 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2772 {
2773 tree target = gimple_call_fn (call);
2774
2775 if (!target
2776 || (TREE_CODE (target) != SSA_NAME
2777 && !virtual_method_call_p (target)))
2778 return;
2779
2780 struct cgraph_edge *cs = fbi->node->get_edge (call);
2781 /* If we previously turned the call into a direct call, there is
2782 no need to analyze. */
2783 if (cs && !cs->indirect_unknown_callee)
2784 return;
2785
2786 if (cs->indirect_info->polymorphic && flag_devirtualize)
2787 {
2788 tree instance;
2789 tree target = gimple_call_fn (call);
2790 ipa_polymorphic_call_context context (current_function_decl,
2791 target, call, &instance);
2792
2793 gcc_checking_assert (cs->indirect_info->otr_type
2794 == obj_type_ref_class (target));
2795 gcc_checking_assert (cs->indirect_info->otr_token
2796 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2797
2798 cs->indirect_info->vptr_changed
2799 = !context.get_dynamic_type (instance,
2800 OBJ_TYPE_REF_OBJECT (target),
2801 obj_type_ref_class (target), call,
2802 &fbi->aa_walk_budget);
2803 cs->indirect_info->context = context;
2804 }
2805
2806 if (TREE_CODE (target) == SSA_NAME)
2807 ipa_analyze_indirect_call_uses (fbi, call, target);
2808 else if (virtual_method_call_p (target))
2809 ipa_analyze_virtual_call_uses (fbi, call, target);
2810 }
2811
2812
2813 /* Analyze the call statement STMT with respect to formal parameters (described
2814 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2815 formal parameters are called. */
2816
2817 static void
2818 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2819 {
2820 if (is_gimple_call (stmt))
2821 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2822 }
2823
2824 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2825 address operands alike. If OP is a parameter declaration, mark it as used
2826 in the info structure passed in DATA. */
2827
2828 static bool
2829 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2830 {
2831 class ipa_node_params *info = (class ipa_node_params *) data;
2832
2833 op = get_base_address (op);
2834 if (op
2835 && TREE_CODE (op) == PARM_DECL)
2836 {
2837 int index = ipa_get_param_decl_index (info, op);
2838 gcc_assert (index >= 0);
2839 ipa_set_param_used (info, index, true);
2840 }
2841
2842 return false;
2843 }
2844
2845 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2846 the findings in various structures of the associated ipa_node_params
2847 structure, such as parameter flags, notes etc. FBI holds various data about
2848 the function being analyzed. */
2849
2850 static void
2851 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2852 {
2853 gimple_stmt_iterator gsi;
2854 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2855 {
2856 gimple *stmt = gsi_stmt (gsi);
2857
2858 if (is_gimple_debug (stmt))
2859 continue;
2860
2861 ipa_analyze_stmt_uses (fbi, stmt);
2862 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2863 visit_ref_for_mod_analysis,
2864 visit_ref_for_mod_analysis,
2865 visit_ref_for_mod_analysis);
2866 }
2867 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2868 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2869 visit_ref_for_mod_analysis,
2870 visit_ref_for_mod_analysis,
2871 visit_ref_for_mod_analysis);
2872 }
2873
2874 /* Calculate controlled uses of parameters of NODE. */
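/* For instance, if every use of a pointer parameter is an argument of some
   call (and is therefore described by a jump function), the number of such
   uses is recorded; a single use in any other non-debug statement makes the
   result IPA_UNDESCRIBED_USE.  */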
2875
2876 static void
2877 ipa_analyze_controlled_uses (struct cgraph_node *node)
2878 {
2879 class ipa_node_params *info = IPA_NODE_REF (node);
2880
2881 for (int i = 0; i < ipa_get_param_count (info); i++)
2882 {
2883 tree parm = ipa_get_param (info, i);
2884 int controlled_uses = 0;
2885
2886 /* For SSA regs see if parameter is used. For non-SSA we compute
2887 the flag during modification analysis. */
2888 if (is_gimple_reg (parm))
2889 {
2890 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2891 parm);
2892 if (ddef && !has_zero_uses (ddef))
2893 {
2894 imm_use_iterator imm_iter;
2895 use_operand_p use_p;
2896
2897 ipa_set_param_used (info, i, true);
2898 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2899 if (!is_gimple_call (USE_STMT (use_p)))
2900 {
2901 if (!is_gimple_debug (USE_STMT (use_p)))
2902 {
2903 controlled_uses = IPA_UNDESCRIBED_USE;
2904 break;
2905 }
2906 }
2907 else
2908 controlled_uses++;
2909 }
2910 else
2911 controlled_uses = 0;
2912 }
2913 else
2914 controlled_uses = IPA_UNDESCRIBED_USE;
2915 ipa_set_controlled_uses (info, i, controlled_uses);
2916 }
2917 }
2918
2919 /* Free stuff in BI. */
2920
2921 static void
2922 free_ipa_bb_info (struct ipa_bb_info *bi)
2923 {
2924 bi->cg_edges.release ();
2925 bi->param_aa_statuses.release ();
2926 }
2927
2928 /* Dominator walker driving the analysis. */
2929
2930 class analysis_dom_walker : public dom_walker
2931 {
2932 public:
2933 analysis_dom_walker (struct ipa_func_body_info *fbi)
2934 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2935
2936 virtual edge before_dom_children (basic_block);
2937
2938 private:
2939 struct ipa_func_body_info *m_fbi;
2940 };
2941
2942 edge
2943 analysis_dom_walker::before_dom_children (basic_block bb)
2944 {
2945 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2946 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2947 return NULL;
2948 }
2949
2950 /* Release body info FBI. */
2951
2952 void
2953 ipa_release_body_info (struct ipa_func_body_info *fbi)
2954 {
2955 int i;
2956 struct ipa_bb_info *bi;
2957
2958 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2959 free_ipa_bb_info (bi);
2960 fbi->bb_infos.release ();
2961 }
2962
2963 /* Initialize the array describing properties of formal parameters
2964 of NODE, analyze their uses and compute jump functions associated
2965 with actual arguments of calls from within NODE. */
2966
2967 void
2968 ipa_analyze_node (struct cgraph_node *node)
2969 {
2970 struct ipa_func_body_info fbi;
2971 class ipa_node_params *info;
2972
2973 ipa_check_create_node_params ();
2974 ipa_check_create_edge_args ();
2975 info = IPA_NODE_REF_GET_CREATE (node);
2976
2977 if (info->analysis_done)
2978 return;
2979 info->analysis_done = 1;
2980
2981 if (ipa_func_spec_opts_forbid_analysis_p (node))
2982 {
2983 for (int i = 0; i < ipa_get_param_count (info); i++)
2984 {
2985 ipa_set_param_used (info, i, true);
2986 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2987 }
2988 return;
2989 }
2990
2991 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2992 push_cfun (func);
2993 calculate_dominance_info (CDI_DOMINATORS);
2994 ipa_initialize_node_params (node);
2995 ipa_analyze_controlled_uses (node);
2996
2997 fbi.node = node;
2998 fbi.info = IPA_NODE_REF (node);
2999 fbi.bb_infos = vNULL;
3000 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3001 fbi.param_count = ipa_get_param_count (info);
3002 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3003
3004 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3005 {
3006 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3007 bi->cg_edges.safe_push (cs);
3008 }
3009
3010 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3011 {
3012 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3013 bi->cg_edges.safe_push (cs);
3014 }
3015
3016 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3017
3018 ipa_release_body_info (&fbi);
3019 free_dominance_info (CDI_DOMINATORS);
3020 pop_cfun ();
3021 }
3022
3023 /* Update the jump functions associated with call graph edge E when the call
3024 graph edge CS is being inlined, assuming that E->caller is already (possibly
3025 indirectly) inlined into CS->callee and that E has not been inlined. */
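/* For example (hypothetical), if E described its second argument as a simple
   pass-through of E->caller's parameter 0 and the inlined edge CS passes the
   constant 7 in that position, the pass-through in E's jump functions is
   replaced by an IPA_JF_CONST jump function with value 7.  */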
3026
3027 static void
3028 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3029 struct cgraph_edge *e)
3030 {
3031 class ipa_edge_args *top = IPA_EDGE_REF (cs);
3032 class ipa_edge_args *args = IPA_EDGE_REF (e);
3033 if (!args)
3034 return;
3035 int count = ipa_get_cs_argument_count (args);
3036 int i;
3037
3038 for (i = 0; i < count; i++)
3039 {
3040 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3041 class ipa_polymorphic_call_context *dst_ctx
3042 = ipa_get_ith_polymorhic_call_context (args, i);
3043
3044 if (dst->agg.items)
3045 {
3046 struct ipa_agg_jf_item *item;
3047 int j;
3048
3049 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3050 {
3051 int dst_fid;
3052 struct ipa_jump_func *src;
3053
3054 if (item->jftype != IPA_JF_PASS_THROUGH
3055 && item->jftype != IPA_JF_LOAD_AGG)
3056 continue;
3057
3058 dst_fid = item->value.pass_through.formal_id;
3059 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3060 {
3061 item->jftype = IPA_JF_UNKNOWN;
3062 continue;
3063 }
3064
3065 item->value.pass_through.formal_id = -1;
3066 src = ipa_get_ith_jump_func (top, dst_fid);
3067 if (src->type == IPA_JF_CONST)
3068 {
3069 if (item->jftype == IPA_JF_PASS_THROUGH
3070 && item->value.pass_through.operation == NOP_EXPR)
3071 {
3072 item->jftype = IPA_JF_CONST;
3073 item->value.constant = src->value.constant.value;
3074 continue;
3075 }
3076 }
3077 else if (src->type == IPA_JF_PASS_THROUGH
3078 && src->value.pass_through.operation == NOP_EXPR)
3079 {
3080 if (item->jftype == IPA_JF_PASS_THROUGH
3081 || !item->value.load_agg.by_ref
3082 || src->value.pass_through.agg_preserved)
3083 item->value.pass_through.formal_id
3084 = src->value.pass_through.formal_id;
3085 }
3086 else if (src->type == IPA_JF_ANCESTOR)
3087 {
3088 if (item->jftype == IPA_JF_PASS_THROUGH)
3089 {
3090 if (!src->value.ancestor.offset)
3091 item->value.pass_through.formal_id
3092 = src->value.ancestor.formal_id;
3093 }
3094 else if (src->value.ancestor.agg_preserved)
3095 {
3096 gcc_checking_assert (item->value.load_agg.by_ref);
3097
3098 item->value.pass_through.formal_id
3099 = src->value.ancestor.formal_id;
3100 item->value.load_agg.offset
3101 += src->value.ancestor.offset;
3102 }
3103 }
3104
3105 if (item->value.pass_through.formal_id < 0)
3106 item->jftype = IPA_JF_UNKNOWN;
3107 }
3108 }
3109
3110 if (!top)
3111 {
3112 ipa_set_jf_unknown (dst);
3113 continue;
3114 }
3115
3116 if (dst->type == IPA_JF_ANCESTOR)
3117 {
3118 struct ipa_jump_func *src;
3119 int dst_fid = dst->value.ancestor.formal_id;
3120 class ipa_polymorphic_call_context *src_ctx
3121 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3122
3123 /* Variable number of arguments can cause havoc if we try to access
3124 one that does not exist in the inlined edge. So make sure we
3125 don't. */
3126 if (dst_fid >= ipa_get_cs_argument_count (top))
3127 {
3128 ipa_set_jf_unknown (dst);
3129 continue;
3130 }
3131
3132 src = ipa_get_ith_jump_func (top, dst_fid);
3133
3134 if (src_ctx && !src_ctx->useless_p ())
3135 {
3136 class ipa_polymorphic_call_context ctx = *src_ctx;
3137
3138 /* TODO: Make type preserved safe WRT contexts. */
3139 if (!ipa_get_jf_ancestor_type_preserved (dst))
3140 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3141 ctx.offset_by (dst->value.ancestor.offset);
3142 if (!ctx.useless_p ())
3143 {
3144 if (!dst_ctx)
3145 {
3146 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3147 count, true);
3148 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3149 }
3150
3151 dst_ctx->combine_with (ctx);
3152 }
3153 }
3154
3155 /* Parameter and argument in an ancestor jump function must be of pointer
3156 type, which means access to the aggregate must be by reference. */
3157 gcc_assert (!src->agg.items || src->agg.by_ref);
3158
3159 if (src->agg.items && dst->value.ancestor.agg_preserved)
3160 {
3161 struct ipa_agg_jf_item *item;
3162 int j;
3163
3164 /* Currently we do not produce clobber aggregate jump functions,
3165 replace with merging when we do. */
3166 gcc_assert (!dst->agg.items);
3167
3168 dst->agg.items = vec_safe_copy (src->agg.items);
3169 dst->agg.by_ref = src->agg.by_ref;
3170 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3171 item->offset -= dst->value.ancestor.offset;
3172 }
3173
3174 if (src->type == IPA_JF_PASS_THROUGH
3175 && src->value.pass_through.operation == NOP_EXPR)
3176 {
3177 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3178 dst->value.ancestor.agg_preserved &=
3179 src->value.pass_through.agg_preserved;
3180 }
3181 else if (src->type == IPA_JF_ANCESTOR)
3182 {
3183 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3184 dst->value.ancestor.offset += src->value.ancestor.offset;
3185 dst->value.ancestor.agg_preserved &=
3186 src->value.ancestor.agg_preserved;
3187 }
3188 else
3189 ipa_set_jf_unknown (dst);
3190 }
3191 else if (dst->type == IPA_JF_PASS_THROUGH)
3192 {
3193 struct ipa_jump_func *src;
3194 /* We must check the range because of calls with a variable number of
3195 arguments, and we cannot combine jump functions with operations. */
3196 if (dst->value.pass_through.operation == NOP_EXPR
3197 && (top && dst->value.pass_through.formal_id
3198 < ipa_get_cs_argument_count (top)))
3199 {
3200 int dst_fid = dst->value.pass_through.formal_id;
3201 src = ipa_get_ith_jump_func (top, dst_fid);
3202 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3203 class ipa_polymorphic_call_context *src_ctx
3204 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3205
3206 if (src_ctx && !src_ctx->useless_p ())
3207 {
3208 class ipa_polymorphic_call_context ctx = *src_ctx;
3209
3210 /* TODO: Make type preserved safe WRT contexts. */
3211 if (!ipa_get_jf_pass_through_type_preserved (dst))
3212 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3213 if (!ctx.useless_p ())
3214 {
3215 if (!dst_ctx)
3216 {
3217 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3218 count, true);
3219 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3220 }
3221 dst_ctx->combine_with (ctx);
3222 }
3223 }
3224 switch (src->type)
3225 {
3226 case IPA_JF_UNKNOWN:
3227 ipa_set_jf_unknown (dst);
3228 break;
3229 case IPA_JF_CONST:
3230 ipa_set_jf_cst_copy (dst, src);
3231 break;
3232
3233 case IPA_JF_PASS_THROUGH:
3234 {
3235 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3236 enum tree_code operation;
3237 operation = ipa_get_jf_pass_through_operation (src);
3238
3239 if (operation == NOP_EXPR)
3240 {
3241 bool agg_p;
3242 agg_p = dst_agg_p
3243 && ipa_get_jf_pass_through_agg_preserved (src);
3244 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3245 }
3246 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3247 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3248 else
3249 {
3250 tree operand = ipa_get_jf_pass_through_operand (src);
3251 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3252 operation);
3253 }
3254 break;
3255 }
3256 case IPA_JF_ANCESTOR:
3257 {
3258 bool agg_p;
3259 agg_p = dst_agg_p
3260 && ipa_get_jf_ancestor_agg_preserved (src);
3261 ipa_set_ancestor_jf (dst,
3262 ipa_get_jf_ancestor_offset (src),
3263 ipa_get_jf_ancestor_formal_id (src),
3264 agg_p);
3265 break;
3266 }
3267 default:
3268 gcc_unreachable ();
3269 }
3270
3271 if (src->agg.items
3272 && (dst_agg_p || !src->agg.by_ref))
3273 {
3274 /* Currently we do not produce clobber aggregate jump
3275 functions, replace with merging when we do. */
3276 gcc_assert (!dst->agg.items);
3277
3278 dst->agg.by_ref = src->agg.by_ref;
3279 dst->agg.items = vec_safe_copy (src->agg.items);
3280 }
3281 }
3282 else
3283 ipa_set_jf_unknown (dst);
3284 }
3285 }
3286 }
3287
3288 /* If TARGET is an ADDR_EXPR of a function declaration, make it the
3289 (speculative) destination of an indirect edge IE and return the edge.
3290 Otherwise, return NULL. */
3291
3292 struct cgraph_edge *
3293 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3294 bool speculative)
3295 {
3296 struct cgraph_node *callee;
3297 bool unreachable = false;
3298
3299 if (TREE_CODE (target) == ADDR_EXPR)
3300 target = TREE_OPERAND (target, 0);
3301 if (TREE_CODE (target) != FUNCTION_DECL)
3302 {
3303 target = canonicalize_constructor_val (target, NULL);
3304 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3305 {
3306 /* Member pointer call that goes through a VMT lookup. */
3307 if (ie->indirect_info->member_ptr
3308 /* Or if target is not an invariant expression and we do not
3309 know if it will evaluate to a function at runtime.
3310 This can happen when folding through &VAR, where &VAR
3311 is IP invariant, but VAR itself is not.
3312
3313 TODO: Revisit this when GCC 5 is branched. It seems that
3314 member_ptr check is not needed and that we may try to fold
3315 the expression and see if VAR is readonly. */
3316 || !is_gimple_ip_invariant (target))
3317 {
3318 if (dump_enabled_p ())
3319 {
3320 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3321 "discovered direct call non-invariant %s\n",
3322 ie->caller->dump_name ());
3323 }
3324 return NULL;
3325 }
3326
3327
3328 if (dump_enabled_p ())
3329 {
3330 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3331 "discovered direct call to non-function in %s, "
3332 "making it __builtin_unreachable\n",
3333 ie->caller->dump_name ());
3334 }
3335
3336 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3337 callee = cgraph_node::get_create (target);
3338 unreachable = true;
3339 }
3340 else
3341 callee = cgraph_node::get (target);
3342 }
3343 else
3344 callee = cgraph_node::get (target);
3345
3346 /* Because may-edges are not explicitly represented and the vtable may be
3347 external, we may create the first reference to the object in the unit. */
3348 if (!callee || callee->inlined_to)
3349 {
3350
3351 /* We had better ensure we can refer to it.
3352 In the case of static functions we are out of luck, since we have already
3353 removed their bodies. In the case of public functions we may or may
3354 not introduce the reference. */
3355 if (!canonicalize_constructor_val (target, NULL)
3356 || !TREE_PUBLIC (target))
3357 {
3358 if (dump_file)
3359 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3360 "(%s -> %s) but cannot refer to it. Giving up.\n",
3361 ie->caller->dump_name (),
3362 ie->callee->dump_name ());
3363 return NULL;
3364 }
3365 callee = cgraph_node::get_create (target);
3366 }
3367
3368 /* If the edge is already speculative, we only dump whether the known
target agrees with the previous speculation and give up. */
3369 if (speculative && ie->speculative)
3370 {
3371 if (dump_file)
3372 {
3373 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3374 if (!e2)
3375 {
3376 if (dump_file)
3377 fprintf (dump_file, "ipa-prop: Discovered call to a "
3378 "speculative target (%s -> %s) but the call is "
3379 "already speculated to different target. "
3380 "Giving up.\n",
3381 ie->caller->dump_name (), callee->dump_name ());
3382 }
3383 else
3384 {
3385 if (dump_file)
3386 fprintf (dump_file,
3387 "ipa-prop: Discovered call to a speculative target "
3388 "(%s -> %s) this agree with previous speculation.\n",
3389 ie->caller->dump_name (), callee->dump_name ());
3390 }
3391 }
3392 return NULL;
3393 }
3394
3395 if (!dbg_cnt (devirt))
3396 return NULL;
3397
3398 ipa_check_create_node_params ();
3399
3400 /* We cannot make edges to inline clones. It is a bug that someone removed
3401 the cgraph node too early. */
3402 gcc_assert (!callee->inlined_to);
3403
3404 if (dump_file && !unreachable)
3405 {
3406 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3407 "(%s -> %s), for stmt ",
3408 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3409 speculative ? "speculative" : "known",
3410 ie->caller->dump_name (),
3411 callee->dump_name ());
3412 if (ie->call_stmt)
3413 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3414 else
3415 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3416 }
3417 if (dump_enabled_p ())
3418 {
3419 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3420 "converting indirect call in %s to direct call to %s\n",
3421 ie->caller->dump_name (), callee->dump_name ());
3422 }
3423 if (!speculative)
3424 {
3425 struct cgraph_edge *orig = ie;
3426 ie = cgraph_edge::make_direct (ie, callee);
3427 /* If we resolved speculative edge the cost is already up to date
3428 for direct call (adjusted by inline_edge_duplication_hook). */
3429 if (ie == orig)
3430 {
3431 ipa_call_summary *es = ipa_call_summaries->get (ie);
3432 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3433 - eni_size_weights.call_cost);
3434 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3435 - eni_time_weights.call_cost);
3436 }
3437 }
3438 else
3439 {
3440 if (!callee->can_be_discarded_p ())
3441 {
3442 cgraph_node *alias;
3443 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3444 if (alias)
3445 callee = alias;
3446 }
3447 /* make_speculative will update ie's cost to direct call cost. */
3448 ie = ie->make_speculative
3449 (callee, ie->count.apply_scale (8, 10));
3450 }
3451
3452 return ie;
3453 }
3454
3455 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3456 CONSTRUCTOR and return it. Return NULL if the search fails for some
3457 reason. */
3458
3459 static tree
3460 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3461 {
3462 tree type = TREE_TYPE (constructor);
3463 if (TREE_CODE (type) != ARRAY_TYPE
3464 && TREE_CODE (type) != RECORD_TYPE)
3465 return NULL;
3466
3467 unsigned ix;
3468 tree index, val;
3469 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3470 {
3471 HOST_WIDE_INT elt_offset;
3472 if (TREE_CODE (type) == ARRAY_TYPE)
3473 {
3474 offset_int off;
3475 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3476 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3477
3478 if (index)
3479 {
3480 if (TREE_CODE (index) == RANGE_EXPR)
3481 off = wi::to_offset (TREE_OPERAND (index, 0));
3482 else
3483 off = wi::to_offset (index);
3484 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3485 {
3486 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3487 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3488 off = wi::sext (off - wi::to_offset (low_bound),
3489 TYPE_PRECISION (TREE_TYPE (index)));
3490 }
3491 off *= wi::to_offset (unit_size);
3492 /* ??? Handle more than just the first index of a
3493 RANGE_EXPR. */
3494 }
3495 else
3496 off = wi::to_offset (unit_size) * ix;
3497
3498 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3499 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3500 continue;
3501 elt_offset = off.to_shwi ();
3502 }
3503 else if (TREE_CODE (type) == RECORD_TYPE)
3504 {
3505 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3506 if (DECL_BIT_FIELD (index))
3507 continue;
3508 elt_offset = int_bit_position (index);
3509 }
3510 else
3511 gcc_unreachable ();
3512
3513 if (elt_offset > req_offset)
3514 return NULL;
3515
3516 if (TREE_CODE (val) == CONSTRUCTOR)
3517 return find_constructor_constant_at_offset (val,
3518 req_offset - elt_offset);
3519
3520 if (elt_offset == req_offset
3521 && is_gimple_reg_type (TREE_TYPE (val))
3522 && is_gimple_ip_invariant (val))
3523 return val;
3524 }
3525 return NULL;
3526 }
3527
3528 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3529 invariant from a static constructor and if so, return it. Otherwise return
3530 NULL. */
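/* For illustration (hypothetical), with BY_REF true and SCALAR being
   &some_readonly_var, where

     static const struct S some_readonly_var = { 16, 32 };

   a lookup at the OFFSET corresponding to the second field would return the
   constant 32 through find_constructor_constant_at_offset.  */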
3531
3532 static tree
3533 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3534 {
3535 if (by_ref)
3536 {
3537 if (TREE_CODE (scalar) != ADDR_EXPR)
3538 return NULL;
3539 scalar = TREE_OPERAND (scalar, 0);
3540 }
3541
3542 if (!VAR_P (scalar)
3543 || !is_global_var (scalar)
3544 || !TREE_READONLY (scalar)
3545 || !DECL_INITIAL (scalar)
3546 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3547 return NULL;
3548
3549 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3550 }
3551
3552 /* Retrieve a value from AGG, a set of known offsets/values for an aggregate,
3553 or from the static initializer of SCALAR (which can be NULL), at the given
3554 OFFSET, or return NULL if there is none. BY_REF specifies whether the value
3555 has to be passed by reference or by value. If FROM_GLOBAL_CONSTANT is
3556 non-NULL, then the boolean it points to is set to true if the value comes
3557 from an initializer of a constant. */
3558
3559 tree
3560 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3561 HOST_WIDE_INT offset, bool by_ref,
3562 bool *from_global_constant)
3563 {
3564 struct ipa_agg_value *item;
3565 int i;
3566
3567 if (scalar)
3568 {
3569 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3570 if (res)
3571 {
3572 if (from_global_constant)
3573 *from_global_constant = true;
3574 return res;
3575 }
3576 }
3577
3578 if (!agg
3579 || by_ref != agg->by_ref)
3580 return NULL;
3581
3582 FOR_EACH_VEC_ELT (agg->items, i, item)
3583 if (item->offset == offset)
3584 {
3585 /* Currently we do not have clobber values, return NULL for them once
3586 we do. */
3587 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3588 if (from_global_constant)
3589 *from_global_constant = false;
3590 return item->value;
3591 }
3592 return NULL;
3593 }
3594
3595 /* Remove a reference to SYMBOL from the list of references of a node given by
3596 reference description RDESC. Return true if the reference has been
3597 successfully found and removed. */
3598
3599 static bool
3600 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3601 {
3602 struct ipa_ref *to_del;
3603 struct cgraph_edge *origin;
3604
3605 origin = rdesc->cs;
3606 if (!origin)
3607 return false;
3608 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3609 origin->lto_stmt_uid);
3610 if (!to_del)
3611 return false;
3612
3613 to_del->remove_reference ();
3614 if (dump_file)
3615 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3616 origin->caller->dump_name (), symbol->dump_name ());
3617 return true;
3618 }
3619
3620 /* If JFUNC has a reference description with refcount different from
3621 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3622 NULL. JFUNC must be a constant jump function. */
3623
3624 static struct ipa_cst_ref_desc *
3625 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3626 {
3627 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3628 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3629 return rdesc;
3630 else
3631 return NULL;
3632 }
3633
3634 /* If the value of constant jump function JFUNC is an address of a function
3635 declaration, return the associated call graph node. Otherwise return
3636 NULL. */
3637
3638 static cgraph_node *
3639 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3640 {
3641 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3642 tree cst = ipa_get_jf_constant (jfunc);
3643 if (TREE_CODE (cst) != ADDR_EXPR
3644 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3645 return NULL;
3646
3647 return cgraph_node::get (TREE_OPERAND (cst, 0));
3648 }
3649
3650
3651 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3652 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3653 the edge specified in the rdesc. Return false if either the symbol or the
3654 reference could not be found, otherwise return true. */
3655
3656 static bool
3657 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3658 {
3659 struct ipa_cst_ref_desc *rdesc;
3660 if (jfunc->type == IPA_JF_CONST
3661 && (rdesc = jfunc_rdesc_usable (jfunc))
3662 && --rdesc->refcount == 0)
3663 {
3664 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3665 if (!symbol)
3666 return false;
3667
3668 return remove_described_reference (symbol, rdesc);
3669 }
3670 return true;
3671 }
3672
3673 /* Try to find a destination for indirect edge IE that corresponds to a simple
3674 call or a call of a member function pointer and where the destination is a
3675 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3676 the type of the parameter to which the result of JFUNC is passed. If it can
3677 be determined, return the newly direct edge, otherwise return NULL.
3678 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3679 relative to. */
3680
3681 static struct cgraph_edge *
3682 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3683 struct ipa_jump_func *jfunc, tree target_type,
3684 struct cgraph_node *new_root,
3685 class ipa_node_params *new_root_info)
3686 {
3687 struct cgraph_edge *cs;
3688 tree target;
3689 bool agg_contents = ie->indirect_info->agg_contents;
3690 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3691 if (agg_contents)
3692 {
3693 bool from_global_constant;
3694 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3695 new_root,
3696 &jfunc->agg);
3697 target = ipa_find_agg_cst_for_param (&agg, scalar,
3698 ie->indirect_info->offset,
3699 ie->indirect_info->by_ref,
3700 &from_global_constant);
3701 agg.release ();
3702 if (target
3703 && !from_global_constant
3704 && !ie->indirect_info->guaranteed_unmodified)
3705 return NULL;
3706 }
3707 else
3708 target = scalar;
3709 if (!target)
3710 return NULL;
3711 cs = ipa_make_edge_direct_to_target (ie, target);
3712
3713 if (cs && !agg_contents)
3714 {
3715 bool ok;
3716 gcc_checking_assert (cs->callee
3717 && (cs != ie
3718 || jfunc->type != IPA_JF_CONST
3719 || !cgraph_node_for_jfunc (jfunc)
3720 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3721 ok = try_decrement_rdesc_refcount (jfunc);
3722 gcc_checking_assert (ok);
3723 }
3724
3725 return cs;
3726 }
3727
3728 /* Return the target to be used in cases of impossible devirtualization. IE
3729 and target (the latter can be NULL) are dumped when dumping is enabled. */
3730
3731 tree
3732 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3733 {
3734 if (dump_file)
3735 {
3736 if (target)
3737 fprintf (dump_file,
3738 "Type inconsistent devirtualization: %s->%s\n",
3739 ie->caller->dump_name (),
3740 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3741 else
3742 fprintf (dump_file,
3743 "No devirtualization target in %s\n",
3744 ie->caller->dump_name ());
3745 }
3746 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3747 cgraph_node::get_create (new_target);
3748 return new_target;
3749 }
3750
3751 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3752 call based on a formal parameter which is described by jump function JFUNC
3753 and if it can be determined, make it direct and return the direct edge.
3754 Otherwise, return NULL. CTX describes the polymorphic context that the
3755 parameter the call is based on brings along with it. NEW_ROOT and
3756 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3757 to. */
3758
3759 static struct cgraph_edge *
3760 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3761 struct ipa_jump_func *jfunc,
3762 class ipa_polymorphic_call_context ctx,
3763 struct cgraph_node *new_root,
3764 class ipa_node_params *new_root_info)
3765 {
3766 tree target = NULL;
3767 bool speculative = false;
3768
3769 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3770 return NULL;
3771
3772 gcc_assert (!ie->indirect_info->by_ref);
3773
3774 /* Try to do lookup via known virtual table pointer value. */
3775 if (!ie->indirect_info->vptr_changed
3776 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3777 {
3778 tree vtable;
3779 unsigned HOST_WIDE_INT offset;
3780 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3781 : NULL;
3782 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3783 new_root,
3784 &jfunc->agg);
3785 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3786 ie->indirect_info->offset,
3787 true);
3788 agg.release ();
3789 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3790 {
3791 bool can_refer;
3792 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3793 vtable, offset, &can_refer);
3794 if (can_refer)
3795 {
3796 if (!t
3797 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3798 || !possible_polymorphic_call_target_p
3799 (ie, cgraph_node::get (t)))
3800 {
3801 /* Do not speculate builtin_unreachable, it is stupid! */
3802 if (!ie->indirect_info->vptr_changed)
3803 target = ipa_impossible_devirt_target (ie, target);
3804 else
3805 target = NULL;
3806 }
3807 else
3808 {
3809 target = t;
3810 speculative = ie->indirect_info->vptr_changed;
3811 }
3812 }
3813 }
3814 }
3815
3816 ipa_polymorphic_call_context ie_context (ie);
3817 vec <cgraph_node *>targets;
3818 bool final;
3819
3820 ctx.offset_by (ie->indirect_info->offset);
3821 if (ie->indirect_info->vptr_changed)
3822 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3823 ie->indirect_info->otr_type);
3824 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3825 targets = possible_polymorphic_call_targets
3826 (ie->indirect_info->otr_type,
3827 ie->indirect_info->otr_token,
3828 ctx, &final);
3829 if (final && targets.length () <= 1)
3830 {
3831 speculative = false;
3832 if (targets.length () == 1)
3833 target = targets[0]->decl;
3834 else
3835 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3836 }
3837 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3838 && !ie->speculative && ie->maybe_hot_p ())
3839 {
3840 cgraph_node *n;
3841 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3842 ie->indirect_info->otr_token,
3843 ie->indirect_info->context);
3844 if (n)
3845 {
3846 target = n->decl;
3847 speculative = true;
3848 }
3849 }
3850
3851 if (target)
3852 {
3853 if (!possible_polymorphic_call_target_p
3854 (ie, cgraph_node::get_create (target)))
3855 {
3856 if (speculative)
3857 return NULL;
3858 target = ipa_impossible_devirt_target (ie, target);
3859 }
3860 return ipa_make_edge_direct_to_target (ie, target, speculative);
3861 }
3862 else
3863 return NULL;
3864 }
3865
3866 /* Update the param called notes associated with NODE when CS is being inlined,
3867 assuming NODE is (potentially indirectly) inlined into CS->callee.
3868 Moreover, if the callee is discovered to be constant, create a new cgraph
3869 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3870 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3871
3872 static bool
3873 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3874 struct cgraph_node *node,
3875 vec<cgraph_edge *> *new_edges)
3876 {
3877 class ipa_edge_args *top;
3878 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3879 struct cgraph_node *new_root;
3880 class ipa_node_params *new_root_info, *inlined_node_info;
3881 bool res = false;
3882
3883 ipa_check_create_edge_args ();
3884 top = IPA_EDGE_REF (cs);
3885 new_root = cs->caller->inlined_to
3886 ? cs->caller->inlined_to : cs->caller;
3887 new_root_info = IPA_NODE_REF (new_root);
3888 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3889
3890 for (ie = node->indirect_calls; ie; ie = next_ie)
3891 {
3892 class cgraph_indirect_call_info *ici = ie->indirect_info;
3893 struct ipa_jump_func *jfunc;
3894 int param_index;
3895
3896 next_ie = ie->next_callee;
3897
3898 if (ici->param_index == -1)
3899 continue;
3900
3901 /* We must check range due to calls with variable number of arguments: */
3902 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3903 {
3904 ici->param_index = -1;
3905 continue;
3906 }
3907
3908 param_index = ici->param_index;
3909 jfunc = ipa_get_ith_jump_func (top, param_index);
3910
3911 auto_vec<cgraph_node *, 4> spec_targets;
3912 if (ie->speculative)
3913 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3914 direct;
3915 direct = direct->next_speculative_call_target ())
3916 spec_targets.safe_push (direct->callee);
3917
3918 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3919 new_direct_edge = NULL;
3920 else if (ici->polymorphic)
3921 {
3922 ipa_polymorphic_call_context ctx;
3923 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3924 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3925 new_root,
3926 new_root_info);
3927 }
3928 else
3929 {
3930 tree target_type = ipa_get_type (inlined_node_info, param_index);
3931 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3932 target_type,
3933 new_root,
3934 new_root_info);
3935 }
3936
3937 /* If speculation was removed, then we need to do nothing. */
3938 if (new_direct_edge && new_direct_edge != ie
3939 && spec_targets.contains (new_direct_edge->callee))
3940 {
3941 new_direct_edge->indirect_inlining_edge = 1;
3942 top = IPA_EDGE_REF (cs);
3943 res = true;
3944 if (!new_direct_edge->speculative)
3945 continue;
3946 }
3947 else if (new_direct_edge)
3948 {
3949 new_direct_edge->indirect_inlining_edge = 1;
3950 if (new_edges)
3951 {
3952 new_edges->safe_push (new_direct_edge);
3953 res = true;
3954 }
3955 top = IPA_EDGE_REF (cs);
3956 /* If speculative edge was introduced we still need to update
3957 call info of the indirect edge. */
3958 if (!new_direct_edge->speculative)
3959 continue;
3960 }
3961 if (jfunc->type == IPA_JF_PASS_THROUGH
3962 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3963 {
3964 if (ici->agg_contents
3965 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3966 && !ici->polymorphic)
3967 ici->param_index = -1;
3968 else
3969 {
3970 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3971 if (ici->polymorphic
3972 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3973 ici->vptr_changed = true;
3974 ipa_set_param_used_by_indirect_call (new_root_info,
3975 ici->param_index, true);
3976 if (ici->polymorphic)
3977 ipa_set_param_used_by_polymorphic_call (new_root_info,
3978 ici->param_index, true);
3979 }
3980 }
3981 else if (jfunc->type == IPA_JF_ANCESTOR)
3982 {
3983 if (ici->agg_contents
3984 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3985 && !ici->polymorphic)
3986 ici->param_index = -1;
3987 else
3988 {
3989 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3990 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3991 if (ici->polymorphic
3992 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3993 ici->vptr_changed = true;
3994 ipa_set_param_used_by_indirect_call (new_root_info,
3995 ici->param_index, true);
3996 if (ici->polymorphic)
3997 ipa_set_param_used_by_polymorphic_call (new_root_info,
3998 ici->param_index, true);
3999 }
4000 }
4001 else
4002 /* Either we can find a destination for this edge now or never. */
4003 ici->param_index = -1;
4004 }
4005
4006 return res;
4007 }
4008
4009 /* Recursively traverse subtree of NODE (including node) made of inlined
4010 cgraph_edges when CS has been inlined and invoke
4011 update_indirect_edges_after_inlining on all nodes and
4012 update_jump_functions_after_inlining on all non-inlined edges that lead out
4013 of this subtree. Newly discovered indirect edges will be added to
4014 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4015 created. */
4016
4017 static bool
4018 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4019 struct cgraph_node *node,
4020 vec<cgraph_edge *> *new_edges)
4021 {
4022 struct cgraph_edge *e;
4023 bool res;
4024
4025 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4026
4027 for (e = node->callees; e; e = e->next_callee)
4028 if (!e->inline_failed)
4029 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4030 else
4031 update_jump_functions_after_inlining (cs, e);
4032 for (e = node->indirect_calls; e; e = e->next_callee)
4033 update_jump_functions_after_inlining (cs, e);
4034
4035 return res;
4036 }
4037
4038 /* Combine two controlled uses counts as done during inlining. */
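/* For example, combining counts of 3 and 2 yields 3 + 2 - 1 = 4: the use
   constituted by passing the argument at the call site that is being inlined
   away is no longer counted separately.  */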
4039
4040 static int
4041 combine_controlled_uses_counters (int c, int d)
4042 {
4043 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4044 return IPA_UNDESCRIBED_USE;
4045 else
4046 return c + d - 1;
4047 }
4048
4049 /* Propagate the number of controlled uses from CS->callee to the new root of
4050 the tree of inlined nodes. */
4051
4052 static void
4053 propagate_controlled_uses (struct cgraph_edge *cs)
4054 {
4055 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4056 if (!args)
4057 return;
4058 struct cgraph_node *new_root = cs->caller->inlined_to
4059 ? cs->caller->inlined_to : cs->caller;
4060 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
4061 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
4062 int count, i;
4063
4064 if (!old_root_info)
4065 return;
4066
4067 count = MIN (ipa_get_cs_argument_count (args),
4068 ipa_get_param_count (old_root_info));
4069 for (i = 0; i < count; i++)
4070 {
4071 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4072 struct ipa_cst_ref_desc *rdesc;
4073
4074 if (jf->type == IPA_JF_PASS_THROUGH)
4075 {
4076 int src_idx, c, d;
4077 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4078 c = ipa_get_controlled_uses (new_root_info, src_idx);
4079 d = ipa_get_controlled_uses (old_root_info, i);
4080
4081 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4082 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4083 c = combine_controlled_uses_counters (c, d);
4084 ipa_set_controlled_uses (new_root_info, src_idx, c);
4085 if (c == 0 && new_root_info->ipcp_orig_node)
4086 {
4087 struct cgraph_node *n;
4088 struct ipa_ref *ref;
4089 tree t = new_root_info->known_csts[src_idx];
4090
4091 if (t && TREE_CODE (t) == ADDR_EXPR
4092 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4093 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4094 && (ref = new_root->find_reference (n, NULL, 0)))
4095 {
4096 if (dump_file)
4097 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4098 "reference from %s to %s.\n",
4099 new_root->dump_name (),
4100 n->dump_name ());
4101 ref->remove_reference ();
4102 }
4103 }
4104 }
4105 else if (jf->type == IPA_JF_CONST
4106 && (rdesc = jfunc_rdesc_usable (jf)))
4107 {
4108 int d = ipa_get_controlled_uses (old_root_info, i);
4109 int c = rdesc->refcount;
4110 rdesc->refcount = combine_controlled_uses_counters (c, d);
4111 if (rdesc->refcount == 0)
4112 {
4113 tree cst = ipa_get_jf_constant (jf);
4114 struct cgraph_node *n;
4115 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4116 && TREE_CODE (TREE_OPERAND (cst, 0))
4117 == FUNCTION_DECL);
4118 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4119 if (n)
4120 {
4121 struct cgraph_node *clone;
4122 bool ok;
4123 ok = remove_described_reference (n, rdesc);
4124 gcc_checking_assert (ok);
4125
4126 clone = cs->caller;
4127 while (clone->inlined_to
4128 && clone->ipcp_clone
4129 && clone != rdesc->cs->caller)
4130 {
4131 struct ipa_ref *ref;
4132 ref = clone->find_reference (n, NULL, 0);
4133 if (ref)
4134 {
4135 if (dump_file)
4136 fprintf (dump_file, "ipa-prop: Removing "
4137 "cloning-created reference "
4138 "from %s to %s.\n",
4139 clone->dump_name (),
4140 n->dump_name ());
4141 ref->remove_reference ();
4142 }
4143 clone = clone->callers->caller;
4144 }
4145 }
4146 }
4147 }
4148 }
4149
4150 for (i = ipa_get_param_count (old_root_info);
4151 i < ipa_get_cs_argument_count (args);
4152 i++)
4153 {
4154 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4155
4156 if (jf->type == IPA_JF_CONST)
4157 {
4158 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4159 if (rdesc)
4160 rdesc->refcount = IPA_UNDESCRIBED_USE;
4161 }
4162 else if (jf->type == IPA_JF_PASS_THROUGH)
4163 ipa_set_controlled_uses (new_root_info,
4164 jf->value.pass_through.formal_id,
4165 IPA_UNDESCRIBED_USE);
4166 }
4167 }
4168
4169 /* Update jump functions and call note functions on inlining the call site CS.
4170 CS is expected to lead to a node already cloned by
4171 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4172 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4173 created. */
4174
4175 bool
4176 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4177 vec<cgraph_edge *> *new_edges)
4178 {
4179 bool changed;
4180 /* Do nothing if the preparation phase has not been carried out yet
4181 (i.e. during early inlining). */
4182 if (!ipa_node_params_sum)
4183 return false;
4184 gcc_assert (ipa_edge_args_sum);
4185
4186 propagate_controlled_uses (cs);
4187 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4188 ipa_node_params_sum->remove (cs->callee);
4189
4190 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4191 if (args)
4192 {
4193 bool ok = true;
4194 if (args->jump_functions)
4195 {
4196 struct ipa_jump_func *jf;
4197 int i;
4198 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4199 if (jf->type == IPA_JF_CONST
4200 && ipa_get_jf_constant_rdesc (jf))
4201 {
4202 ok = false;
4203 break;
4204 }
4205 }
4206 if (ok)
4207 ipa_edge_args_sum->remove (cs);
4208 }
4209 if (ipcp_transformation_sum)
4210 ipcp_transformation_sum->remove (cs->callee);
4211
4212 return changed;
4213 }
4214
4215 /* Ensure that the array of edge argument infos is big enough to accommodate a
4216 structure for all edges, reallocating it if not. Also allocate the
4217 associated hash tables if they do not already exist. */
4218
4219 void
4220 ipa_check_create_edge_args (void)
4221 {
4222 if (!ipa_edge_args_sum)
4223 ipa_edge_args_sum
4224 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4225 ipa_edge_args_sum_t (symtab, true));
4226 if (!ipa_bits_hash_table)
4227 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4228 if (!ipa_vr_hash_table)
4229 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4230 }
4231
4232 /* Free all ipa_edge structures. */
4233
4234 void
4235 ipa_free_all_edge_args (void)
4236 {
4237 if (!ipa_edge_args_sum)
4238 return;
4239
4240 ggc_delete (ipa_edge_args_sum);
4241 ipa_edge_args_sum = NULL;
4242 }
4243
4244 /* Free all ipa_node_params structures. */
4245
4246 void
4247 ipa_free_all_node_params (void)
4248 {
4249 if (ipa_node_params_sum)
4250 ggc_delete (ipa_node_params_sum);
4251 ipa_node_params_sum = NULL;
4252 }
4253
4254 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4255 tables if they do not already exist. */
4256
4257 void
4258 ipcp_transformation_initialize (void)
4259 {
4260 if (!ipa_bits_hash_table)
4261 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4262 if (!ipa_vr_hash_table)
4263 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4264 if (ipcp_transformation_sum == NULL)
4265 {
4266 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4267 ipcp_transformation_sum->disable_insertion_hook ();
4268 }
4269 }
4270
4271 /* Release the IPA CP transformation summary. */
4272
4273 void
4274 ipcp_free_transformation_sum (void)
4275 {
4276 if (!ipcp_transformation_sum)
4277 return;
4278
4279 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4280 ggc_free (ipcp_transformation_sum);
4281 ipcp_transformation_sum = NULL;
4282 }
4283
4284 /* Set the aggregate replacements of NODE to be AGGVALS. */
4285
4286 void
4287 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4288 struct ipa_agg_replacement_value *aggvals)
4289 {
4290 ipcp_transformation_initialize ();
4291 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4292 s->agg_values = aggvals;
4293 }
4294
4295 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4296 count data structures accordingly. */
4297
4298 void
4299 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4300 {
4301 if (args->jump_functions)
4302 {
4303 struct ipa_jump_func *jf;
4304 int i;
4305 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4306 {
4307 struct ipa_cst_ref_desc *rdesc;
4308 try_decrement_rdesc_refcount (jf);
4309 if (jf->type == IPA_JF_CONST
4310 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4311 && rdesc->cs == cs)
4312 rdesc->cs = NULL;
4313 }
4314 }
4315 }
4316
4317 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4318 reference count data structures accordingly. */
4319
4320 void
4321 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4322 ipa_edge_args *old_args, ipa_edge_args *new_args)
4323 {
4324 unsigned int i;
4325
4326 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4327 if (old_args->polymorphic_call_contexts)
4328 new_args->polymorphic_call_contexts
4329 = vec_safe_copy (old_args->polymorphic_call_contexts);
4330
4331 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4332 {
4333 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4334 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4335
4336 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4337
4338 if (src_jf->type == IPA_JF_CONST)
4339 {
4340 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4341
4342 if (!src_rdesc)
4343 dst_jf->value.constant.rdesc = NULL;
4344 else if (src->caller == dst->caller)
4345 {
4346 struct ipa_ref *ref;
4347 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4348 gcc_checking_assert (n);
4349 ref = src->caller->find_reference (n, src->call_stmt,
4350 src->lto_stmt_uid);
4351 gcc_checking_assert (ref);
4352 dst->caller->clone_reference (ref, ref->stmt);
4353
4354 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4355 dst_rdesc->cs = dst;
4356 dst_rdesc->refcount = src_rdesc->refcount;
4357 dst_rdesc->next_duplicate = NULL;
4358 dst_jf->value.constant.rdesc = dst_rdesc;
4359 }
4360 else if (src_rdesc->cs == src)
4361 {
4362 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4363 dst_rdesc->cs = dst;
4364 dst_rdesc->refcount = src_rdesc->refcount;
4365 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4366 src_rdesc->next_duplicate = dst_rdesc;
4367 dst_jf->value.constant.rdesc = dst_rdesc;
4368 }
4369 else
4370 {
4371 struct ipa_cst_ref_desc *dst_rdesc;
4372 /* This can happen during inlining, when a JFUNC can refer to a
4373 reference taken in a function up in the tree of inline clones.
4374 We need to find the duplicate that refers to our tree of
4375 inline clones. */
4376
4377 gcc_assert (dst->caller->inlined_to);
4378 for (dst_rdesc = src_rdesc->next_duplicate;
4379 dst_rdesc;
4380 dst_rdesc = dst_rdesc->next_duplicate)
4381 {
4382 struct cgraph_node *top;
4383 top = dst_rdesc->cs->caller->inlined_to
4384 ? dst_rdesc->cs->caller->inlined_to
4385 : dst_rdesc->cs->caller;
4386 if (dst->caller->inlined_to == top)
4387 break;
4388 }
4389 gcc_assert (dst_rdesc);
4390 dst_jf->value.constant.rdesc = dst_rdesc;
4391 }
4392 }
4393 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4394 && src->caller == dst->caller)
4395 {
4396 struct cgraph_node *inline_root = dst->caller->inlined_to
4397 ? dst->caller->inlined_to : dst->caller;
4398 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4399 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4400
4401 int c = ipa_get_controlled_uses (root_info, idx);
4402 if (c != IPA_UNDESCRIBED_USE)
4403 {
4404 c++;
4405 ipa_set_controlled_uses (root_info, idx, c);
4406 }
4407 }
4408 }
4409 }
4410
4411 /* Analyze a function newly added to the callgraph. */
4412
4413 static void
4414 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4415 {
4416 if (node->has_gimple_body_p ())
4417 ipa_analyze_node (node);
4418 }
4419
4420 /* Hook that is called by summary when a node is duplicated. */
4421
4422 void
4423 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4424 ipa_node_params *old_info,
4425 ipa_node_params *new_info)
4426 {
4427 ipa_agg_replacement_value *old_av, *new_av;
4428
4429 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4430 new_info->lattices = NULL;
4431 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4432 new_info->known_csts = old_info->known_csts.copy ();
4433 new_info->known_contexts = old_info->known_contexts.copy ();
4434
4435 new_info->analysis_done = old_info->analysis_done;
4436 new_info->node_enqueued = old_info->node_enqueued;
4437 new_info->versionable = old_info->versionable;
4438
4439 old_av = ipa_get_agg_replacements_for_node (src);
4440 if (old_av)
4441 {
4442 new_av = NULL;
4443 while (old_av)
4444 {
4445 struct ipa_agg_replacement_value *v;
4446
4447 v = ggc_alloc<ipa_agg_replacement_value> ();
4448 memcpy (v, old_av, sizeof (*v));
4449 v->next = new_av;
4450 new_av = v;
4451 old_av = old_av->next;
4452 }
4453 ipa_set_node_agg_value_chain (dst, new_av);
4454 }
4455 }
4456
4457 /* Duplication of ipcp transformation summaries. */
4458
4459 void
4460 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4461 ipcp_transformation *src_trans,
4462 ipcp_transformation *dst_trans)
4463 {
4464 /* Avoid redundant work of duplicating vectors we will never use. */
4465 if (dst->inlined_to)
4466 return;
4467 dst_trans->bits = vec_safe_copy (src_trans->bits);
4468 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4469 ipa_agg_replacement_value *agg = src_trans->agg_values,
4470 **aggptr = &dst_trans->agg_values;
4471 while (agg)
4472 {
4473 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4474 **aggptr = *agg;
4475 agg = agg->next;
4476 aggptr = &(*aggptr)->next;
4477 }
4478 }
4479
4480 /* Register our cgraph hooks if they are not already there. */
4481
4482 void
4483 ipa_register_cgraph_hooks (void)
4484 {
4485 ipa_check_create_node_params ();
4486 ipa_check_create_edge_args ();
4487
4488 function_insertion_hook_holder =
4489 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4490 }
4491
4492 /* Unregister our cgraph hooks if they have been registered. */
4493
4494 static void
4495 ipa_unregister_cgraph_hooks (void)
4496 {
4497 if (function_insertion_hook_holder)
4498 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4499 function_insertion_hook_holder = NULL;
4500 }
4501
4502 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4503 longer needed after ipa-cp. */
4504
4505 void
4506 ipa_free_all_structures_after_ipa_cp (void)
4507 {
4508 if (!optimize && !in_lto_p)
4509 {
4510 ipa_free_all_edge_args ();
4511 ipa_free_all_node_params ();
4512 ipcp_sources_pool.release ();
4513 ipcp_cst_values_pool.release ();
4514 ipcp_poly_ctx_values_pool.release ();
4515 ipcp_agg_lattice_pool.release ();
4516 ipa_unregister_cgraph_hooks ();
4517 ipa_refdesc_pool.release ();
4518 }
4519 }
4520
4521 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4522 longer needed after indirect inlining. */
4523
4524 void
4525 ipa_free_all_structures_after_iinln (void)
4526 {
4527 ipa_free_all_edge_args ();
4528 ipa_free_all_node_params ();
4529 ipa_unregister_cgraph_hooks ();
4530 ipcp_sources_pool.release ();
4531 ipcp_cst_values_pool.release ();
4532 ipcp_poly_ctx_values_pool.release ();
4533 ipcp_agg_lattice_pool.release ();
4534 ipa_refdesc_pool.release ();
4535 }
4536
4537 /* Print the parameter descriptors (ipa_tree_map data structures) of
4538 function NODE to F. */
4539
4540 void
4541 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4542 {
4543 int i, count;
4544 class ipa_node_params *info;
4545
4546 if (!node->definition)
4547 return;
4548 info = IPA_NODE_REF (node);
4549 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4550 if (!info)
4551 {
4552 fprintf (f, " no params return\n");
4553 return;
4554 }
4555 count = ipa_get_param_count (info);
4556 for (i = 0; i < count; i++)
4557 {
4558 int c;
4559
4560 fprintf (f, " ");
4561 ipa_dump_param (f, info, i);
4562 if (ipa_is_param_used (info, i))
4563 fprintf (f, " used");
4564 if (ipa_is_param_used_by_ipa_predicates (info, i))
4565 fprintf (f, " used_by_ipa_predicates");
4566 if (ipa_is_param_used_by_indirect_call (info, i))
4567 fprintf (f, " used_by_indirect_call");
4568 if (ipa_is_param_used_by_polymorphic_call (info, i))
4569 fprintf (f, " used_by_polymorphic_call");
4570 c = ipa_get_controlled_uses (info, i);
4571 if (c == IPA_UNDESCRIBED_USE)
4572 fprintf (f, " undescribed_use");
4573 else
4574 fprintf (f, " controlled_uses=%i", c);
4575 fprintf (f, "\n");
4576 }
4577 }
4578
4579 /* Print ipa_tree_map data structures of all functions in the
4580 callgraph to F. */
4581
4582 void
4583 ipa_print_all_params (FILE * f)
4584 {
4585 struct cgraph_node *node;
4586
4587 fprintf (f, "\nFunction parameters:\n");
4588 FOR_EACH_FUNCTION (node)
4589 ipa_print_node_params (f, node);
4590 }
4591
4592 /* Dump the AV linked list. */
4593
4594 void
4595 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4596 {
4597 bool comma = false;
4598 fprintf (f, " Aggregate replacements:");
4599 for (; av; av = av->next)
4600 {
4601 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4602 av->index, av->offset);
4603 print_generic_expr (f, av->value);
4604 comma = true;
4605 }
4606 fprintf (f, "\n");
4607 }
4608
4609 /* Stream out jump function JUMP_FUNC to OB. */
4610
4611 static void
4612 ipa_write_jump_function (struct output_block *ob,
4613 struct ipa_jump_func *jump_func)
4614 {
4615 struct ipa_agg_jf_item *item;
4616 struct bitpack_d bp;
4617 int i, count;
4618 int flag = 0;
4619
4620 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4621 as well as WPA memory by handling them specially. */
4622 if (jump_func->type == IPA_JF_CONST
4623 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4624 flag = 1;
4625
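  /* The jump function type and FLAG are packed into a single streamed number;
     the reading side (ipa_read_jump_function) recovers them as VAL & 1 and
     VAL / 2.  */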
4626 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4627 switch (jump_func->type)
4628 {
4629 case IPA_JF_UNKNOWN:
4630 break;
4631 case IPA_JF_CONST:
4632 gcc_assert (
4633 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4634 stream_write_tree (ob,
4635 flag
4636 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4637 : jump_func->value.constant.value, true);
4638 break;
4639 case IPA_JF_PASS_THROUGH:
4640 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4641 if (jump_func->value.pass_through.operation == NOP_EXPR)
4642 {
4643 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4644 bp = bitpack_create (ob->main_stream);
4645 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4646 streamer_write_bitpack (&bp);
4647 }
4648 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4649 == tcc_unary)
4650 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4651 else
4652 {
4653 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4654 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4655 }
4656 break;
4657 case IPA_JF_ANCESTOR:
4658 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4659 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4660 bp = bitpack_create (ob->main_stream);
4661 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4662 streamer_write_bitpack (&bp);
4663 break;
4664 default:
4665 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4666 }
4667
4668 count = vec_safe_length (jump_func->agg.items);
4669 streamer_write_uhwi (ob, count);
4670 if (count)
4671 {
4672 bp = bitpack_create (ob->main_stream);
4673 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4674 streamer_write_bitpack (&bp);
4675 }
4676
4677 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4678 {
4679 stream_write_tree (ob, item->type, true);
4680 streamer_write_uhwi (ob, item->offset);
4681 streamer_write_uhwi (ob, item->jftype);
4682 switch (item->jftype)
4683 {
4684 case IPA_JF_UNKNOWN:
4685 break;
4686 case IPA_JF_CONST:
4687 stream_write_tree (ob, item->value.constant, true);
4688 break;
4689 case IPA_JF_PASS_THROUGH:
4690 case IPA_JF_LOAD_AGG:
4691 streamer_write_uhwi (ob, item->value.pass_through.operation);
4692 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4693 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4694 != tcc_unary)
4695 stream_write_tree (ob, item->value.pass_through.operand, true);
4696 if (item->jftype == IPA_JF_LOAD_AGG)
4697 {
4698 stream_write_tree (ob, item->value.load_agg.type, true);
4699 streamer_write_uhwi (ob, item->value.load_agg.offset);
4700 bp = bitpack_create (ob->main_stream);
4701 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4702 streamer_write_bitpack (&bp);
4703 }
4704 break;
4705 default:
4706 fatal_error (UNKNOWN_LOCATION,
4707 "invalid jump function in LTO stream");
4708 }
4709 }
4710
4711 bp = bitpack_create (ob->main_stream);
4712 bp_pack_value (&bp, !!jump_func->bits, 1);
4713 streamer_write_bitpack (&bp);
4714 if (jump_func->bits)
4715 {
4716 streamer_write_widest_int (ob, jump_func->bits->value);
4717 streamer_write_widest_int (ob, jump_func->bits->mask);
4718 }
4719 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4720 streamer_write_bitpack (&bp);
4721 if (jump_func->m_vr)
4722 {
4723 streamer_write_enum (ob->main_stream, value_rang_type,
4724 VR_LAST, jump_func->m_vr->kind ());
4725 stream_write_tree (ob, jump_func->m_vr->min (), true);
4726 stream_write_tree (ob, jump_func->m_vr->max (), true);
4727 }
4728 }
4729
4730 /* Read in jump function JUMP_FUNC from IB. */
4731
4732 static void
4733 ipa_read_jump_function (class lto_input_block *ib,
4734 struct ipa_jump_func *jump_func,
4735 struct cgraph_edge *cs,
4736 class data_in *data_in,
4737 bool prevails)
4738 {
4739 enum jump_func_type jftype;
4740 enum tree_code operation;
4741 int i, count;
4742 int val = streamer_read_uhwi (ib);
4743 bool flag = val & 1;
4744
4745 jftype = (enum jump_func_type) (val / 2);
4746 switch (jftype)
4747 {
4748 case IPA_JF_UNKNOWN:
4749 ipa_set_jf_unknown (jump_func);
4750 break;
4751 case IPA_JF_CONST:
4752 {
4753 tree t = stream_read_tree (ib, data_in);
4754 if (flag && prevails)
4755 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4756 ipa_set_jf_constant (jump_func, t, cs);
4757 }
4758 break;
4759 case IPA_JF_PASS_THROUGH:
4760 operation = (enum tree_code) streamer_read_uhwi (ib);
4761 if (operation == NOP_EXPR)
4762 {
4763 int formal_id = streamer_read_uhwi (ib);
4764 struct bitpack_d bp = streamer_read_bitpack (ib);
4765 bool agg_preserved = bp_unpack_value (&bp, 1);
4766 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4767 }
4768 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4769 {
4770 int formal_id = streamer_read_uhwi (ib);
4771 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4772 }
4773 else
4774 {
4775 tree operand = stream_read_tree (ib, data_in);
4776 int formal_id = streamer_read_uhwi (ib);
4777 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4778 operation);
4779 }
4780 break;
4781 case IPA_JF_ANCESTOR:
4782 {
4783 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4784 int formal_id = streamer_read_uhwi (ib);
4785 struct bitpack_d bp = streamer_read_bitpack (ib);
4786 bool agg_preserved = bp_unpack_value (&bp, 1);
4787 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4788 break;
4789 }
4790 default:
4791 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4792 }
4793
4794 count = streamer_read_uhwi (ib);
4795 if (prevails)
4796 {
4797 jump_func->agg.items = NULL;
4798 vec_safe_reserve (jump_func->agg.items, count, true);
4799 }
4800 if (count)
4801 {
4802 struct bitpack_d bp = streamer_read_bitpack (ib);
4803 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4804 }
4805 for (i = 0; i < count; i++)
4806 {
4807 struct ipa_agg_jf_item item;
4808 item.type = stream_read_tree (ib, data_in);
4809 item.offset = streamer_read_uhwi (ib);
4810 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4811
4812 switch (item.jftype)
4813 {
4814 case IPA_JF_UNKNOWN:
4815 break;
4816 case IPA_JF_CONST:
4817 item.value.constant = stream_read_tree (ib, data_in);
4818 break;
4819 case IPA_JF_PASS_THROUGH:
4820 case IPA_JF_LOAD_AGG:
4821 operation = (enum tree_code) streamer_read_uhwi (ib);
4822 item.value.pass_through.operation = operation;
4823 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4824 if (TREE_CODE_CLASS (operation) == tcc_unary)
4825 item.value.pass_through.operand = NULL_TREE;
4826 else
4827 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4828 if (item.jftype == IPA_JF_LOAD_AGG)
4829 {
4830 struct bitpack_d bp;
4831 item.value.load_agg.type = stream_read_tree (ib, data_in);
4832 item.value.load_agg.offset = streamer_read_uhwi (ib);
4833 bp = streamer_read_bitpack (ib);
4834 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4835 }
4836 break;
4837 default:
4838 fatal_error (UNKNOWN_LOCATION,
4839 "invalid jump function in LTO stream");
4840 }
4841 if (prevails)
4842 jump_func->agg.items->quick_push (item);
4843 }
4844
4845 struct bitpack_d bp = streamer_read_bitpack (ib);
4846 bool bits_known = bp_unpack_value (&bp, 1);
4847 if (bits_known)
4848 {
4849 widest_int value = streamer_read_widest_int (ib);
4850 widest_int mask = streamer_read_widest_int (ib);
4851 if (prevails)
4852 ipa_set_jfunc_bits (jump_func, value, mask);
4853 }
4854 else
4855 jump_func->bits = NULL;
4856
4857 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4858 bool vr_known = bp_unpack_value (&vr_bp, 1);
4859 if (vr_known)
4860 {
4861 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4862 VR_LAST);
4863 tree min = stream_read_tree (ib, data_in);
4864 tree max = stream_read_tree (ib, data_in);
4865 if (prevails)
4866 ipa_set_jfunc_vr (jump_func, type, min, max);
4867 }
4868 else
4869 jump_func->m_vr = NULL;
4870 }
4871
4872 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4873 relevant to indirect inlining to OB. */
4874
4875 static void
4876 ipa_write_indirect_edge_info (struct output_block *ob,
4877 struct cgraph_edge *cs)
4878 {
4879 class cgraph_indirect_call_info *ii = cs->indirect_info;
4880 struct bitpack_d bp;
4881
4882 streamer_write_hwi (ob, ii->param_index);
4883 bp = bitpack_create (ob->main_stream);
4884 bp_pack_value (&bp, ii->polymorphic, 1);
4885 bp_pack_value (&bp, ii->agg_contents, 1);
4886 bp_pack_value (&bp, ii->member_ptr, 1);
4887 bp_pack_value (&bp, ii->by_ref, 1);
4888 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4889 bp_pack_value (&bp, ii->vptr_changed, 1);
4890 streamer_write_bitpack (&bp);
4891 if (ii->agg_contents || ii->polymorphic)
4892 streamer_write_hwi (ob, ii->offset);
4893 else
4894 gcc_assert (ii->offset == 0);
4895
4896 if (ii->polymorphic)
4897 {
4898 streamer_write_hwi (ob, ii->otr_token);
4899 stream_write_tree (ob, ii->otr_type, true);
4900 ii->context.stream_out (ob);
4901 }
4902 }
4903
4904 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4905 relevant to indirect inlining from IB. */
4906
4907 static void
4908 ipa_read_indirect_edge_info (class lto_input_block *ib,
4909 class data_in *data_in,
4910 struct cgraph_edge *cs,
4911 class ipa_node_params *info)
4912 {
4913 class cgraph_indirect_call_info *ii = cs->indirect_info;
4914 struct bitpack_d bp;
4915
4916 ii->param_index = (int) streamer_read_hwi (ib);
4917 bp = streamer_read_bitpack (ib);
4918 ii->polymorphic = bp_unpack_value (&bp, 1);
4919 ii->agg_contents = bp_unpack_value (&bp, 1);
4920 ii->member_ptr = bp_unpack_value (&bp, 1);
4921 ii->by_ref = bp_unpack_value (&bp, 1);
4922 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4923 ii->vptr_changed = bp_unpack_value (&bp, 1);
4924 if (ii->agg_contents || ii->polymorphic)
4925 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4926 else
4927 ii->offset = 0;
4928 if (ii->polymorphic)
4929 {
4930 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4931 ii->otr_type = stream_read_tree (ib, data_in);
4932 ii->context.stream_in (ib, data_in);
4933 }
4934 if (info && ii->param_index >= 0)
4935 {
4936 if (ii->polymorphic)
4937 ipa_set_param_used_by_polymorphic_call (info,
4938 ii->param_index , true);
4939 ipa_set_param_used_by_indirect_call (info,
4940 ii->param_index, true);
4941 }
4942 }
4943
4944 /* Stream out NODE info to OB. */
4945
4946 static void
4947 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4948 {
4949 int node_ref;
4950 lto_symtab_encoder_t encoder;
4951 class ipa_node_params *info = IPA_NODE_REF (node);
4952 int j;
4953 struct cgraph_edge *e;
4954 struct bitpack_d bp;
4955
4956 encoder = ob->decl_state->symtab_node_encoder;
4957 node_ref = lto_symtab_encoder_encode (encoder, node);
4958 streamer_write_uhwi (ob, node_ref);
4959
4960 streamer_write_uhwi (ob, ipa_get_param_count (info));
4961 for (j = 0; j < ipa_get_param_count (info); j++)
4962 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4963 bp = bitpack_create (ob->main_stream);
4964 gcc_assert (info->analysis_done
4965 || ipa_get_param_count (info) == 0);
4966 gcc_assert (!info->node_enqueued);
4967 gcc_assert (!info->ipcp_orig_node);
4968 for (j = 0; j < ipa_get_param_count (info); j++)
4969 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4970 streamer_write_bitpack (&bp);
4971 for (j = 0; j < ipa_get_param_count (info); j++)
4972 {
4973 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4974 stream_write_tree (ob, ipa_get_type (info, j), true);
4975 }
4976 for (e = node->callees; e; e = e->next_callee)
4977 {
4978 class ipa_edge_args *args = IPA_EDGE_REF (e);
4979
4980 if (!args)
4981 {
4982 streamer_write_uhwi (ob, 0);
4983 continue;
4984 }
4985
4986 streamer_write_uhwi (ob,
4987 ipa_get_cs_argument_count (args) * 2
4988 + (args->polymorphic_call_contexts != NULL));
4989 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4990 {
4991 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4992 if (args->polymorphic_call_contexts != NULL)
4993 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4994 }
4995 }
4996 for (e = node->indirect_calls; e; e = e->next_callee)
4997 {
4998 class ipa_edge_args *args = IPA_EDGE_REF (e);
4999 if (!args)
5000 streamer_write_uhwi (ob, 0);
5001 else
5002 {
5003 streamer_write_uhwi (ob,
5004 ipa_get_cs_argument_count (args) * 2
5005 + (args->polymorphic_call_contexts != NULL));
5006 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5007 {
5008 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5009 if (args->polymorphic_call_contexts != NULL)
5010 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5011 }
5012 }
5013 ipa_write_indirect_edge_info (ob, e);
5014 }
5015 }
5016
5017 /* Stream in edge E from IB. */
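/* Note that the count streamed by ipa_write_node_info is twice the number of
   jump functions, with the lowest bit set when polymorphic call contexts
   accompany them, hence the COUNT & 1 and COUNT /= 2 decoding below.  */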
5018
5019 static void
5020 ipa_read_edge_info (class lto_input_block *ib,
5021 class data_in *data_in,
5022 struct cgraph_edge *e, bool prevails)
5023 {
5024 int count = streamer_read_uhwi (ib);
5025 bool contexts_computed = count & 1;
5026
5027 count /= 2;
5028 if (!count)
5029 return;
5030 if (prevails
5031 && (e->possibly_call_in_translation_unit_p ()
5032 /* Also stream in jump functions to builtins in the hope that they
5033 will get fnspecs. */
5034 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5035 {
5036 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
5037 vec_safe_grow_cleared (args->jump_functions, count, true);
5038 if (contexts_computed)
5039 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5040 for (int k = 0; k < count; k++)
5041 {
5042 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5043 data_in, prevails);
5044 if (contexts_computed)
5045 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5046 (ib, data_in);
5047 }
5048 }
5049 else
5050 {
5051 for (int k = 0; k < count; k++)
5052 {
5053 struct ipa_jump_func dummy;
5054 ipa_read_jump_function (ib, &dummy, e,
5055 data_in, prevails);
5056 if (contexts_computed)
5057 {
5058 class ipa_polymorphic_call_context ctx;
5059 ctx.stream_in (ib, data_in);
5060 }
5061 }
5062 }
5063 }
5064
5065 /* Stream in NODE info from IB. */
5066
5067 static void
5068 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5069 class data_in *data_in)
5070 {
5071 int k;
5072 struct cgraph_edge *e;
5073 struct bitpack_d bp;
5074 bool prevails = node->prevailing_p ();
5075 class ipa_node_params *info = prevails
5076 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
5077
5078 int param_count = streamer_read_uhwi (ib);
5079 if (prevails)
5080 {
5081 ipa_alloc_node_params (node, param_count);
5082 for (k = 0; k < param_count; k++)
5083 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5084 if (ipa_get_param_count (info) != 0)
5085 info->analysis_done = true;
5086 info->node_enqueued = false;
5087 }
5088 else
5089 for (k = 0; k < param_count; k++)
5090 streamer_read_uhwi (ib);
5091
5092 bp = streamer_read_bitpack (ib);
5093 for (k = 0; k < param_count; k++)
5094 {
5095 bool used = bp_unpack_value (&bp, 1);
5096
5097 if (prevails)
5098 ipa_set_param_used (info, k, used);
5099 }
5100 for (k = 0; k < param_count; k++)
5101 {
5102 int nuses = streamer_read_hwi (ib);
5103 tree type = stream_read_tree (ib, data_in);
5104
5105 if (prevails)
5106 {
5107 ipa_set_controlled_uses (info, k, nuses);
5108 (*info->descriptors)[k].decl_or_type = type;
5109 }
5110 }
5111 for (e = node->callees; e; e = e->next_callee)
5112 ipa_read_edge_info (ib, data_in, e, prevails);
5113 for (e = node->indirect_calls; e; e = e->next_callee)
5114 {
5115 ipa_read_edge_info (ib, data_in, e, prevails);
5116 ipa_read_indirect_edge_info (ib, data_in, e, info);
5117 }
5118 }
5119
5120 /* Write jump functions for all functions in the current LTO partition. */
5121
5122 void
5123 ipa_prop_write_jump_functions (void)
5124 {
5125 struct cgraph_node *node;
5126 struct output_block *ob;
5127 unsigned int count = 0;
5128 lto_symtab_encoder_iterator lsei;
5129 lto_symtab_encoder_t encoder;
5130
5131 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5132 return;
5133
5134 ob = create_output_block (LTO_section_jump_functions);
5135 encoder = ob->decl_state->symtab_node_encoder;
5136 ob->symbol = NULL;
5137 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5138 lsei_next_function_in_partition (&lsei))
5139 {
5140 node = lsei_cgraph_node (lsei);
5141 if (node->has_gimple_body_p ()
5142 && IPA_NODE_REF (node) != NULL)
5143 count++;
5144 }
5145
5146 streamer_write_uhwi (ob, count);
5147
5148 /* Process all of the functions. */
5149 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5150 lsei_next_function_in_partition (&lsei))
5151 {
5152 node = lsei_cgraph_node (lsei);
5153 if (node->has_gimple_body_p ()
5154 && IPA_NODE_REF (node) != NULL)
5155 ipa_write_node_info (ob, node);
5156 }
5157 streamer_write_char_stream (ob->main_stream, 0);
5158 produce_asm (ob, NULL);
5159 destroy_output_block (ob);
5160 }
5161
5162 /* Read section in file FILE_DATA of length LEN with data DATA. */
5163
5164 static void
5165 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5166 size_t len)
5167 {
5168 const struct lto_function_header *header =
5169 (const struct lto_function_header *) data;
5170 const int cfg_offset = sizeof (struct lto_function_header);
5171 const int main_offset = cfg_offset + header->cfg_size;
5172 const int string_offset = main_offset + header->main_size;
5173 class data_in *data_in;
5174 unsigned int i;
5175 unsigned int count;
5176
5177 lto_input_block ib_main ((const char *) data + main_offset,
5178 header->main_size, file_data->mode_table);
5179
5180 data_in =
5181 lto_data_in_create (file_data, (const char *) data + string_offset,
5182 header->string_size, vNULL);
5183 count = streamer_read_uhwi (&ib_main);
5184
5185 for (i = 0; i < count; i++)
5186 {
5187 unsigned int index;
5188 struct cgraph_node *node;
5189 lto_symtab_encoder_t encoder;
5190
5191 index = streamer_read_uhwi (&ib_main);
5192 encoder = file_data->symtab_node_encoder;
5193 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5194 index));
5195 gcc_assert (node->definition);
5196 ipa_read_node_info (&ib_main, node, data_in);
5197 }
5198 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5199 len);
5200 lto_data_in_delete (data_in);
5201 }
5202
5203 /* Read ipcp jump functions. */
5204
5205 void
5206 ipa_prop_read_jump_functions (void)
5207 {
5208 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5209 struct lto_file_decl_data *file_data;
5210 unsigned int j = 0;
5211
5212 ipa_check_create_node_params ();
5213 ipa_check_create_edge_args ();
5214 ipa_register_cgraph_hooks ();
5215
5216 while ((file_data = file_data_vec[j++]))
5217 {
5218 size_t len;
5219 const char *data
5220 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5221 &len);
5222 if (data)
5223 ipa_prop_read_section (file_data, data, len);
5224 }
5225 }
5226
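/* Stream out the IPA-CP transformation summary of NODE, i.e. its aggregate
   replacement values, known value ranges and known bits, to OB. */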
5227 void
5228 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5229 {
5230 int node_ref;
5231 unsigned int count = 0;
5232 lto_symtab_encoder_t encoder;
5233 struct ipa_agg_replacement_value *aggvals, *av;
5234
5235 aggvals = ipa_get_agg_replacements_for_node (node);
5236 encoder = ob->decl_state->symtab_node_encoder;
5237 node_ref = lto_symtab_encoder_encode (encoder, node);
5238 streamer_write_uhwi (ob, node_ref);
5239
5240 for (av = aggvals; av; av = av->next)
5241 count++;
5242 streamer_write_uhwi (ob, count);
5243
5244 for (av = aggvals; av; av = av->next)
5245 {
5246 struct bitpack_d bp;
5247
5248 streamer_write_uhwi (ob, av->offset);
5249 streamer_write_uhwi (ob, av->index);
5250 stream_write_tree (ob, av->value, true);
5251
5252 bp = bitpack_create (ob->main_stream);
5253 bp_pack_value (&bp, av->by_ref, 1);
5254 streamer_write_bitpack (&bp);
5255 }
5256
5257 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5258 if (ts && vec_safe_length (ts->m_vr) > 0)
5259 {
5260 count = ts->m_vr->length ();
5261 streamer_write_uhwi (ob, count);
5262 for (unsigned i = 0; i < count; ++i)
5263 {
5264 struct bitpack_d bp;
5265 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5266 bp = bitpack_create (ob->main_stream);
5267 bp_pack_value (&bp, parm_vr->known, 1);
5268 streamer_write_bitpack (&bp);
5269 if (parm_vr->known)
5270 {
5271 streamer_write_enum (ob->main_stream, value_rang_type,
5272 VR_LAST, parm_vr->type);
5273 streamer_write_wide_int (ob, parm_vr->min);
5274 streamer_write_wide_int (ob, parm_vr->max);
5275 }
5276 }
5277 }
5278 else
5279 streamer_write_uhwi (ob, 0);
5280
5281 if (ts && vec_safe_length (ts->bits) > 0)
5282 {
5283 count = ts->bits->length ();
5284 streamer_write_uhwi (ob, count);
5285
5286 for (unsigned i = 0; i < count; ++i)
5287 {
5288 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5289 struct bitpack_d bp = bitpack_create (ob->main_stream);
5290 bp_pack_value (&bp, !!bits_jfunc, 1);
5291 streamer_write_bitpack (&bp);
5292 if (bits_jfunc)
5293 {
5294 streamer_write_widest_int (ob, bits_jfunc->value);
5295 streamer_write_widest_int (ob, bits_jfunc->mask);
5296 }
5297 }
5298 }
5299 else
5300 streamer_write_uhwi (ob, 0);
5301 }
5302
5303 /* Stream in the IPA-CP transformation summary for NODE from IB. */
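/* The layout must mirror write_ipcp_transformation_info: a count of aggregate
   replacement values followed by the values themselves, then a count of value
   ranges and finally a count of known-bits entries.  */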
5304
5305 static void
5306 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5307 data_in *data_in)
5308 {
5309 struct ipa_agg_replacement_value *aggvals = NULL;
5310 unsigned int count, i;
5311
5312 count = streamer_read_uhwi (ib);
5313 for (i = 0; i <count; i++)
5314 {
5315 struct ipa_agg_replacement_value *av;
5316 struct bitpack_d bp;
5317
5318 av = ggc_alloc<ipa_agg_replacement_value> ();
5319 av->offset = streamer_read_uhwi (ib);
5320 av->index = streamer_read_uhwi (ib);
5321 av->value = stream_read_tree (ib, data_in);
5322 bp = streamer_read_bitpack (ib);
5323 av->by_ref = bp_unpack_value (&bp, 1);
5324 av->next = aggvals;
5325 aggvals = av;
5326 }
5327 ipa_set_node_agg_value_chain (node, aggvals);
5328
5329 count = streamer_read_uhwi (ib);
5330 if (count > 0)
5331 {
5332 ipcp_transformation_initialize ();
5333 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5334 vec_safe_grow_cleared (ts->m_vr, count, true);
5335 for (i = 0; i < count; i++)
5336 {
5337 ipa_vr *parm_vr;
5338 parm_vr = &(*ts->m_vr)[i];
5339 struct bitpack_d bp;
5340 bp = streamer_read_bitpack (ib);
5341 parm_vr->known = bp_unpack_value (&bp, 1);
5342 if (parm_vr->known)
5343 {
5344 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5345 VR_LAST);
5346 parm_vr->min = streamer_read_wide_int (ib);
5347 parm_vr->max = streamer_read_wide_int (ib);
5348 }
5349 }
5350 }
5351 count = streamer_read_uhwi (ib);
5352 if (count > 0)
5353 {
5354 ipcp_transformation_initialize ();
5355 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5356 vec_safe_grow_cleared (ts->bits, count, true);
5357
5358 for (i = 0; i < count; i++)
5359 {
5360 struct bitpack_d bp = streamer_read_bitpack (ib);
5361 bool known = bp_unpack_value (&bp, 1);
5362 if (known)
5363 {
5364 const widest_int value = streamer_read_widest_int (ib);
5365 const widest_int mask = streamer_read_widest_int (ib);
5366 ipa_bits *bits
5367 = ipa_get_ipa_bits_for_value (value, mask);
5368 (*ts->bits)[i] = bits;
5369 }
5370 }
5371 }
5372 }
5373
5374 /* Write IPA-CP transformation summaries of all functions in the partition. */
5375
5376 void
5377 ipcp_write_transformation_summaries (void)
5378 {
5379 struct cgraph_node *node;
5380 struct output_block *ob;
5381 unsigned int count = 0;
5382 lto_symtab_encoder_iterator lsei;
5383 lto_symtab_encoder_t encoder;
5384
5385 ob = create_output_block (LTO_section_ipcp_transform);
5386 encoder = ob->decl_state->symtab_node_encoder;
5387 ob->symbol = NULL;
5388 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5389 lsei_next_function_in_partition (&lsei))
5390 {
5391 node = lsei_cgraph_node (lsei);
5392 if (node->has_gimple_body_p ())
5393 count++;
5394 }
5395
5396 streamer_write_uhwi (ob, count);
5397
5398 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5399 lsei_next_function_in_partition (&lsei))
5400 {
5401 node = lsei_cgraph_node (lsei);
5402 if (node->has_gimple_body_p ())
5403 write_ipcp_transformation_info (ob, node);
5404 }
5405 streamer_write_char_stream (ob->main_stream, 0);
5406 produce_asm (ob, NULL);
5407 destroy_output_block (ob);
5408 }
5409
5410 /* Read replacements section in file FILE_DATA of length LEN with data
5411 DATA. */
5412
5413 static void
5414 read_replacements_section (struct lto_file_decl_data *file_data,
5415 const char *data,
5416 size_t len)
5417 {
5418 const struct lto_function_header *header =
5419 (const struct lto_function_header *) data;
5420 const int cfg_offset = sizeof (struct lto_function_header);
5421 const int main_offset = cfg_offset + header->cfg_size;
5422 const int string_offset = main_offset + header->main_size;
5423 class data_in *data_in;
5424 unsigned int i;
5425 unsigned int count;
5426
5427 lto_input_block ib_main ((const char *) data + main_offset,
5428 header->main_size, file_data->mode_table);
5429
5430 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5431 header->string_size, vNULL);
5432 count = streamer_read_uhwi (&ib_main);
5433
5434 for (i = 0; i < count; i++)
5435 {
5436 unsigned int index;
5437 struct cgraph_node *node;
5438 lto_symtab_encoder_t encoder;
5439
5440 index = streamer_read_uhwi (&ib_main);
5441 encoder = file_data->symtab_node_encoder;
5442 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5443 index));
5444 gcc_assert (node->definition);
5445 read_ipcp_transformation_info (&ib_main, node, data_in);
5446 }
5447 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5448 len);
5449 lto_data_in_delete (data_in);
5450 }
5451
5452 /* Read IPA-CP transformation summaries. */
5453
5454 void
5455 ipcp_read_transformation_summaries (void)
5456 {
5457 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5458 struct lto_file_decl_data *file_data;
5459 unsigned int j = 0;
5460
5461 while ((file_data = file_data_vec[j++]))
5462 {
5463 size_t len;
5464 const char *data
5465 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5466 &len);
5467 if (data)
5468 read_replacements_section (file_data, data, len);
5469 }
5470 }
5471
5472 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5473 NODE. */
5474
5475 static void
5476 adjust_agg_replacement_values (struct cgraph_node *node,
5477 struct ipa_agg_replacement_value *aggval)
5478 {
5479 struct ipa_agg_replacement_value *v;
5480 clone_info *cinfo = clone_info::get (node);
5481
5482 if (!cinfo || !cinfo->param_adjustments)
5483 return;
5484
5485 auto_vec<int, 16> new_indices;
5486 cinfo->param_adjustments->get_updated_indices (&new_indices);
5487 for (v = aggval; v; v = v->next)
5488 {
5489 gcc_checking_assert (v->index >= 0);
5490
5491 if ((unsigned) v->index < new_indices.length ())
5492 v->index = new_indices[v->index];
5493 else
5494 /* This can happen if we know about a constant that is passed by
5495    reference in an argument which is never actually used for anything,
5496    let alone for loading that constant.  */
5497 v->index = -1;
5498 }
5499 }
5500
5501 /* Dominator walker driving the ipcp modification phase. */
5502
5503 class ipcp_modif_dom_walker : public dom_walker
5504 {
5505 public:
5506 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5507 vec<ipa_param_descriptor, va_gc> *descs,
5508 struct ipa_agg_replacement_value *av,
5509 bool *sc, bool *cc)
5510 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5511 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5512
5513 virtual edge before_dom_children (basic_block);
5514
5515 private:
5516 struct ipa_func_body_info *m_fbi;
5517 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5518 struct ipa_agg_replacement_value *m_aggval;
5519 bool *m_something_changed, *m_cfg_changed;
5520 };
5521
5522 edge
5523 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5524 {
5525 gimple_stmt_iterator gsi;
5526 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5527 {
5528 struct ipa_agg_replacement_value *v;
5529 gimple *stmt = gsi_stmt (gsi);
5530 tree rhs, val, t;
5531 HOST_WIDE_INT offset;
5532 poly_int64 size;
5533 int index;
5534 bool by_ref, vce;
5535
5536 if (!gimple_assign_load_p (stmt))
5537 continue;
5538 rhs = gimple_assign_rhs1 (stmt);
5539 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5540 continue;
5541
5542 vce = false;
5543 t = rhs;
5544 while (handled_component_p (t))
5545 {
5546 /* V_C_E can do things like convert an array of integers to one
5547 bigger integer and similar things we do not handle below. */
5548 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5549 {
5550 vce = true;
5551 break;
5552 }
5553 t = TREE_OPERAND (t, 0);
5554 }
5555 if (vce)
5556 continue;
5557
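      /* Determine whether this statement loads from an aggregate passed in a
	 parameter, directly or by reference, and whether a replacement value
	 is recorded for the same offset and size.  */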
5558 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5559 &offset, &size, &by_ref))
5560 continue;
5561 for (v = m_aggval; v; v = v->next)
5562 if (v->index == index
5563 && v->offset == offset)
5564 break;
5565 if (!v
5566 || v->by_ref != by_ref
5567 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5568 size))
5569 continue;
5570
5571 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5572 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5573 {
5574 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5575 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5576 else if (TYPE_SIZE (TREE_TYPE (rhs))
5577 == TYPE_SIZE (TREE_TYPE (v->value)))
5578 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5579 else
5580 {
5581 if (dump_file)
5582 {
5583 fprintf (dump_file, " const ");
5584 print_generic_expr (dump_file, v->value);
5585 fprintf (dump_file, " can't be converted to type of ");
5586 print_generic_expr (dump_file, rhs);
5587 fprintf (dump_file, "\n");
5588 }
5589 continue;
5590 }
5591 }
5592 else
5593 val = v->value;
5594
5595 if (dump_file && (dump_flags & TDF_DETAILS))
5596 {
5597 fprintf (dump_file, "Modifying stmt:\n ");
5598 print_gimple_stmt (dump_file, stmt, 0);
5599 }
5600 gimple_assign_set_rhs_from_tree (&gsi, val);
5601 update_stmt (stmt);
5602
5603 if (dump_file && (dump_flags & TDF_DETAILS))
5604 {
5605 fprintf (dump_file, "into:\n ");
5606 print_gimple_stmt (dump_file, stmt, 0);
5607 fprintf (dump_file, "\n");
5608 }
5609
5610 *m_something_changed = true;
5611 if (maybe_clean_eh_stmt (stmt)
5612 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5613 *m_cfg_changed = true;
5614 }
5615 return NULL;
5616 }
5617
5618 /* Return true if we have recorded VALUE and MASK about PARM.
5619    Set VALUE and MASK accordingly.  */
5620
5621 bool
5622 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5623 {
5624 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5625 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5626 if (!ts || vec_safe_length (ts->bits) == 0)
5627 return false;
5628
5629 int i = 0;
5630 for (tree p = DECL_ARGUMENTS (current_function_decl);
5631 p != parm; p = DECL_CHAIN (p))
5632 {
5633 i++;
5634 /* Ignore static chain. */
5635 if (!p)
5636 return false;
5637 }
5638
5639 clone_info *cinfo = clone_info::get (cnode);
5640 if (cinfo && cinfo->param_adjustments)
5641 {
5642 i = cinfo->param_adjustments->get_original_index (i);
5643 if (i < 0)
5644 return false;
5645 }
5646
5647 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5648 if (!bits[i])
5649 return false;
5650 *mask = bits[i]->mask;
5651 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5652 return true;
5653 }
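
/* Illustrative sketch (an assumption for exposition, not part of the
   original source) of how a caller might query the recorded bits for a
   PARM_DECL of the current function:

     widest_int mask;
     tree value;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       ... following the CCP-style encoding, bits of PARM at positions
	   where MASK is zero are known to equal the corresponding bits
	   of VALUE ...  */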
5654
5655
5656 /* Update bits info of formal parameters as described in
5657 ipcp_transformation. */
5658
5659 static void
5660 ipcp_update_bits (struct cgraph_node *node)
5661 {
5662 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5663
5664 if (!ts || vec_safe_length (ts->bits) == 0)
5665 return;
5666 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5667 unsigned count = bits.length ();
5668 if (!count)
5669 return;
5670
5671 auto_vec<int, 16> new_indices;
5672 bool need_remapping = false;
5673 clone_info *cinfo = clone_info::get (node);
5674 if (cinfo && cinfo->param_adjustments)
5675 {
5676 cinfo->param_adjustments->get_updated_indices (&new_indices);
5677 need_remapping = true;
5678 }
5679 auto_vec <tree, 16> parm_decls;
5680 push_function_arg_decls (&parm_decls, node->decl);
5681
5682 for (unsigned i = 0; i < count; ++i)
5683 {
5684 tree parm;
5685 if (need_remapping)
5686 {
5687 if (i >= new_indices.length ())
5688 continue;
5689 int idx = new_indices[i];
5690 if (idx < 0)
5691 continue;
5692 parm = parm_decls[idx];
5693 }
5694 else
5695 parm = parm_decls[i];
5696 gcc_checking_assert (parm);
5697
5699 if (!bits[i]
5700 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5701 || POINTER_TYPE_P (TREE_TYPE (parm)))
5702 || !is_gimple_reg (parm))
5703 continue;
5704
5705 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5706 if (!ddef)
5707 continue;
5708
5709 if (dump_file)
5710 {
5711 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5712 print_hex (bits[i]->mask, dump_file);
5713 fprintf (dump_file, "\n");
5714 }
5715
5716 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5717 {
5718 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5719 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5720
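	  /* A bit set in the mask means its value is not known; together
	     with the known one bits in the value this yields a conservative
	     estimate of the bits that may be nonzero.  */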
5721 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5722 | wide_int::from (bits[i]->value, prec, sgn);
5723 set_nonzero_bits (ddef, nonzero_bits);
5724 }
5725 else
5726 {
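	  /* For pointer parameters the lowest set bit of the mask gives the
	     derived alignment and the known low bits of the value give the
	     misalignment within it.  */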
5727 unsigned tem = bits[i]->mask.to_uhwi ();
5728 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5729 unsigned align = tem & -tem;
5730 unsigned misalign = bitpos & (align - 1);
5731
5732 if (align > 1)
5733 {
5734 if (dump_file)
5735 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5736
5737 unsigned old_align, old_misalign;
5738 struct ptr_info_def *pi = get_ptr_info (ddef);
5739 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5740
5741 if (old_known
5742 && old_align > align)
5743 {
5744 if (dump_file)
5745 {
5746 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5747 if ((old_misalign & (align - 1)) != misalign)
5748 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5749 old_misalign, misalign);
5750 }
5751 continue;
5752 }
5753
5754 if (old_known
5755 && ((misalign & (old_align - 1)) != old_misalign)
5756 && dump_file)
5757 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5758 old_misalign, misalign);
5759
5760 set_ptr_info_alignment (pi, align, misalign);
5761 }
5762 }
5763 }
5764 }
5765
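/* Return true if this ipa_vr is known to contain only nonzero values of
   EXPR_TYPE, i.e. it is the anti-range ~[0, 0] or, for an unsigned
   EXPR_TYPE, the range [1, max].  */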
5766 bool
5767 ipa_vr::nonzero_p (tree expr_type) const
5768 {
5769 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5770 return true;
5771
5772 unsigned prec = TYPE_PRECISION (expr_type);
5773 return (type == VR_RANGE
5774 && TYPE_UNSIGNED (expr_type)
5775 && wi::eq_p (min, wi::one (prec))
5776 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5777 }
5778
5779 /* Update value range of formal parameters as described in
5780 ipcp_transformation. */
5781
5782 static void
5783 ipcp_update_vr (struct cgraph_node *node)
5784 {
5785 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5786 if (!ts || vec_safe_length (ts->m_vr) == 0)
5787 return;
5788 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5789 unsigned count = vr.length ();
5790 if (!count)
5791 return;
5792
5793 auto_vec<int, 16> new_indices;
5794 bool need_remapping = false;
5795 clone_info *cinfo = clone_info::get (node);
5796 if (cinfo && cinfo->param_adjustments)
5797 {
5798 cinfo->param_adjustments->get_updated_indices (&new_indices);
5799 need_remapping = true;
5800 }
5801 auto_vec <tree, 16> parm_decls;
5802 push_function_arg_decls (&parm_decls, node->decl);
5803
5804 for (unsigned i = 0; i < count; ++i)
5805 {
5806 tree parm;
5807 int remapped_idx;
5808 if (need_remapping)
5809 {
5810 if (i >= new_indices.length ())
5811 continue;
5812 remapped_idx = new_indices[i];
5813 if (remapped_idx < 0)
5814 continue;
5815 }
5816 else
5817 remapped_idx = i;
5818
5819 parm = parm_decls[remapped_idx];
5820
5821 gcc_checking_assert (parm);
5822 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5823
5824 if (!ddef || !is_gimple_reg (parm))
5825 continue;
5826
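      /* For integral parameters record the propagated range on the SSA
	 default definition; for pointer parameters only a known non-NULL
	 property is recorded.  */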
5827 if (vr[i].known
5828 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5829 {
5830 tree type = TREE_TYPE (ddef);
5831 unsigned prec = TYPE_PRECISION (type);
5832 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5833 {
5834 if (dump_file)
5835 {
5836 fprintf (dump_file, "Setting value range of param %u "
5837 "(now %i) ", i, remapped_idx);
5838 fprintf (dump_file, "%s[",
5839 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5840 print_decs (vr[i].min, dump_file);
5841 fprintf (dump_file, ", ");
5842 print_decs (vr[i].max, dump_file);
5843 fprintf (dump_file, "]\n");
5844 }
5845 set_range_info (ddef, vr[i].type,
5846 wide_int_storage::from (vr[i].min, prec,
5847 TYPE_SIGN (type)),
5848 wide_int_storage::from (vr[i].max, prec,
5849 TYPE_SIGN (type)));
5850 }
5851 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5852 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5853 {
5854 if (dump_file)
5855 fprintf (dump_file, "Setting nonnull for %u\n", i);
5856 set_ptr_nonnull (ddef);
5857 }
5858 }
5859 }
5860 }
5861
5862 /* IPCP transformation phase applying propagated bits, value range and aggregate value information.  */
5863
5864 unsigned int
5865 ipcp_transform_function (struct cgraph_node *node)
5866 {
5867 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5868 struct ipa_func_body_info fbi;
5869 struct ipa_agg_replacement_value *aggval;
5870 int param_count;
5871 bool cfg_changed = false, something_changed = false;
5872
5873 gcc_checking_assert (cfun);
5874 gcc_checking_assert (current_function_decl);
5875
5876 if (dump_file)
5877 fprintf (dump_file, "Modification phase of node %s\n",
5878 node->dump_name ());
5879
5880 ipcp_update_bits (node);
5881 ipcp_update_vr (node);
5882 aggval = ipa_get_agg_replacements_for_node (node);
5883 if (!aggval)
5884 return 0;
5885 param_count = count_formal_params (node->decl);
5886 if (param_count == 0)
5887 return 0;
5888 adjust_agg_replacement_values (node, aggval);
5889 if (dump_file)
5890 ipa_dump_agg_replacement_values (dump_file, aggval);
5891
5892 fbi.node = node;
5893 fbi.info = NULL;
5894 fbi.bb_infos = vNULL;
5895 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
5896 fbi.param_count = param_count;
5897 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5898
5899 vec_safe_grow_cleared (descriptors, param_count, true);
5900 ipa_populate_param_decls (node, *descriptors);
5901 calculate_dominance_info (CDI_DOMINATORS);
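  /* Walk the body in dominator order, replacing loads from aggregates passed
     in parameters (or pointed to by them) with the constants in AGGVAL.  */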
5902 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5903 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5904
5905 int i;
5906 struct ipa_bb_info *bi;
5907 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5908 free_ipa_bb_info (bi);
5909 fbi.bb_infos.release ();
5910 free_dominance_info (CDI_DOMINATORS);
5911
5912 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5913 s->agg_values = NULL;
5914 s->bits = NULL;
5915 s->m_vr = NULL;
5916
5917 vec_free (descriptors);
5918
5919 if (!something_changed)
5920 return 0;
5921
5922 if (cfg_changed)
5923 delete_unreachable_blocks_update_callgraph (node, false);
5924
5925 return TODO_update_ssa_only_virtuals;
5926 }
5927
5928
5929 /* Return true if OTHER describes the same aggregate value.  */
5930 bool
5931 ipa_agg_value::equal_to (const ipa_agg_value &other)
5932 {
5933 return offset == other.offset
5934 && operand_equal_p (value, other.value, 0);
5935 }
5936
5937 /* Destructor also removing individual aggregate values. */
5938
5939 ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
5940 {
5941 ipa_release_agg_values (m_known_aggs, false);
5942 }
5943
5944
5945
5946 #include "gt-ipa-prop.h"