ipa/97673 - fix input_location leak
[gcc.git] / gcc / tree-inline.c
1 /* Tree inlining.
2 Copyright (C) 2001-2021 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "calls.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
41 #include "cfganal.h"
42 #include "tree-iterator.h"
43 #include "intl.h"
44 #include "gimple-fold.h"
45 #include "tree-eh.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-into-ssa.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "except.h"
55 #include "debug.h"
56 #include "value-prof.h"
57 #include "cfgloop.h"
58 #include "builtins.h"
59 #include "stringpool.h"
60 #include "attribs.h"
61 #include "sreal.h"
62 #include "tree-cfgcleanup.h"
63 #include "tree-ssa-live.h"
64 #include "alloc-pool.h"
65 #include "symbol-summary.h"
66 #include "symtab-thunks.h"
67 #include "symtab-clones.h"
68
69 /* I'm not real happy about this, but we need to handle gimple and
70 non-gimple trees. */
71
72 /* Inlining, Cloning, Versioning, Parallelization
73
74 Inlining: a function body is duplicated, but the PARM_DECLs are
75 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
76 MODIFY_EXPRs that store to a dedicated returned-value variable.
77 The duplicated eh_region info of the copy will later be appended
78 to the info for the caller; the eh_region info in copied throwing
79 statements and RESX statements are adjusted accordingly.
80
81 Cloning: (only in C++) We have one body for a con/de/structor, and
82 multiple function decls, each with a unique parameter list.
83 Duplicate the body, using the given splay tree; some parameters
84 will become constants (like 0 or 1).
85
86 Versioning: a function body is duplicated and the result is a new
87 function rather than into blocks of an existing function as with
88 inlining. Some parameters will become constants.
89
90 Parallelization: a region of a function is duplicated resulting in
91 a new function. Variables may be replaced with complex expressions
92 to enable shared variable semantics.
93
94 All of these will simultaneously lookup any callgraph edges. If
95 we're going to inline the duplicated function body, and the given
96 function has some cloned callgraph nodes (one for each place this
97 function will be inlined) those callgraph edges will be duplicated.
98 If we're cloning the body, those callgraph edges will be
99 updated to point into the new body. (Note that the original
100 callgraph node and edge list will not be altered.)
101
102 See the CALL_EXPR handling case in copy_tree_body_r (). */
103
104 /* To Do:
105
106 o In order to make inlining-on-trees work, we pessimized
107 function-local static constants. In particular, they are now
108 always output, even when not addressed. Fix this by treating
109 function-local static constants just like global static
110 constants; the back-end already knows not to output them if they
111 are not needed.
112
113 o Provide heuristics to clamp inlining of recursive template
114 calls? */
115
116
117 /* Weights that estimate_num_insns uses to estimate the size of the
118 produced code. */
119
120 eni_weights eni_size_weights;
121
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
124
125 eni_weights eni_time_weights;
126
127 /* Prototypes. */
128
129 static tree declare_return_variable (copy_body_data *, tree, tree,
130 basic_block);
131 static void remap_block (tree *, copy_body_data *);
132 static void copy_bind_expr (tree *, int *, copy_body_data *);
133 static void declare_inline_vars (tree, tree);
134 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
135 static void prepend_lexical_block (tree current_block, tree new_block);
136 static tree copy_result_decl_to_var (tree, copy_body_data *);
137 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
138 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
139 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
140
141 /* Insert a tree->tree mapping for ID. Despite the name suggests
142 that the trees should be variables, it is used for more than that. */
143
144 void
145 insert_decl_map (copy_body_data *id, tree key, tree value)
146 {
147 id->decl_map->put (key, value);
148
149 /* Always insert an identity map as well. If we see this same new
150 node again, we won't want to duplicate it a second time. */
151 if (key != value)
152 id->decl_map->put (value, value);
153 }
154
155 /* Insert a tree->tree mapping for ID. This is only used for
156 variables. */
157
158 static void
159 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
160 {
161 if (!gimple_in_ssa_p (id->src_cfun))
162 return;
163
164 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
165 return;
166
167 if (!target_for_debug_bind (key))
168 return;
169
170 gcc_assert (TREE_CODE (key) == PARM_DECL);
171 gcc_assert (VAR_P (value));
172
173 if (!id->debug_map)
174 id->debug_map = new hash_map<tree, tree>;
175
176 id->debug_map->put (key, value);
177 }
178
179 /* If nonzero, we're remapping the contents of inlined debug
180 statements. If negative, an error has occurred, such as a
181 reference to a variable that isn't available in the inlined
182 context. */
183 static int processing_debug_stmt = 0;
184
185 /* Construct new SSA name for old NAME. ID is the inline context. */
186
187 static tree
188 remap_ssa_name (tree name, copy_body_data *id)
189 {
190 tree new_tree, var;
191 tree *n;
192
193 gcc_assert (TREE_CODE (name) == SSA_NAME);
194
195 n = id->decl_map->get (name);
196 if (n)
197 {
198 /* WHen we perform edge redirection as part of CFG copy, IPA-SRA can
199 remove an unused LHS from a call statement. Such LHS can however
200 still appear in debug statements, but their value is lost in this
201 function and we do not want to map them. */
202 if (id->killed_new_ssa_names
203 && id->killed_new_ssa_names->contains (*n))
204 {
205 gcc_assert (processing_debug_stmt);
206 processing_debug_stmt = -1;
207 return name;
208 }
209
210 return unshare_expr (*n);
211 }
212
213 if (processing_debug_stmt)
214 {
215 if (SSA_NAME_IS_DEFAULT_DEF (name)
216 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
217 && id->entry_bb == NULL
218 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
219 {
220 tree vexpr = make_node (DEBUG_EXPR_DECL);
221 gimple *def_temp;
222 gimple_stmt_iterator gsi;
223 tree val = SSA_NAME_VAR (name);
224
225 n = id->decl_map->get (val);
226 if (n != NULL)
227 val = *n;
228 if (TREE_CODE (val) != PARM_DECL
229 && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
230 {
231 processing_debug_stmt = -1;
232 return name;
233 }
234 n = id->decl_map->get (val);
235 if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
236 return *n;
237 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
238 DECL_ARTIFICIAL (vexpr) = 1;
239 TREE_TYPE (vexpr) = TREE_TYPE (name);
240 SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
241 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
242 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
243 insert_decl_map (id, val, vexpr);
244 return vexpr;
245 }
246
247 processing_debug_stmt = -1;
248 return name;
249 }
250
251 /* Remap anonymous SSA names or SSA names of anonymous decls. */
252 var = SSA_NAME_VAR (name);
253 if (!var
254 || (!SSA_NAME_IS_DEFAULT_DEF (name)
255 && VAR_P (var)
256 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
257 && DECL_ARTIFICIAL (var)
258 && DECL_IGNORED_P (var)
259 && !DECL_NAME (var)))
260 {
261 struct ptr_info_def *pi;
262 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
263 if (!var && SSA_NAME_IDENTIFIER (name))
264 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
265 insert_decl_map (id, name, new_tree);
266 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
267 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
268 /* At least IPA points-to info can be directly transferred. */
269 if (id->src_cfun->gimple_df
270 && id->src_cfun->gimple_df->ipa_pta
271 && POINTER_TYPE_P (TREE_TYPE (name))
272 && (pi = SSA_NAME_PTR_INFO (name))
273 && !pi->pt.anything)
274 {
275 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
276 new_pi->pt = pi->pt;
277 }
278 /* So can range-info. */
279 if (!POINTER_TYPE_P (TREE_TYPE (name))
280 && SSA_NAME_RANGE_INFO (name))
281 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
282 SSA_NAME_RANGE_INFO (name));
283 return new_tree;
284 }
285
286 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
287 in copy_bb. */
288 new_tree = remap_decl (var, id);
289
290 /* We might've substituted constant or another SSA_NAME for
291 the variable.
292
293 Replace the SSA name representing RESULT_DECL by variable during
294 inlining: this saves us from need to introduce PHI node in a case
295 return value is just partly initialized. */
296 if ((VAR_P (new_tree) || TREE_CODE (new_tree) == PARM_DECL)
297 && (!SSA_NAME_VAR (name)
298 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
299 || !id->transform_return_to_modify))
300 {
301 struct ptr_info_def *pi;
302 new_tree = make_ssa_name (new_tree);
303 insert_decl_map (id, name, new_tree);
304 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
305 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
306 /* At least IPA points-to info can be directly transferred. */
307 if (id->src_cfun->gimple_df
308 && id->src_cfun->gimple_df->ipa_pta
309 && POINTER_TYPE_P (TREE_TYPE (name))
310 && (pi = SSA_NAME_PTR_INFO (name))
311 && !pi->pt.anything)
312 {
313 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
314 new_pi->pt = pi->pt;
315 }
316 /* So can range-info. */
317 if (!POINTER_TYPE_P (TREE_TYPE (name))
318 && SSA_NAME_RANGE_INFO (name))
319 duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
320 SSA_NAME_RANGE_INFO (name));
321 if (SSA_NAME_IS_DEFAULT_DEF (name))
322 {
323 /* By inlining function having uninitialized variable, we might
324 extend the lifetime (variable might get reused). This cause
325 ICE in the case we end up extending lifetime of SSA name across
326 abnormal edge, but also increase register pressure.
327
328 We simply initialize all uninitialized vars by 0 except
329 for case we are inlining to very first BB. We can avoid
330 this for all BBs that are not inside strongly connected
331 regions of the CFG, but this is expensive to test. */
332 if (id->entry_bb
333 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
334 && (!SSA_NAME_VAR (name)
335 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
336 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
337 0)->dest
338 || EDGE_COUNT (id->entry_bb->preds) != 1))
339 {
340 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
341 gimple *init_stmt;
342 tree zero = build_zero_cst (TREE_TYPE (new_tree));
343
344 init_stmt = gimple_build_assign (new_tree, zero);
345 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
346 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
347 }
348 else
349 {
350 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
351 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
352 }
353 }
354 }
355 else
356 insert_decl_map (id, name, new_tree);
357 return new_tree;
358 }
359
360 /* Remap DECL during the copying of the BLOCK tree for the function. */
361
362 tree
363 remap_decl (tree decl, copy_body_data *id)
364 {
365 tree *n;
366
367 /* We only remap local variables in the current function. */
368
369 /* See if we have remapped this declaration. */
370
371 n = id->decl_map->get (decl);
372
373 if (!n && processing_debug_stmt)
374 {
375 processing_debug_stmt = -1;
376 return decl;
377 }
378
379 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
380 necessary DECLs have already been remapped and we do not want to duplicate
381 a decl coming from outside of the sequence we are copying. */
382 if (!n
383 && id->prevent_decl_creation_for_types
384 && id->remapping_type_depth > 0
385 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
386 return decl;
387
388 /* If we didn't already have an equivalent for this declaration, create one
389 now. */
390 if (!n)
391 {
392 /* Make a copy of the variable or label. */
393 tree t = id->copy_decl (decl, id);
394
395 /* Remember it, so that if we encounter this local entity again
396 we can reuse this copy. Do this early because remap_type may
397 need this decl for TYPE_STUB_DECL. */
398 insert_decl_map (id, decl, t);
399
400 if (!DECL_P (t))
401 return t;
402
403 /* Remap types, if necessary. */
404 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
405 if (TREE_CODE (t) == TYPE_DECL)
406 {
407 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
408
409 /* Preserve the invariant that DECL_ORIGINAL_TYPE != TREE_TYPE,
410 which is enforced in gen_typedef_die when DECL_ABSTRACT_ORIGIN
411 is not set on the TYPE_DECL, for example in LTO mode. */
412 if (DECL_ORIGINAL_TYPE (t) == TREE_TYPE (t))
413 {
414 tree x = build_variant_type_copy (TREE_TYPE (t));
415 TYPE_STUB_DECL (x) = TYPE_STUB_DECL (TREE_TYPE (t));
416 TYPE_NAME (x) = TYPE_NAME (TREE_TYPE (t));
417 DECL_ORIGINAL_TYPE (t) = x;
418 }
419 }
420
421 /* Remap sizes as necessary. */
422 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
423 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
424
425 /* If fields, do likewise for offset and qualifier. */
426 if (TREE_CODE (t) == FIELD_DECL)
427 {
428 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
429 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
430 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
431 }
432
433 return t;
434 }
435
436 if (id->do_not_unshare)
437 return *n;
438 else
439 return unshare_expr (*n);
440 }
441
442 static tree
443 remap_type_1 (tree type, copy_body_data *id)
444 {
445 tree new_tree, t;
446
447 /* We do need a copy. build and register it now. If this is a pointer or
448 reference type, remap the designated type and make a new pointer or
449 reference type. */
450 if (TREE_CODE (type) == POINTER_TYPE)
451 {
452 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
453 TYPE_MODE (type),
454 TYPE_REF_CAN_ALIAS_ALL (type));
455 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
456 new_tree = build_type_attribute_qual_variant (new_tree,
457 TYPE_ATTRIBUTES (type),
458 TYPE_QUALS (type));
459 insert_decl_map (id, type, new_tree);
460 return new_tree;
461 }
462 else if (TREE_CODE (type) == REFERENCE_TYPE)
463 {
464 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
465 TYPE_MODE (type),
466 TYPE_REF_CAN_ALIAS_ALL (type));
467 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
468 new_tree = build_type_attribute_qual_variant (new_tree,
469 TYPE_ATTRIBUTES (type),
470 TYPE_QUALS (type));
471 insert_decl_map (id, type, new_tree);
472 return new_tree;
473 }
474 else
475 new_tree = copy_node (type);
476
477 insert_decl_map (id, type, new_tree);
478
479 /* This is a new type, not a copy of an old type. Need to reassociate
480 variants. We can handle everything except the main variant lazily. */
481 t = TYPE_MAIN_VARIANT (type);
482 if (type != t)
483 {
484 t = remap_type (t, id);
485 TYPE_MAIN_VARIANT (new_tree) = t;
486 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
487 TYPE_NEXT_VARIANT (t) = new_tree;
488 }
489 else
490 {
491 TYPE_MAIN_VARIANT (new_tree) = new_tree;
492 TYPE_NEXT_VARIANT (new_tree) = NULL;
493 }
494
495 if (TYPE_STUB_DECL (type))
496 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
497
498 /* Lazily create pointer and reference types. */
499 TYPE_POINTER_TO (new_tree) = NULL;
500 TYPE_REFERENCE_TO (new_tree) = NULL;
501
502 /* Copy all types that may contain references to local variables; be sure to
503 preserve sharing in between type and its main variant when possible. */
504 switch (TREE_CODE (new_tree))
505 {
506 case INTEGER_TYPE:
507 case REAL_TYPE:
508 case FIXED_POINT_TYPE:
509 case ENUMERAL_TYPE:
510 case BOOLEAN_TYPE:
511 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
512 {
513 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
514 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
515
516 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
517 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
518 }
519 else
520 {
521 t = TYPE_MIN_VALUE (new_tree);
522 if (t && TREE_CODE (t) != INTEGER_CST)
523 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
524
525 t = TYPE_MAX_VALUE (new_tree);
526 if (t && TREE_CODE (t) != INTEGER_CST)
527 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
528 }
529 return new_tree;
530
531 case FUNCTION_TYPE:
532 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
533 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
534 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
535 else
536 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
537 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
538 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
539 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
540 else
541 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
542 return new_tree;
543
544 case ARRAY_TYPE:
545 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
546 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
547 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
548 else
549 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
550
551 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
552 {
553 gcc_checking_assert (TYPE_DOMAIN (type)
554 == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
555 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
556 }
557 else
558 {
559 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
560 /* For array bounds where we have decided not to copy over the bounds
561 variable which isn't used in OpenMP/OpenACC region, change them to
562 an uninitialized VAR_DECL temporary. */
563 if (id->adjust_array_error_bounds
564 && TYPE_DOMAIN (new_tree)
565 && TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
566 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
567 {
568 tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
569 DECL_ATTRIBUTES (v)
570 = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
571 DECL_ATTRIBUTES (v));
572 TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
573 }
574 }
575 break;
576
577 case RECORD_TYPE:
578 case UNION_TYPE:
579 case QUAL_UNION_TYPE:
580 if (TYPE_MAIN_VARIANT (type) != type
581 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
582 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
583 else
584 {
585 tree f, nf = NULL;
586
587 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
588 {
589 t = remap_decl (f, id);
590 DECL_CONTEXT (t) = new_tree;
591 DECL_CHAIN (t) = nf;
592 nf = t;
593 }
594 TYPE_FIELDS (new_tree) = nreverse (nf);
595 }
596 break;
597
598 case OFFSET_TYPE:
599 default:
600 /* Shouldn't have been thought variable sized. */
601 gcc_unreachable ();
602 }
603
604 /* All variants of type share the same size, so use the already remaped data. */
605 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
606 {
607 tree s = TYPE_SIZE (type);
608 tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
609 tree su = TYPE_SIZE_UNIT (type);
610 tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
611 gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
612 && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
613 || s == mvs);
614 gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
615 && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
616 || su == mvsu);
617 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
618 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
619 }
620 else
621 {
622 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
623 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
624 }
625
626 return new_tree;
627 }
628
629 /* Helper function for remap_type_2, called through walk_tree. */
630
631 static tree
632 remap_type_3 (tree *tp, int *walk_subtrees, void *data)
633 {
634 copy_body_data *id = (copy_body_data *) data;
635
636 if (TYPE_P (*tp))
637 *walk_subtrees = 0;
638
639 else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
640 return *tp;
641
642 return NULL_TREE;
643 }
644
645 /* Return true if TYPE needs to be remapped because remap_decl on any
646 needed embedded decl returns something other than that decl. */
647
648 static bool
649 remap_type_2 (tree type, copy_body_data *id)
650 {
651 tree t;
652
653 #define RETURN_TRUE_IF_VAR(T) \
654 do \
655 { \
656 tree _t = (T); \
657 if (_t) \
658 { \
659 if (DECL_P (_t) && remap_decl (_t, id) != _t) \
660 return true; \
661 if (!TYPE_SIZES_GIMPLIFIED (type) \
662 && walk_tree (&_t, remap_type_3, id, NULL)) \
663 return true; \
664 } \
665 } \
666 while (0)
667
668 switch (TREE_CODE (type))
669 {
670 case POINTER_TYPE:
671 case REFERENCE_TYPE:
672 case FUNCTION_TYPE:
673 case METHOD_TYPE:
674 return remap_type_2 (TREE_TYPE (type), id);
675
676 case INTEGER_TYPE:
677 case REAL_TYPE:
678 case FIXED_POINT_TYPE:
679 case ENUMERAL_TYPE:
680 case BOOLEAN_TYPE:
681 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
682 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
683 return false;
684
685 case ARRAY_TYPE:
686 if (remap_type_2 (TREE_TYPE (type), id)
687 || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
688 return true;
689 break;
690
691 case RECORD_TYPE:
692 case UNION_TYPE:
693 case QUAL_UNION_TYPE:
694 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
695 if (TREE_CODE (t) == FIELD_DECL)
696 {
697 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
698 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
699 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
700 if (TREE_CODE (type) == QUAL_UNION_TYPE)
701 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
702 }
703 break;
704
705 default:
706 return false;
707 }
708
709 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
710 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
711 return false;
712 #undef RETURN_TRUE_IF_VAR
713 }
714
715 tree
716 remap_type (tree type, copy_body_data *id)
717 {
718 tree *node;
719 tree tmp;
720
721 if (type == NULL)
722 return type;
723
724 /* See if we have remapped this type. */
725 node = id->decl_map->get (type);
726 if (node)
727 return *node;
728
729 /* The type only needs remapping if it's variably modified. */
730 if (! variably_modified_type_p (type, id->src_fn)
731 /* Don't remap if copy_decl method doesn't always return a new
732 decl and for all embedded decls returns the passed in decl. */
733 || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
734 {
735 insert_decl_map (id, type, type);
736 return type;
737 }
738
739 id->remapping_type_depth++;
740 tmp = remap_type_1 (type, id);
741 id->remapping_type_depth--;
742
743 return tmp;
744 }
745
746 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
747
748 static bool
749 can_be_nonlocal (tree decl, copy_body_data *id)
750 {
751 /* We cannot duplicate function decls. */
752 if (TREE_CODE (decl) == FUNCTION_DECL)
753 return true;
754
755 /* Local static vars must be non-local or we get multiple declaration
756 problems. */
757 if (VAR_P (decl) && !auto_var_in_fn_p (decl, id->src_fn))
758 return true;
759
760 return false;
761 }
762
763 static tree
764 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
765 copy_body_data *id)
766 {
767 tree old_var;
768 tree new_decls = NULL_TREE;
769
770 /* Remap its variables. */
771 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
772 {
773 tree new_var;
774
775 if (can_be_nonlocal (old_var, id))
776 {
777 /* We need to add this variable to the local decls as otherwise
778 nothing else will do so. */
779 if (VAR_P (old_var) && ! DECL_EXTERNAL (old_var) && cfun)
780 add_local_decl (cfun, old_var);
781 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
782 && !DECL_IGNORED_P (old_var)
783 && nonlocalized_list)
784 vec_safe_push (*nonlocalized_list, old_var);
785 continue;
786 }
787
788 /* Remap the variable. */
789 new_var = remap_decl (old_var, id);
790
791 /* If we didn't remap this variable, we can't mess with its
792 TREE_CHAIN. If we remapped this variable to the return slot, it's
793 already declared somewhere else, so don't declare it here. */
794
795 if (new_var == id->retvar)
796 ;
797 else if (!new_var)
798 {
799 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
800 && !DECL_IGNORED_P (old_var)
801 && nonlocalized_list)
802 vec_safe_push (*nonlocalized_list, old_var);
803 }
804 else
805 {
806 gcc_assert (DECL_P (new_var));
807 DECL_CHAIN (new_var) = new_decls;
808 new_decls = new_var;
809
810 /* Also copy value-expressions. */
811 if (VAR_P (new_var) && DECL_HAS_VALUE_EXPR_P (new_var))
812 {
813 tree tem = DECL_VALUE_EXPR (new_var);
814 bool old_regimplify = id->regimplify;
815 id->remapping_type_depth++;
816 walk_tree (&tem, copy_tree_body_r, id, NULL);
817 id->remapping_type_depth--;
818 id->regimplify = old_regimplify;
819 SET_DECL_VALUE_EXPR (new_var, tem);
820 }
821 }
822 }
823
824 return nreverse (new_decls);
825 }
826
827 /* Copy the BLOCK to contain remapped versions of the variables
828 therein. And hook the new block into the block-tree. */
829
830 static void
831 remap_block (tree *block, copy_body_data *id)
832 {
833 tree old_block;
834 tree new_block;
835
836 /* Make the new block. */
837 old_block = *block;
838 new_block = make_node (BLOCK);
839 TREE_USED (new_block) = TREE_USED (old_block);
840 BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
841 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
842 BLOCK_NONLOCALIZED_VARS (new_block)
843 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
844 *block = new_block;
845
846 /* Remap its variables. */
847 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
848 &BLOCK_NONLOCALIZED_VARS (new_block),
849 id);
850
851 if (id->transform_lang_insert_block)
852 id->transform_lang_insert_block (new_block);
853
854 /* Remember the remapped block. */
855 insert_decl_map (id, old_block, new_block);
856 }
857
858 /* Copy the whole block tree and root it in id->block. */
859
860 static tree
861 remap_blocks (tree block, copy_body_data *id)
862 {
863 tree t;
864 tree new_tree = block;
865
866 if (!block)
867 return NULL;
868
869 remap_block (&new_tree, id);
870 gcc_assert (new_tree != block);
871 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
872 prepend_lexical_block (new_tree, remap_blocks (t, id));
873 /* Blocks are in arbitrary order, but make things slightly prettier and do
874 not swap order when producing a copy. */
875 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
876 return new_tree;
877 }
878
879 /* Remap the block tree rooted at BLOCK to nothing. */
880
881 static void
882 remap_blocks_to_null (tree block, copy_body_data *id)
883 {
884 tree t;
885 insert_decl_map (id, block, NULL_TREE);
886 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
887 remap_blocks_to_null (t, id);
888 }
889
890 /* Remap the location info pointed to by LOCUS. */
891
892 static location_t
893 remap_location (location_t locus, copy_body_data *id)
894 {
895 if (LOCATION_BLOCK (locus))
896 {
897 tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
898 gcc_assert (n);
899 if (*n)
900 return set_block (locus, *n);
901 }
902
903 locus = LOCATION_LOCUS (locus);
904
905 if (locus != UNKNOWN_LOCATION && id->block)
906 return set_block (locus, id->block);
907
908 return locus;
909 }
910
911 static void
912 copy_statement_list (tree *tp)
913 {
914 tree_stmt_iterator oi, ni;
915 tree new_tree;
916
917 new_tree = alloc_stmt_list ();
918 ni = tsi_start (new_tree);
919 oi = tsi_start (*tp);
920 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
921 *tp = new_tree;
922
923 for (; !tsi_end_p (oi); tsi_next (&oi))
924 {
925 tree stmt = tsi_stmt (oi);
926 if (TREE_CODE (stmt) == STATEMENT_LIST)
927 /* This copy is not redundant; tsi_link_after will smash this
928 STATEMENT_LIST into the end of the one we're building, and we
929 don't want to do that with the original. */
930 copy_statement_list (&stmt);
931 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
932 }
933 }
934
935 static void
936 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
937 {
938 tree block = BIND_EXPR_BLOCK (*tp);
939 /* Copy (and replace) the statement. */
940 copy_tree_r (tp, walk_subtrees, NULL);
941 if (block)
942 {
943 remap_block (&block, id);
944 BIND_EXPR_BLOCK (*tp) = block;
945 }
946
947 if (BIND_EXPR_VARS (*tp))
948 /* This will remap a lot of the same decls again, but this should be
949 harmless. */
950 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
951 }
952
953
954 /* Create a new gimple_seq by remapping all the statements in BODY
955 using the inlining information in ID. */
956
957 static gimple_seq
958 remap_gimple_seq (gimple_seq body, copy_body_data *id)
959 {
960 gimple_stmt_iterator si;
961 gimple_seq new_body = NULL;
962
963 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
964 {
965 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
966 gimple_seq_add_seq (&new_body, new_stmts);
967 }
968
969 return new_body;
970 }
971
972
973 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
974 block using the mapping information in ID. */
975
976 static gimple *
977 copy_gimple_bind (gbind *stmt, copy_body_data *id)
978 {
979 gimple *new_bind;
980 tree new_block, new_vars;
981 gimple_seq body, new_body;
982
983 /* Copy the statement. Note that we purposely don't use copy_stmt
984 here because we need to remap statements as we copy. */
985 body = gimple_bind_body (stmt);
986 new_body = remap_gimple_seq (body, id);
987
988 new_block = gimple_bind_block (stmt);
989 if (new_block)
990 remap_block (&new_block, id);
991
992 /* This will remap a lot of the same decls again, but this should be
993 harmless. */
994 new_vars = gimple_bind_vars (stmt);
995 if (new_vars)
996 new_vars = remap_decls (new_vars, NULL, id);
997
998 new_bind = gimple_build_bind (new_vars, new_body, new_block);
999
1000 return new_bind;
1001 }
1002
1003 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
1004
1005 static bool
1006 is_parm (tree decl)
1007 {
1008 if (TREE_CODE (decl) == SSA_NAME)
1009 {
1010 decl = SSA_NAME_VAR (decl);
1011 if (!decl)
1012 return false;
1013 }
1014
1015 return (TREE_CODE (decl) == PARM_DECL);
1016 }
1017
1018 /* Remap the dependence CLIQUE from the source to the destination function
1019 as specified in ID. */
1020
1021 static unsigned short
1022 remap_dependence_clique (copy_body_data *id, unsigned short clique)
1023 {
1024 if (clique == 0 || processing_debug_stmt)
1025 return 0;
1026 if (!id->dependence_map)
1027 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
1028 bool existed;
1029 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
1030 if (!existed)
1031 {
1032 /* Clique 1 is reserved for local ones set by PTA. */
1033 if (cfun->last_clique == 0)
1034 cfun->last_clique = 1;
1035 newc = ++cfun->last_clique;
1036 }
1037 return newc;
1038 }
1039
1040 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
1041 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
1042 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
1043 recursing into the children nodes of *TP. */
1044
1045 static tree
1046 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
1047 {
1048 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
1049 copy_body_data *id = (copy_body_data *) wi_p->info;
1050 tree fn = id->src_fn;
1051
1052 /* For recursive invocations this is no longer the LHS itself. */
1053 bool is_lhs = wi_p->is_lhs;
1054 wi_p->is_lhs = false;
1055
1056 if (TREE_CODE (*tp) == SSA_NAME)
1057 {
1058 *tp = remap_ssa_name (*tp, id);
1059 *walk_subtrees = 0;
1060 if (is_lhs)
1061 SSA_NAME_DEF_STMT (*tp) = wi_p->stmt;
1062 return NULL;
1063 }
1064 else if (auto_var_in_fn_p (*tp, fn))
1065 {
1066 /* Local variables and labels need to be replaced by equivalent
1067 variables. We don't want to copy static variables; there's
1068 only one of those, no matter how many times we inline the
1069 containing function. Similarly for globals from an outer
1070 function. */
1071 tree new_decl;
1072
1073 /* Remap the declaration. */
1074 new_decl = remap_decl (*tp, id);
1075 gcc_assert (new_decl);
1076 /* Replace this variable with the copy. */
1077 STRIP_TYPE_NOPS (new_decl);
1078 /* ??? The C++ frontend uses void * pointer zero to initialize
1079 any other type. This confuses the middle-end type verification.
1080 As cloned bodies do not go through gimplification again the fixup
1081 there doesn't trigger. */
1082 if (TREE_CODE (new_decl) == INTEGER_CST
1083 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
1084 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
1085 *tp = new_decl;
1086 *walk_subtrees = 0;
1087 }
1088 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1089 gcc_unreachable ();
1090 else if (TREE_CODE (*tp) == SAVE_EXPR)
1091 gcc_unreachable ();
1092 else if (TREE_CODE (*tp) == LABEL_DECL
1093 && (!DECL_CONTEXT (*tp)
1094 || decl_function_context (*tp) == id->src_fn))
1095 /* These may need to be remapped for EH handling. */
1096 *tp = remap_decl (*tp, id);
1097 else if (TREE_CODE (*tp) == FIELD_DECL)
1098 {
1099 /* If the enclosing record type is variably_modified_type_p, the field
1100 has already been remapped. Otherwise, it need not be. */
1101 tree *n = id->decl_map->get (*tp);
1102 if (n)
1103 *tp = *n;
1104 *walk_subtrees = 0;
1105 }
1106 else if (TYPE_P (*tp))
1107 /* Types may need remapping as well. */
1108 *tp = remap_type (*tp, id);
1109 else if (CONSTANT_CLASS_P (*tp))
1110 {
1111 /* If this is a constant, we have to copy the node iff the type
1112 will be remapped. copy_tree_r will not copy a constant. */
1113 tree new_type = remap_type (TREE_TYPE (*tp), id);
1114
1115 if (new_type == TREE_TYPE (*tp))
1116 *walk_subtrees = 0;
1117
1118 else if (TREE_CODE (*tp) == INTEGER_CST)
1119 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1120 else
1121 {
1122 *tp = copy_node (*tp);
1123 TREE_TYPE (*tp) = new_type;
1124 }
1125 }
1126 else
1127 {
1128 /* Otherwise, just copy the node. Note that copy_tree_r already
1129 knows not to copy VAR_DECLs, etc., so this is safe. */
1130
1131 if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1132 {
1133 /* We need to re-canonicalize MEM_REFs from inline substitutions
1134 that can happen when a pointer argument is an ADDR_EXPR.
1135 Recurse here manually to allow that. */
1136 tree ptr = TREE_OPERAND (*tp, 0);
1137 tree type = remap_type (TREE_TYPE (*tp), id);
1138 tree old = *tp;
1139 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
1140 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1141 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1142 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1143 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1144 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1145 {
1146 MR_DEPENDENCE_CLIQUE (*tp)
1147 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1148 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1149 }
1150 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1151 remapped a parameter as the property might be valid only
1152 for the parameter itself. */
1153 if (TREE_THIS_NOTRAP (old)
1154 && (!is_parm (TREE_OPERAND (old, 0))
1155 || (!id->transform_parameter && is_parm (ptr))))
1156 TREE_THIS_NOTRAP (*tp) = 1;
1157 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1158 *walk_subtrees = 0;
1159 return NULL;
1160 }
1161
1162 /* Here is the "usual case". Copy this tree node, and then
1163 tweak some special cases. */
1164 copy_tree_r (tp, walk_subtrees, NULL);
1165
1166 if (TREE_CODE (*tp) != OMP_CLAUSE)
1167 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1168
1169 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1170 {
1171 /* The copied TARGET_EXPR has never been expanded, even if the
1172 original node was expanded already. */
1173 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1174 TREE_OPERAND (*tp, 3) = NULL_TREE;
1175 }
1176 else if (TREE_CODE (*tp) == ADDR_EXPR)
1177 {
1178 /* Variable substitution need not be simple. In particular,
1179 the MEM_REF substitution above. Make sure that
1180 TREE_CONSTANT and friends are up-to-date. */
1181 int invariant = is_gimple_min_invariant (*tp);
1182 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
1183 recompute_tree_invariant_for_addr_expr (*tp);
1184
1185 /* If this used to be invariant, but is not any longer,
1186 then regimplification is probably needed. */
1187 if (invariant && !is_gimple_min_invariant (*tp))
1188 id->regimplify = true;
1189
1190 *walk_subtrees = 0;
1191 }
1192 }
1193
1194 /* Update the TREE_BLOCK for the cloned expr. */
1195 if (EXPR_P (*tp))
1196 {
1197 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1198 tree old_block = TREE_BLOCK (*tp);
1199 if (old_block)
1200 {
1201 tree *n;
1202 n = id->decl_map->get (TREE_BLOCK (*tp));
1203 if (n)
1204 new_block = *n;
1205 }
1206 TREE_SET_BLOCK (*tp, new_block);
1207 }
1208
1209 /* Keep iterating. */
1210 return NULL_TREE;
1211 }
1212
1213
1214 /* Called from copy_body_id via walk_tree. DATA is really a
1215 `copy_body_data *'. */
1216
1217 tree
1218 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1219 {
1220 copy_body_data *id = (copy_body_data *) data;
1221 tree fn = id->src_fn;
1222 tree new_block;
1223
1224 /* Begin by recognizing trees that we'll completely rewrite for the
1225 inlining context. Our output for these trees is completely
1226 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1227 into an edge). Further down, we'll handle trees that get
1228 duplicated and/or tweaked. */
1229
1230 /* When requested, RETURN_EXPRs should be transformed to just the
1231 contained MODIFY_EXPR. The branch semantics of the return will
1232 be handled elsewhere by manipulating the CFG rather than a statement. */
1233 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1234 {
1235 tree assignment = TREE_OPERAND (*tp, 0);
1236
1237 /* If we're returning something, just turn that into an
1238 assignment into the equivalent of the original RESULT_DECL.
1239 If the "assignment" is just the result decl, the result
1240 decl has already been set (e.g. a recent "foo (&result_decl,
1241 ...)"); just toss the entire RETURN_EXPR. */
1242 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1243 {
1244 /* Replace the RETURN_EXPR with (a copy of) the
1245 MODIFY_EXPR hanging underneath. */
1246 *tp = copy_node (assignment);
1247 }
1248 else /* Else the RETURN_EXPR returns no value. */
1249 {
1250 *tp = NULL;
1251 return (tree) (void *)1;
1252 }
1253 }
1254 else if (TREE_CODE (*tp) == SSA_NAME)
1255 {
1256 *tp = remap_ssa_name (*tp, id);
1257 *walk_subtrees = 0;
1258 return NULL;
1259 }
1260
1261 /* Local variables and labels need to be replaced by equivalent
1262 variables. We don't want to copy static variables; there's only
1263 one of those, no matter how many times we inline the containing
1264 function. Similarly for globals from an outer function. */
1265 else if (auto_var_in_fn_p (*tp, fn))
1266 {
1267 tree new_decl;
1268
1269 /* Remap the declaration. */
1270 new_decl = remap_decl (*tp, id);
1271 gcc_assert (new_decl);
1272 /* Replace this variable with the copy. */
1273 STRIP_TYPE_NOPS (new_decl);
1274 *tp = new_decl;
1275 *walk_subtrees = 0;
1276 }
1277 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1278 copy_statement_list (tp);
1279 else if (TREE_CODE (*tp) == SAVE_EXPR
1280 || TREE_CODE (*tp) == TARGET_EXPR)
1281 remap_save_expr (tp, id->decl_map, walk_subtrees);
1282 else if (TREE_CODE (*tp) == LABEL_DECL
1283 && (! DECL_CONTEXT (*tp)
1284 || decl_function_context (*tp) == id->src_fn))
1285 /* These may need to be remapped for EH handling. */
1286 *tp = remap_decl (*tp, id);
1287 else if (TREE_CODE (*tp) == BIND_EXPR)
1288 copy_bind_expr (tp, walk_subtrees, id);
1289 /* Types may need remapping as well. */
1290 else if (TYPE_P (*tp))
1291 *tp = remap_type (*tp, id);
1292
1293 /* If this is a constant, we have to copy the node iff the type will be
1294 remapped. copy_tree_r will not copy a constant. */
1295 else if (CONSTANT_CLASS_P (*tp))
1296 {
1297 tree new_type = remap_type (TREE_TYPE (*tp), id);
1298
1299 if (new_type == TREE_TYPE (*tp))
1300 *walk_subtrees = 0;
1301
1302 else if (TREE_CODE (*tp) == INTEGER_CST)
1303 *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
1304 else
1305 {
1306 *tp = copy_node (*tp);
1307 TREE_TYPE (*tp) = new_type;
1308 }
1309 }
1310
1311 /* Otherwise, just copy the node. Note that copy_tree_r already
1312 knows not to copy VAR_DECLs, etc., so this is safe. */
1313 else
1314 {
1315 /* Here we handle trees that are not completely rewritten.
1316 First we detect some inlining-induced bogosities for
1317 discarding. */
1318 if (TREE_CODE (*tp) == MODIFY_EXPR
1319 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1320 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1321 {
1322 /* Some assignments VAR = VAR; don't generate any rtl code
1323 and thus don't count as variable modification. Avoid
1324 keeping bogosities like 0 = 0. */
1325 tree decl = TREE_OPERAND (*tp, 0), value;
1326 tree *n;
1327
1328 n = id->decl_map->get (decl);
1329 if (n)
1330 {
1331 value = *n;
1332 STRIP_TYPE_NOPS (value);
1333 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1334 {
1335 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1336 return copy_tree_body_r (tp, walk_subtrees, data);
1337 }
1338 }
1339 }
1340 else if (TREE_CODE (*tp) == INDIRECT_REF)
1341 {
1342 /* Get rid of *& from inline substitutions that can happen when a
1343 pointer argument is an ADDR_EXPR. */
1344 tree decl = TREE_OPERAND (*tp, 0);
1345 tree *n = id->decl_map->get (decl);
1346 if (n)
1347 {
1348 /* If we happen to get an ADDR_EXPR in n->value, strip
1349 it manually here as we'll eventually get ADDR_EXPRs
1350 which lie about their types pointed to. In this case
1351 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1352 but we absolutely rely on that. As fold_indirect_ref
1353 does other useful transformations, try that first, though. */
1354 tree type = TREE_TYPE (*tp);
1355 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1356 tree old = *tp;
1357 *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
1358 if (! *tp)
1359 {
1360 type = remap_type (type, id);
1361 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
1362 {
1363 *tp
1364 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1365 /* ??? We should either assert here or build
1366 a VIEW_CONVERT_EXPR instead of blindly leaking
1367 incompatible types to our IL. */
1368 if (! *tp)
1369 *tp = TREE_OPERAND (ptr, 0);
1370 }
1371 else
1372 {
1373 *tp = build1 (INDIRECT_REF, type, ptr);
1374 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1375 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1376 TREE_READONLY (*tp) = TREE_READONLY (old);
1377 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1378 have remapped a parameter as the property might be
1379 valid only for the parameter itself. */
1380 if (TREE_THIS_NOTRAP (old)
1381 && (!is_parm (TREE_OPERAND (old, 0))
1382 || (!id->transform_parameter && is_parm (ptr))))
1383 TREE_THIS_NOTRAP (*tp) = 1;
1384 }
1385 }
1386 *walk_subtrees = 0;
1387 return NULL;
1388 }
1389 }
1390 else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
1391 {
1392 /* We need to re-canonicalize MEM_REFs from inline substitutions
1393 that can happen when a pointer argument is an ADDR_EXPR.
1394 Recurse here manually to allow that. */
1395 tree ptr = TREE_OPERAND (*tp, 0);
1396 tree type = remap_type (TREE_TYPE (*tp), id);
1397 tree old = *tp;
1398 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1399 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1400 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1401 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1402 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1403 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1404 {
1405 MR_DEPENDENCE_CLIQUE (*tp)
1406 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1407 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1408 }
1409 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1410 remapped a parameter as the property might be valid only
1411 for the parameter itself. */
1412 if (TREE_THIS_NOTRAP (old)
1413 && (!is_parm (TREE_OPERAND (old, 0))
1414 || (!id->transform_parameter && is_parm (ptr))))
1415 TREE_THIS_NOTRAP (*tp) = 1;
1416 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1417 *walk_subtrees = 0;
1418 return NULL;
1419 }
1420
1421 /* Here is the "usual case". Copy this tree node, and then
1422 tweak some special cases. */
1423 copy_tree_r (tp, walk_subtrees, NULL);
1424
1425 /* If EXPR has block defined, map it to newly constructed block.
1426 When inlining we want EXPRs without block appear in the block
1427 of function call if we are not remapping a type. */
1428 if (EXPR_P (*tp))
1429 {
1430 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1431 if (TREE_BLOCK (*tp))
1432 {
1433 tree *n;
1434 n = id->decl_map->get (TREE_BLOCK (*tp));
1435 if (n)
1436 new_block = *n;
1437 }
1438 TREE_SET_BLOCK (*tp, new_block);
1439 }
1440
1441 if (TREE_CODE (*tp) != OMP_CLAUSE)
1442 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1443
1444 /* The copied TARGET_EXPR has never been expanded, even if the
1445 original node was expanded already. */
1446 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1447 {
1448 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1449 TREE_OPERAND (*tp, 3) = NULL_TREE;
1450 }
1451
1452 /* Variable substitution need not be simple. In particular, the
1453 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1454 and friends are up-to-date. */
1455 else if (TREE_CODE (*tp) == ADDR_EXPR)
1456 {
1457 int invariant = is_gimple_min_invariant (*tp);
1458 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1459
1460 /* Handle the case where we substituted an INDIRECT_REF
1461 into the operand of the ADDR_EXPR. */
1462 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
1463 && !id->do_not_fold)
1464 {
1465 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1466 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1467 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1468 *tp = t;
1469 }
1470 else
1471 recompute_tree_invariant_for_addr_expr (*tp);
1472
1473 /* If this used to be invariant, but is not any longer,
1474 then regimplification is probably needed. */
1475 if (invariant && !is_gimple_min_invariant (*tp))
1476 id->regimplify = true;
1477
1478 *walk_subtrees = 0;
1479 }
1480 }
1481
1482 /* Keep iterating. */
1483 return NULL_TREE;
1484 }
1485
1486 /* Helper for remap_gimple_stmt. Given an EH region number for the
1487 source function, map that to the duplicate EH region number in
1488 the destination function. */
1489
1490 static int
1491 remap_eh_region_nr (int old_nr, copy_body_data *id)
1492 {
1493 eh_region old_r, new_r;
1494
1495 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1496 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1497
1498 return new_r->index;
1499 }
1500
1501 /* Similar, but operate on INTEGER_CSTs. */
1502
1503 static tree
1504 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1505 {
1506 int old_nr, new_nr;
1507
1508 old_nr = tree_to_shwi (old_t_nr);
1509 new_nr = remap_eh_region_nr (old_nr, id);
1510
1511 return build_int_cst (integer_type_node, new_nr);
1512 }
1513
1514 /* Helper for copy_bb. Remap statement STMT using the inlining
1515 information in ID. Return the new statement copy. */
1516
1517 static gimple_seq
1518 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1519 {
1520 gimple *copy = NULL;
1521 struct walk_stmt_info wi;
1522 bool skip_first = false;
1523 gimple_seq stmts = NULL;
1524
1525 if (is_gimple_debug (stmt)
1526 && (gimple_debug_nonbind_marker_p (stmt)
1527 ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
1528 : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
1529 return NULL;
1530
1531 /* Begin by recognizing trees that we'll completely rewrite for the
1532 inlining context. Our output for these trees is completely
1533 different from our input (e.g. RETURN_EXPR is deleted and morphs
1534 into an edge). Further down, we'll handle trees that get
1535 duplicated and/or tweaked. */
1536
1537 /* When requested, GIMPLE_RETURN should be transformed to just the
1538 contained GIMPLE_ASSIGN. The branch semantics of the return will
1539 be handled elsewhere by manipulating the CFG rather than the
1540 statement. */
1541 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1542 {
1543 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1544
1545 /* If we're returning something, just turn that into an
1546 assignment to the equivalent of the original RESULT_DECL.
1547 If RETVAL is just the result decl, the result decl has
1548 already been set (e.g. a recent "foo (&result_decl, ...)");
1549 just toss the entire GIMPLE_RETURN. Likewise for when the
1550 call doesn't want the return value. */
1551 if (retval
1552 && (TREE_CODE (retval) != RESULT_DECL
1553 && (!id->call_stmt
1554 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
1555 && (TREE_CODE (retval) != SSA_NAME
1556 || ! SSA_NAME_VAR (retval)
1557 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1558 {
1559 copy = gimple_build_assign (id->do_not_unshare
1560 ? id->retvar : unshare_expr (id->retvar),
1561 retval);
1562 /* id->retvar is already substituted. Skip it on later remapping. */
1563 skip_first = true;
1564 }
1565 else
1566 return NULL;
1567 }
1568 else if (gimple_has_substatements (stmt))
1569 {
1570 gimple_seq s1, s2;
1571
1572 /* When cloning bodies from the C++ front end, we will be handed bodies
1573 in High GIMPLE form. Handle here all the High GIMPLE statements that
1574 have embedded statements. */
1575 switch (gimple_code (stmt))
1576 {
1577 case GIMPLE_BIND:
1578 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1579 break;
1580
1581 case GIMPLE_CATCH:
1582 {
1583 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1584 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1585 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1586 }
1587 break;
1588
1589 case GIMPLE_EH_FILTER:
1590 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1591 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1592 break;
1593
1594 case GIMPLE_TRY:
1595 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1596 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1597 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1598 break;
1599
1600 case GIMPLE_WITH_CLEANUP_EXPR:
1601 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1602 copy = gimple_build_wce (s1);
1603 break;
1604
1605 case GIMPLE_OMP_PARALLEL:
1606 {
1607 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1608 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1609 copy = gimple_build_omp_parallel
1610 (s1,
1611 gimple_omp_parallel_clauses (omp_par_stmt),
1612 gimple_omp_parallel_child_fn (omp_par_stmt),
1613 gimple_omp_parallel_data_arg (omp_par_stmt));
1614 }
1615 break;
1616
1617 case GIMPLE_OMP_TASK:
1618 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1619 copy = gimple_build_omp_task
1620 (s1,
1621 gimple_omp_task_clauses (stmt),
1622 gimple_omp_task_child_fn (stmt),
1623 gimple_omp_task_data_arg (stmt),
1624 gimple_omp_task_copy_fn (stmt),
1625 gimple_omp_task_arg_size (stmt),
1626 gimple_omp_task_arg_align (stmt));
1627 break;
1628
1629 case GIMPLE_OMP_FOR:
1630 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1631 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1632 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1633 gimple_omp_for_clauses (stmt),
1634 gimple_omp_for_collapse (stmt), s2);
1635 {
1636 size_t i;
1637 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1638 {
1639 gimple_omp_for_set_index (copy, i,
1640 gimple_omp_for_index (stmt, i));
1641 gimple_omp_for_set_initial (copy, i,
1642 gimple_omp_for_initial (stmt, i));
1643 gimple_omp_for_set_final (copy, i,
1644 gimple_omp_for_final (stmt, i));
1645 gimple_omp_for_set_incr (copy, i,
1646 gimple_omp_for_incr (stmt, i));
1647 gimple_omp_for_set_cond (copy, i,
1648 gimple_omp_for_cond (stmt, i));
1649 }
1650 }
1651 break;
1652
1653 case GIMPLE_OMP_MASTER:
1654 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1655 copy = gimple_build_omp_master (s1);
1656 break;
1657
1658 case GIMPLE_OMP_TASKGROUP:
1659 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1660 copy = gimple_build_omp_taskgroup
1661 (s1, gimple_omp_taskgroup_clauses (stmt));
1662 break;
1663
1664 case GIMPLE_OMP_ORDERED:
1665 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1666 copy = gimple_build_omp_ordered
1667 (s1,
1668 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1669 break;
1670
1671 case GIMPLE_OMP_SCAN:
1672 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1673 copy = gimple_build_omp_scan
1674 (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
1675 break;
1676
1677 case GIMPLE_OMP_SECTION:
1678 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1679 copy = gimple_build_omp_section (s1);
1680 break;
1681
1682 case GIMPLE_OMP_SECTIONS:
1683 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1684 copy = gimple_build_omp_sections
1685 (s1, gimple_omp_sections_clauses (stmt));
1686 break;
1687
1688 case GIMPLE_OMP_SINGLE:
1689 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1690 copy = gimple_build_omp_single
1691 (s1, gimple_omp_single_clauses (stmt));
1692 break;
1693
1694 case GIMPLE_OMP_TARGET:
1695 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1696 copy = gimple_build_omp_target
1697 (s1, gimple_omp_target_kind (stmt),
1698 gimple_omp_target_clauses (stmt));
1699 break;
1700
1701 case GIMPLE_OMP_TEAMS:
1702 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1703 copy = gimple_build_omp_teams
1704 (s1, gimple_omp_teams_clauses (stmt));
1705 break;
1706
1707 case GIMPLE_OMP_CRITICAL:
1708 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1709 copy = gimple_build_omp_critical (s1,
1710 gimple_omp_critical_name
1711 (as_a <gomp_critical *> (stmt)),
1712 gimple_omp_critical_clauses
1713 (as_a <gomp_critical *> (stmt)));
1714 break;
1715
1716 case GIMPLE_TRANSACTION:
1717 {
1718 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1719 gtransaction *new_trans_stmt;
1720 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1721 id);
1722 copy = new_trans_stmt = gimple_build_transaction (s1);
1723 gimple_transaction_set_subcode (new_trans_stmt,
1724 gimple_transaction_subcode (old_trans_stmt));
1725 gimple_transaction_set_label_norm (new_trans_stmt,
1726 gimple_transaction_label_norm (old_trans_stmt));
1727 gimple_transaction_set_label_uninst (new_trans_stmt,
1728 gimple_transaction_label_uninst (old_trans_stmt));
1729 gimple_transaction_set_label_over (new_trans_stmt,
1730 gimple_transaction_label_over (old_trans_stmt));
1731 }
1732 break;
1733
1734 default:
1735 gcc_unreachable ();
1736 }
1737 }
1738 else
1739 {
1740 if (gimple_assign_copy_p (stmt)
1741 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1742 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1743 {
1744 /* Here we handle statements that are not completely rewritten.
1745 First we detect some inlining-induced bogosities for
1746 discarding. */
1747
1748 /* Some assignments VAR = VAR; don't generate any rtl code
1749 and thus don't count as variable modification. Avoid
1750 keeping bogosities like 0 = 0. */
1751 tree decl = gimple_assign_lhs (stmt), value;
1752 tree *n;
1753
1754 n = id->decl_map->get (decl);
1755 if (n)
1756 {
1757 value = *n;
1758 STRIP_TYPE_NOPS (value);
1759 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1760 return NULL;
1761 }
1762 }
1763
1764 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1765 in a block that we aren't copying during tree_function_versioning,
1766 just drop the clobber stmt. */
1767 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1768 {
1769 tree lhs = gimple_assign_lhs (stmt);
1770 if (TREE_CODE (lhs) == MEM_REF
1771 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1772 {
1773 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1774 if (gimple_bb (def_stmt)
1775 && !bitmap_bit_p (id->blocks_to_copy,
1776 gimple_bb (def_stmt)->index))
1777 return NULL;
1778 }
1779 }
1780
1781 /* We do not allow CLOBBERs of handled components. In case
1782 returned value is stored via such handled component, remove
1783 the clobber so stmt verifier is happy. */
1784 if (gimple_clobber_p (stmt)
1785 && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
1786 {
1787 tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
1788 if (!DECL_P (remapped)
1789 && TREE_CODE (remapped) != MEM_REF)
1790 return NULL;
1791 }
1792
1793 if (gimple_debug_bind_p (stmt))
1794 {
1795 gdebug *copy
1796 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1797 gimple_debug_bind_get_value (stmt),
1798 stmt);
1799 if (id->reset_location)
1800 gimple_set_location (copy, input_location);
1801 id->debug_stmts.safe_push (copy);
1802 gimple_seq_add_stmt (&stmts, copy);
1803 return stmts;
1804 }
1805 if (gimple_debug_source_bind_p (stmt))
1806 {
1807 gdebug *copy = gimple_build_debug_source_bind
1808 (gimple_debug_source_bind_get_var (stmt),
1809 gimple_debug_source_bind_get_value (stmt),
1810 stmt);
1811 if (id->reset_location)
1812 gimple_set_location (copy, input_location);
1813 id->debug_stmts.safe_push (copy);
1814 gimple_seq_add_stmt (&stmts, copy);
1815 return stmts;
1816 }
1817 if (gimple_debug_nonbind_marker_p (stmt))
1818 {
1819 /* If the inlined function has too many debug markers,
1820 don't copy them. */
1821 if (id->src_cfun->debug_marker_count
1822 > param_max_debug_marker_count
1823 || id->reset_location)
1824 return stmts;
1825
1826 gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
1827 id->debug_stmts.safe_push (copy);
1828 gimple_seq_add_stmt (&stmts, copy);
1829 return stmts;
1830 }
1831
1832 /* Create a new deep copy of the statement. */
1833 copy = gimple_copy (stmt);
1834
1835 /* Clear flags that need revisiting. */
1836 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1837 {
1838 if (gimple_call_tail_p (call_stmt))
1839 gimple_call_set_tail (call_stmt, false);
1840 if (gimple_call_from_thunk_p (call_stmt))
1841 gimple_call_set_from_thunk (call_stmt, false);
1842 if (gimple_call_internal_p (call_stmt))
1843 switch (gimple_call_internal_fn (call_stmt))
1844 {
1845 case IFN_GOMP_SIMD_LANE:
1846 case IFN_GOMP_SIMD_VF:
1847 case IFN_GOMP_SIMD_LAST_LANE:
1848 case IFN_GOMP_SIMD_ORDERED_START:
1849 case IFN_GOMP_SIMD_ORDERED_END:
1850 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1851 break;
1852 default:
1853 break;
1854 }
1855 }
1856
1857 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1858 RESX and EH_DISPATCH. */
1859 if (id->eh_map)
1860 switch (gimple_code (copy))
1861 {
1862 case GIMPLE_CALL:
1863 {
1864 tree r, fndecl = gimple_call_fndecl (copy);
1865 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1866 switch (DECL_FUNCTION_CODE (fndecl))
1867 {
1868 case BUILT_IN_EH_COPY_VALUES:
1869 r = gimple_call_arg (copy, 1);
1870 r = remap_eh_region_tree_nr (r, id);
1871 gimple_call_set_arg (copy, 1, r);
1872 /* FALLTHRU */
1873
1874 case BUILT_IN_EH_POINTER:
1875 case BUILT_IN_EH_FILTER:
1876 r = gimple_call_arg (copy, 0);
1877 r = remap_eh_region_tree_nr (r, id);
1878 gimple_call_set_arg (copy, 0, r);
1879 break;
1880
1881 default:
1882 break;
1883 }
1884
1885 /* Reset alias info if we didn't apply measures to
1886 keep it valid over inlining by setting DECL_PT_UID. */
1887 if (!id->src_cfun->gimple_df
1888 || !id->src_cfun->gimple_df->ipa_pta)
1889 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1890 }
1891 break;
1892
1893 case GIMPLE_RESX:
1894 {
1895 gresx *resx_stmt = as_a <gresx *> (copy);
1896 int r = gimple_resx_region (resx_stmt);
1897 r = remap_eh_region_nr (r, id);
1898 gimple_resx_set_region (resx_stmt, r);
1899 }
1900 break;
1901
1902 case GIMPLE_EH_DISPATCH:
1903 {
1904 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1905 int r = gimple_eh_dispatch_region (eh_dispatch);
1906 r = remap_eh_region_nr (r, id);
1907 gimple_eh_dispatch_set_region (eh_dispatch, r);
1908 }
1909 break;
1910
1911 default:
1912 break;
1913 }
1914 }
1915
1916 /* If STMT has a block defined, map it to the newly constructed block. */
1917 if (tree block = gimple_block (copy))
1918 {
1919 tree *n;
1920 n = id->decl_map->get (block);
1921 gcc_assert (n);
1922 gimple_set_block (copy, *n);
1923 }
1924 if (id->param_body_adjs)
1925 {
1926 gimple_seq extra_stmts = NULL;
1927 id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
1928 if (!gimple_seq_empty_p (extra_stmts))
1929 {
1930 memset (&wi, 0, sizeof (wi));
1931 wi.info = id;
1932 for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
1933 !gsi_end_p (egsi);
1934 gsi_next (&egsi))
1935 walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
1936 gimple_seq_add_seq (&stmts, extra_stmts);
1937 }
1938 }
1939
1940 if (id->reset_location)
1941 gimple_set_location (copy, input_location);
1942
1943 /* Debug statements ought to be rebuilt and not copied. */
1944 gcc_checking_assert (!is_gimple_debug (copy));
1945
1946 /* Remap all the operands in COPY. */
1947 memset (&wi, 0, sizeof (wi));
1948 wi.info = id;
1949 if (skip_first)
1950 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1951 else
1952 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1953
1954 /* Clear the copied virtual operands. We are not remapping them here
1955 but are going to recreate them from scratch. */
1956 if (gimple_has_mem_ops (copy))
1957 {
1958 gimple_set_vdef (copy, NULL_TREE);
1959 gimple_set_vuse (copy, NULL_TREE);
1960 }
1961
1962 if (cfun->can_throw_non_call_exceptions)
1963 {
1964 /* When inlining a function which does not have non-call exceptions
1965 enabled into a function that has (which only happens with
1966 always-inline) we have to fixup stmts that cannot throw. */
1967 if (gcond *cond = dyn_cast <gcond *> (copy))
1968 if (gimple_could_trap_p (cond))
1969 {
1970 gassign *cmp
1971 = gimple_build_assign (make_ssa_name (boolean_type_node),
1972 gimple_cond_code (cond),
1973 gimple_cond_lhs (cond),
1974 gimple_cond_rhs (cond));
1975 gimple_seq_add_stmt (&stmts, cmp);
1976 gimple_cond_set_code (cond, NE_EXPR);
1977 gimple_cond_set_lhs (cond, gimple_assign_lhs (cmp));
1978 gimple_cond_set_rhs (cond, boolean_false_node);
1979 }
1980 }
1981
1982 gimple_seq_add_stmt (&stmts, copy);
1983 return stmts;
1984 }
1985
1986
1987 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1988 later */
1989
1990 static basic_block
1991 copy_bb (copy_body_data *id, basic_block bb,
1992 profile_count num, profile_count den)
1993 {
1994 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1995 basic_block copy_basic_block;
1996 tree decl;
1997 basic_block prev;
1998
1999 profile_count::adjust_for_ipa_scaling (&num, &den);
2000
2001 /* Search for previous copied basic block. */
2002 prev = bb->prev_bb;
2003 while (!prev->aux)
2004 prev = prev->prev_bb;
2005
2006 /* create_basic_block() will append every new block to
2007 basic_block_info automatically. */
2008 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
2009 copy_basic_block->count = bb->count.apply_scale (num, den);
2010
2011 copy_gsi = gsi_start_bb (copy_basic_block);
2012
2013 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2014 {
2015 gimple_seq stmts;
2016 gimple *stmt = gsi_stmt (gsi);
2017 gimple *orig_stmt = stmt;
2018 gimple_stmt_iterator stmts_gsi;
2019 bool stmt_added = false;
2020
2021 id->regimplify = false;
2022 stmts = remap_gimple_stmt (stmt, id);
2023
2024 if (gimple_seq_empty_p (stmts))
2025 continue;
2026
2027 seq_gsi = copy_gsi;
2028
2029 for (stmts_gsi = gsi_start (stmts);
2030 !gsi_end_p (stmts_gsi); )
2031 {
2032 stmt = gsi_stmt (stmts_gsi);
2033
2034 /* Advance iterator now before stmt is moved to seq_gsi. */
2035 gsi_next (&stmts_gsi);
2036
2037 if (gimple_nop_p (stmt))
2038 continue;
2039
2040 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
2041 orig_stmt);
2042
2043 /* With return slot optimization we can end up with
2044 non-gimple (foo *)&this->m, fix that here. */
2045 if (is_gimple_assign (stmt)
2046 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
2047 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
2048 {
2049 tree new_rhs;
2050 new_rhs = force_gimple_operand_gsi (&seq_gsi,
2051 gimple_assign_rhs1 (stmt),
2052 true, NULL, false,
2053 GSI_CONTINUE_LINKING);
2054 gimple_assign_set_rhs1 (stmt, new_rhs);
2055 id->regimplify = false;
2056 }
2057
2058 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
2059
2060 if (id->regimplify)
2061 gimple_regimplify_operands (stmt, &seq_gsi);
2062
2063 stmt_added = true;
2064 }
2065
2066 if (!stmt_added)
2067 continue;
2068
2069 /* If copy_basic_block has been empty at the start of this iteration,
2070 call gsi_start_bb again to get at the newly added statements. */
2071 if (gsi_end_p (copy_gsi))
2072 copy_gsi = gsi_start_bb (copy_basic_block);
2073 else
2074 gsi_next (&copy_gsi);
2075
2076 /* Process the new statement. The call to gimple_regimplify_operands
2077 possibly turned the statement into multiple statements, we
2078 need to process all of them. */
2079 do
2080 {
2081 tree fn;
2082 gcall *call_stmt;
2083
2084 stmt = gsi_stmt (copy_gsi);
2085 call_stmt = dyn_cast <gcall *> (stmt);
2086 if (call_stmt
2087 && gimple_call_va_arg_pack_p (call_stmt)
2088 && id->call_stmt
2089 && ! gimple_call_va_arg_pack_p (id->call_stmt))
2090 {
2091 /* __builtin_va_arg_pack () should be replaced by
2092 all arguments corresponding to ... in the caller. */
2093 tree p;
2094 gcall *new_call;
2095 vec<tree> argarray;
2096 size_t nargs = gimple_call_num_args (id->call_stmt);
2097 size_t n;
2098
2099 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2100 nargs--;
2101
2102 /* Create the new array of arguments. */
2103 n = nargs + gimple_call_num_args (call_stmt);
2104 argarray.create (n);
2105 argarray.safe_grow_cleared (n, true);
2106
2107 /* Copy all the arguments before '...' */
2108 memcpy (argarray.address (),
2109 gimple_call_arg_ptr (call_stmt, 0),
2110 gimple_call_num_args (call_stmt) * sizeof (tree));
2111
2112 /* Append the arguments passed in '...' */
2113 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
2114 gimple_call_arg_ptr (id->call_stmt, 0)
2115 + (gimple_call_num_args (id->call_stmt) - nargs),
2116 nargs * sizeof (tree));
2117
2118 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
2119 argarray);
2120
2121 argarray.release ();
2122
2123 /* Copy all GIMPLE_CALL flags, location and block, except
2124 GF_CALL_VA_ARG_PACK. */
2125 gimple_call_copy_flags (new_call, call_stmt);
2126 gimple_call_set_va_arg_pack (new_call, false);
2127 /* location includes block. */
2128 gimple_set_location (new_call, gimple_location (stmt));
2129 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
2130
2131 gsi_replace (&copy_gsi, new_call, false);
2132 stmt = new_call;
2133 }
2134 else if (call_stmt
2135 && id->call_stmt
2136 && (decl = gimple_call_fndecl (stmt))
2137 && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
2138 {
2139 /* __builtin_va_arg_pack_len () should be replaced by
2140 the number of anonymous arguments. */
2141 size_t nargs = gimple_call_num_args (id->call_stmt);
2142 tree count, p;
2143 gimple *new_stmt;
2144
2145 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
2146 nargs--;
2147
2148 if (!gimple_call_lhs (stmt))
2149 {
2150 /* Drop unused calls. */
2151 gsi_remove (&copy_gsi, false);
2152 continue;
2153 }
2154 else if (!gimple_call_va_arg_pack_p (id->call_stmt))
2155 {
2156 count = build_int_cst (integer_type_node, nargs);
2157 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
2158 gsi_replace (&copy_gsi, new_stmt, false);
2159 stmt = new_stmt;
2160 }
2161 else if (nargs != 0)
2162 {
2163 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
2164 count = build_int_cst (integer_type_node, nargs);
2165 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2166 PLUS_EXPR, newlhs, count);
2167 gimple_call_set_lhs (stmt, newlhs);
2168 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
2169 }
2170 }
2171 else if (call_stmt
2172 && id->call_stmt
2173 && gimple_call_internal_p (stmt)
2174 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
2175 {
2176 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
2177 gsi_remove (&copy_gsi, false);
2178 continue;
2179 }
2180
2181 /* Statements produced by inlining can be unfolded, especially
2182 when we constant propagated some operands. We can't fold
2183 them right now for two reasons:
2184 1) folding require SSA_NAME_DEF_STMTs to be correct
2185 2) we can't change function calls to builtins.
2186 So we just mark statement for later folding. We mark
2187 all new statements, instead just statements that has changed
2188 by some nontrivial substitution so even statements made
2189 foldable indirectly are updated. If this turns out to be
2190 expensive, copy_body can be told to watch for nontrivial
2191 changes. */
2192 if (id->statements_to_fold)
2193 id->statements_to_fold->add (stmt);
2194
2195 /* We're duplicating a CALL_EXPR. Find any corresponding
2196 callgraph edges and update or duplicate them. */
2197 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
2198 {
2199 struct cgraph_edge *edge;
2200
2201 switch (id->transform_call_graph_edges)
2202 {
2203 case CB_CGE_DUPLICATE:
2204 edge = id->src_node->get_edge (orig_stmt);
2205 if (edge)
2206 {
2207 struct cgraph_edge *old_edge = edge;
2208
2209 /* A speculative call is consist of multiple
2210 edges - indirect edge and one or more direct edges
2211 Duplicate the whole thing and distribute frequencies
2212 accordingly. */
2213 if (edge->speculative)
2214 {
2215 int n = 0;
2216 profile_count direct_cnt
2217 = profile_count::zero ();
2218
2219 /* First figure out the distribution of counts
2220 so we can re-scale BB profile accordingly. */
2221 for (cgraph_edge *e = old_edge; e;
2222 e = e->next_speculative_call_target ())
2223 direct_cnt = direct_cnt + e->count;
2224
2225 cgraph_edge *indirect
2226 = old_edge->speculative_call_indirect_edge ();
2227 profile_count indir_cnt = indirect->count;
2228
2229 /* Next iterate all direct edges, clone it and its
2230 corresponding reference and update profile. */
2231 for (cgraph_edge *e = old_edge;
2232 e;
2233 e = e->next_speculative_call_target ())
2234 {
2235 profile_count cnt = e->count;
2236
2237 id->dst_node->clone_reference
2238 (e->speculative_call_target_ref (), stmt);
2239 edge = e->clone (id->dst_node, call_stmt,
2240 gimple_uid (stmt), num, den,
2241 true);
2242 profile_probability prob
2243 = cnt.probability_in (direct_cnt
2244 + indir_cnt);
2245 edge->count
2246 = copy_basic_block->count.apply_probability
2247 (prob);
2248 n++;
2249 }
2250 gcc_checking_assert
2251 (indirect->num_speculative_call_targets_p ()
2252 == n);
2253
2254 /* Duplicate the indirect edge after all direct edges
2255 cloned. */
2256 indirect = indirect->clone (id->dst_node, call_stmt,
2257 gimple_uid (stmt),
2258 num, den,
2259 true);
2260
2261 profile_probability prob
2262 = indir_cnt.probability_in (direct_cnt
2263 + indir_cnt);
2264 indirect->count
2265 = copy_basic_block->count.apply_probability (prob);
2266 }
2267 else
2268 {
2269 edge = edge->clone (id->dst_node, call_stmt,
2270 gimple_uid (stmt),
2271 num, den,
2272 true);
2273 edge->count = copy_basic_block->count;
2274 }
2275 }
2276 break;
2277
2278 case CB_CGE_MOVE_CLONES:
2279 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2280 call_stmt);
2281 edge = id->dst_node->get_edge (stmt);
2282 break;
2283
2284 case CB_CGE_MOVE:
2285 edge = id->dst_node->get_edge (orig_stmt);
2286 if (edge)
2287 edge = cgraph_edge::set_call_stmt (edge, call_stmt);
2288 break;
2289
2290 default:
2291 gcc_unreachable ();
2292 }
2293
2294 /* Constant propagation on argument done during inlining
2295 may create new direct call. Produce an edge for it. */
2296 if ((!edge
2297 || (edge->indirect_inlining_edge
2298 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2299 && id->dst_node->definition
2300 && (fn = gimple_call_fndecl (stmt)) != NULL)
2301 {
2302 struct cgraph_node *dest = cgraph_node::get_create (fn);
2303
2304 /* We have missing edge in the callgraph. This can happen
2305 when previous inlining turned an indirect call into a
2306 direct call by constant propagating arguments or we are
2307 producing dead clone (for further cloning). In all
2308 other cases we hit a bug (incorrect node sharing is the
2309 most common reason for missing edges). */
2310 gcc_assert (!dest->definition
2311 || dest->address_taken
2312 || !id->src_node->definition
2313 || !id->dst_node->definition);
2314 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2315 id->dst_node->create_edge_including_clones
2316 (dest, orig_stmt, call_stmt, bb->count,
2317 CIF_ORIGINALLY_INDIRECT_CALL);
2318 else
2319 id->dst_node->create_edge (dest, call_stmt,
2320 bb->count)->inline_failed
2321 = CIF_ORIGINALLY_INDIRECT_CALL;
2322 if (dump_file)
2323 {
2324 fprintf (dump_file, "Created new direct edge to %s\n",
2325 dest->dump_name ());
2326 }
2327 }
2328
2329 notice_special_calls (as_a <gcall *> (stmt));
2330 }
2331
2332 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2333 id->eh_map, id->eh_lp_nr);
2334
2335 gsi_next (&copy_gsi);
2336 }
2337 while (!gsi_end_p (copy_gsi));
2338
2339 copy_gsi = gsi_last_bb (copy_basic_block);
2340 }
2341
2342 return copy_basic_block;
2343 }
2344
2345 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2346 form is quite easy, since dominator relationship for old basic blocks does
2347 not change.
2348
2349 There is however exception where inlining might change dominator relation
2350 across EH edges from basic block within inlined functions destinating
2351 to landing pads in function we inline into.
2352
2353 The function fills in PHI_RESULTs of such PHI nodes if they refer
2354 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2355 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2356 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2357 set, and this means that there will be no overlapping live ranges
2358 for the underlying symbol.
2359
2360 This might change in future if we allow redirecting of EH edges and
2361 we might want to change way build CFG pre-inlining to include
2362 all the possible edges then. */
2363 static void
2364 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2365 bool can_throw, bool nonlocal_goto)
2366 {
2367 edge e;
2368 edge_iterator ei;
2369
2370 FOR_EACH_EDGE (e, ei, bb->succs)
2371 if (!e->dest->aux
2372 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2373 {
2374 gphi *phi;
2375 gphi_iterator si;
2376
2377 if (!nonlocal_goto)
2378 gcc_assert (e->flags & EDGE_EH);
2379
2380 if (!can_throw)
2381 gcc_assert (!(e->flags & EDGE_EH));
2382
2383 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2384 {
2385 edge re;
2386
2387 phi = si.phi ();
2388
2389 /* For abnormal goto/call edges the receiver can be the
2390 ENTRY_BLOCK. Do not assert this cannot happen. */
2391
2392 gcc_assert ((e->flags & EDGE_EH)
2393 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2394
2395 re = find_edge (ret_bb, e->dest);
2396 gcc_checking_assert (re);
2397 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2398 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2399
2400 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2401 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2402 }
2403 }
2404 }
2405
2406 /* Insert clobbers for automatic variables of inlined ID->src_fn
2407 function at the start of basic block ID->eh_landing_pad_dest. */
2408
2409 static void
2410 add_clobbers_to_eh_landing_pad (copy_body_data *id)
2411 {
2412 tree var;
2413 basic_block bb = id->eh_landing_pad_dest;
2414 live_vars_map *vars = NULL;
2415 unsigned int cnt = 0;
2416 unsigned int i;
2417 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2418 if (VAR_P (var)
2419 && !DECL_HARD_REGISTER (var)
2420 && !TREE_THIS_VOLATILE (var)
2421 && !DECL_HAS_VALUE_EXPR_P (var)
2422 && !is_gimple_reg (var)
2423 && auto_var_in_fn_p (var, id->src_fn)
2424 && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
2425 {
2426 tree *t = id->decl_map->get (var);
2427 if (!t)
2428 continue;
2429 tree new_var = *t;
2430 if (VAR_P (new_var)
2431 && !DECL_HARD_REGISTER (new_var)
2432 && !TREE_THIS_VOLATILE (new_var)
2433 && !DECL_HAS_VALUE_EXPR_P (new_var)
2434 && !is_gimple_reg (new_var)
2435 && auto_var_in_fn_p (new_var, id->dst_fn))
2436 {
2437 if (vars == NULL)
2438 vars = new live_vars_map;
2439 vars->put (DECL_UID (var), cnt++);
2440 }
2441 }
2442 if (vars == NULL)
2443 return;
2444
2445 vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
2446 FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
2447 if (VAR_P (var))
2448 {
2449 edge e;
2450 edge_iterator ei;
2451 bool needed = false;
2452 unsigned int *v = vars->get (DECL_UID (var));
2453 if (v == NULL)
2454 continue;
2455 FOR_EACH_EDGE (e, ei, bb->preds)
2456 if ((e->flags & EDGE_EH) != 0
2457 && e->src->index >= id->add_clobbers_to_eh_landing_pads)
2458 {
2459 basic_block src_bb = (basic_block) e->src->aux;
2460
2461 if (bitmap_bit_p (&live[src_bb->index], *v))
2462 {
2463 needed = true;
2464 break;
2465 }
2466 }
2467 if (needed)
2468 {
2469 tree new_var = *id->decl_map->get (var);
2470 gimple_stmt_iterator gsi = gsi_after_labels (bb);
2471 tree clobber = build_clobber (TREE_TYPE (new_var));
2472 gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
2473 gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
2474 }
2475 }
2476 destroy_live_vars (live);
2477 delete vars;
2478 }
2479
2480 /* Copy edges from BB into its copy constructed earlier, scale profile
2481 accordingly. Edges will be taken care of later. Assume aux
2482 pointers to point to the copies of each BB. Return true if any
2483 debug stmts are left after a statement that must end the basic block. */
2484
2485 static bool
2486 copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
2487 basic_block ret_bb, basic_block abnormal_goto_dest,
2488 copy_body_data *id)
2489 {
2490 basic_block new_bb = (basic_block) bb->aux;
2491 edge_iterator ei;
2492 edge old_edge;
2493 gimple_stmt_iterator si;
2494 bool need_debug_cleanup = false;
2495
2496 /* Use the indices from the original blocks to create edges for the
2497 new ones. */
2498 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2499 if (!(old_edge->flags & EDGE_EH))
2500 {
2501 edge new_edge;
2502 int flags = old_edge->flags;
2503 location_t locus = old_edge->goto_locus;
2504
2505 /* Return edges do get a FALLTHRU flag when they get inlined. */
2506 if (old_edge->dest->index == EXIT_BLOCK
2507 && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2508 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2509 flags |= EDGE_FALLTHRU;
2510
2511 new_edge
2512 = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2513 new_edge->probability = old_edge->probability;
2514 if (!id->reset_location)
2515 new_edge->goto_locus = remap_location (locus, id);
2516 }
2517
2518 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2519 return false;
2520
2521 /* When doing function splitting, we must decrease count of the return block
2522 which was previously reachable by block we did not copy. */
2523 if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
2524 FOR_EACH_EDGE (old_edge, ei, bb->preds)
2525 if (old_edge->src->index != ENTRY_BLOCK
2526 && !old_edge->src->aux)
2527 new_bb->count -= old_edge->count ().apply_scale (num, den);
2528
2529 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2530 {
2531 gimple *copy_stmt;
2532 bool can_throw, nonlocal_goto;
2533
2534 copy_stmt = gsi_stmt (si);
2535 if (!is_gimple_debug (copy_stmt))
2536 update_stmt (copy_stmt);
2537
2538 /* Do this before the possible split_block. */
2539 gsi_next (&si);
2540
2541 /* If this tree could throw an exception, there are two
2542 cases where we need to add abnormal edge(s): the
2543 tree wasn't in a region and there is a "current
2544 region" in the caller; or the original tree had
2545 EH edges. In both cases split the block after the tree,
2546 and add abnormal edge(s) as needed; we need both
2547 those from the callee and the caller.
2548 We check whether the copy can throw, because the const
2549 propagation can change an INDIRECT_REF which throws
2550 into a COMPONENT_REF which doesn't. If the copy
2551 can throw, the original could also throw. */
2552 can_throw = stmt_can_throw_internal (cfun, copy_stmt);
2553 nonlocal_goto
2554 = (stmt_can_make_abnormal_goto (copy_stmt)
2555 && !computed_goto_p (copy_stmt));
2556
2557 if (can_throw || nonlocal_goto)
2558 {
2559 if (!gsi_end_p (si))
2560 {
2561 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2562 gsi_next (&si);
2563 if (gsi_end_p (si))
2564 need_debug_cleanup = true;
2565 }
2566 if (!gsi_end_p (si))
2567 /* Note that bb's predecessor edges aren't necessarily
2568 right at this point; split_block doesn't care. */
2569 {
2570 edge e = split_block (new_bb, copy_stmt);
2571
2572 new_bb = e->dest;
2573 new_bb->aux = e->src->aux;
2574 si = gsi_start_bb (new_bb);
2575 }
2576 }
2577
2578 bool update_probs = false;
2579
2580 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2581 {
2582 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2583 update_probs = true;
2584 }
2585 else if (can_throw)
2586 {
2587 make_eh_edges (copy_stmt);
2588 update_probs = true;
2589 }
2590
2591 /* EH edges may not match old edges. Copy as much as possible. */
2592 if (update_probs)
2593 {
2594 edge e;
2595 edge_iterator ei;
2596 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2597
2598 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2599 if ((old_edge->flags & EDGE_EH)
2600 && (e = find_edge (copy_stmt_bb,
2601 (basic_block) old_edge->dest->aux))
2602 && (e->flags & EDGE_EH))
2603 e->probability = old_edge->probability;
2604
2605 FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
2606 if (e->flags & EDGE_EH)
2607 {
2608 if (!e->probability.initialized_p ())
2609 e->probability = profile_probability::never ();
2610 if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
2611 {
2612 if (id->eh_landing_pad_dest == NULL)
2613 id->eh_landing_pad_dest = e->dest;
2614 else
2615 gcc_assert (id->eh_landing_pad_dest == e->dest);
2616 }
2617 }
2618 }
2619
2620
2621 /* If the call we inline cannot make abnormal goto do not add
2622 additional abnormal edges but only retain those already present
2623 in the original function body. */
2624 if (abnormal_goto_dest == NULL)
2625 nonlocal_goto = false;
2626 if (nonlocal_goto)
2627 {
2628 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2629
2630 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2631 nonlocal_goto = false;
2632 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2633 in OpenMP regions which aren't allowed to be left abnormally.
2634 So, no need to add abnormal edge in that case. */
2635 else if (is_gimple_call (copy_stmt)
2636 && gimple_call_internal_p (copy_stmt)
2637 && (gimple_call_internal_fn (copy_stmt)
2638 == IFN_ABNORMAL_DISPATCHER)
2639 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2640 nonlocal_goto = false;
2641 else
2642 make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
2643 EDGE_ABNORMAL);
2644 }
2645
2646 if ((can_throw || nonlocal_goto)
2647 && gimple_in_ssa_p (cfun))
2648 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2649 can_throw, nonlocal_goto);
2650 }
2651 return need_debug_cleanup;
2652 }
2653
2654 /* Copy the PHIs. All blocks and edges are copied, some blocks
2655 was possibly split and new outgoing EH edges inserted.
2656 BB points to the block of original function and AUX pointers links
2657 the original and newly copied blocks. */
2658
2659 static void
2660 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2661 {
2662 basic_block const new_bb = (basic_block) bb->aux;
2663 edge_iterator ei;
2664 gphi *phi;
2665 gphi_iterator si;
2666 edge new_edge;
2667 bool inserted = false;
2668
2669 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2670 {
2671 tree res, new_res;
2672 gphi *new_phi;
2673
2674 phi = si.phi ();
2675 res = PHI_RESULT (phi);
2676 new_res = res;
2677 if (!virtual_operand_p (res))
2678 {
2679 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2680 if (EDGE_COUNT (new_bb->preds) == 0)
2681 {
2682 /* Technically we'd want a SSA_DEFAULT_DEF here... */
2683 SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
2684 }
2685 else
2686 {
2687 new_phi = create_phi_node (new_res, new_bb);
2688 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2689 {
2690 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
2691 bb);
2692 tree arg;
2693 tree new_arg;
2694 edge_iterator ei2;
2695 location_t locus;
2696
2697 /* When doing partial cloning, we allow PHIs on the entry
2698 block as long as all the arguments are the same.
2699 Find any input edge to see argument to copy. */
2700 if (!old_edge)
2701 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2702 if (!old_edge->src->aux)
2703 break;
2704
2705 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2706 new_arg = arg;
2707 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2708 gcc_assert (new_arg);
2709 /* With return slot optimization we can end up with
2710 non-gimple (foo *)&this->m, fix that here. */
2711 if (TREE_CODE (new_arg) != SSA_NAME
2712 && TREE_CODE (new_arg) != FUNCTION_DECL
2713 && !is_gimple_val (new_arg))
2714 {
2715 gimple_seq stmts = NULL;
2716 new_arg = force_gimple_operand (new_arg, &stmts, true,
2717 NULL);
2718 gsi_insert_seq_on_edge (new_edge, stmts);
2719 inserted = true;
2720 }
2721 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2722 if (id->reset_location)
2723 locus = input_location;
2724 else
2725 locus = remap_location (locus, id);
2726 add_phi_arg (new_phi, new_arg, new_edge, locus);
2727 }
2728 }
2729 }
2730 }
2731
2732 /* Commit the delayed edge insertions. */
2733 if (inserted)
2734 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2735 gsi_commit_one_edge_insert (new_edge, NULL);
2736 }
2737
2738
2739 /* Wrapper for remap_decl so it can be used as a callback. */
2740
2741 static tree
2742 remap_decl_1 (tree decl, void *data)
2743 {
2744 return remap_decl (decl, (copy_body_data *) data);
2745 }
2746
2747 /* Build struct function and associated datastructures for the new clone
2748 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2749 the cfun to the function of new_fndecl (and current_function_decl too). */
2750
2751 static void
2752 initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
2753 {
2754 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2755
2756 if (!DECL_ARGUMENTS (new_fndecl))
2757 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2758 if (!DECL_RESULT (new_fndecl))
2759 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2760
2761 /* Register specific tree functions. */
2762 gimple_register_cfg_hooks ();
2763
2764 /* Get clean struct function. */
2765 push_struct_function (new_fndecl);
2766
2767 /* We will rebuild these, so just sanity check that they are empty. */
2768 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2769 gcc_assert (cfun->local_decls == NULL);
2770 gcc_assert (cfun->cfg == NULL);
2771 gcc_assert (cfun->decl == new_fndecl);
2772
2773 /* Copy items we preserve during cloning. */
2774 cfun->static_chain_decl = src_cfun->static_chain_decl;
2775 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2776 cfun->function_end_locus = src_cfun->function_end_locus;
2777 cfun->curr_properties = src_cfun->curr_properties;
2778 cfun->last_verified = src_cfun->last_verified;
2779 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2780 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2781 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2782 cfun->calls_eh_return = src_cfun->calls_eh_return;
2783 cfun->stdarg = src_cfun->stdarg;
2784 cfun->after_inlining = src_cfun->after_inlining;
2785 cfun->can_throw_non_call_exceptions
2786 = src_cfun->can_throw_non_call_exceptions;
2787 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2788 cfun->returns_struct = src_cfun->returns_struct;
2789 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2790
2791 init_empty_tree_cfg ();
2792
2793 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2794
2795 profile_count num = count;
2796 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
2797 profile_count::adjust_for_ipa_scaling (&num, &den);
2798
2799 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2800 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2801 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2802 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2803 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
2804 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2805 if (src_cfun->eh)
2806 init_eh_for_function ();
2807
2808 if (src_cfun->gimple_df)
2809 {
2810 init_tree_ssa (cfun);
2811 cfun->gimple_df->in_ssa_p = src_cfun->gimple_df->in_ssa_p;
2812 if (cfun->gimple_df->in_ssa_p)
2813 init_ssa_operands (cfun);
2814 }
2815 }
2816
2817 /* Helper function for copy_cfg_body. Move debug stmts from the end
2818 of NEW_BB to the beginning of successor basic blocks when needed. If the
2819 successor has multiple predecessors, reset them, otherwise keep
2820 their value. */
2821
2822 static void
2823 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2824 {
2825 edge e;
2826 edge_iterator ei;
2827 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2828
2829 if (gsi_end_p (si)
2830 || gsi_one_before_end_p (si)
2831 || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
2832 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2833 return;
2834
2835 FOR_EACH_EDGE (e, ei, new_bb->succs)
2836 {
2837 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2838 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2839 while (is_gimple_debug (gsi_stmt (ssi)))
2840 {
2841 gimple *stmt = gsi_stmt (ssi);
2842 gdebug *new_stmt;
2843 tree var;
2844 tree value;
2845
2846 /* For the last edge move the debug stmts instead of copying
2847 them. */
2848 if (ei_one_before_end_p (ei))
2849 {
2850 si = ssi;
2851 gsi_prev (&ssi);
2852 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2853 {
2854 gimple_debug_bind_reset_value (stmt);
2855 gimple_set_location (stmt, UNKNOWN_LOCATION);
2856 }
2857 gsi_remove (&si, false);
2858 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2859 continue;
2860 }
2861
2862 if (gimple_debug_bind_p (stmt))
2863 {
2864 var = gimple_debug_bind_get_var (stmt);
2865 if (single_pred_p (e->dest))
2866 {
2867 value = gimple_debug_bind_get_value (stmt);
2868 value = unshare_expr (value);
2869 new_stmt = gimple_build_debug_bind (var, value, stmt);
2870 }
2871 else
2872 new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
2873 }
2874 else if (gimple_debug_source_bind_p (stmt))
2875 {
2876 var = gimple_debug_source_bind_get_var (stmt);
2877 value = gimple_debug_source_bind_get_value (stmt);
2878 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2879 }
2880 else if (gimple_debug_nonbind_marker_p (stmt))
2881 new_stmt = as_a <gdebug *> (gimple_copy (stmt));
2882 else
2883 gcc_unreachable ();
2884 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2885 id->debug_stmts.safe_push (new_stmt);
2886 gsi_prev (&ssi);
2887 }
2888 }
2889 }
2890
2891 /* Make a copy of the sub-loops of SRC_PARENT and place them
2892 as siblings of DEST_PARENT. */
2893
2894 static void
2895 copy_loops (copy_body_data *id,
2896 class loop *dest_parent, class loop *src_parent)
2897 {
2898 class loop *src_loop = src_parent->inner;
2899 while (src_loop)
2900 {
2901 if (!id->blocks_to_copy
2902 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2903 {
2904 class loop *dest_loop = alloc_loop ();
2905
2906 /* Assign the new loop its header and latch and associate
2907 those with the new loop. */
2908 dest_loop->header = (basic_block)src_loop->header->aux;
2909 dest_loop->header->loop_father = dest_loop;
2910 if (src_loop->latch != NULL)
2911 {
2912 dest_loop->latch = (basic_block)src_loop->latch->aux;
2913 dest_loop->latch->loop_father = dest_loop;
2914 }
2915
2916 /* Copy loop meta-data. */
2917 copy_loop_info (src_loop, dest_loop);
2918 if (dest_loop->unroll)
2919 cfun->has_unroll = true;
2920 if (dest_loop->force_vectorize)
2921 cfun->has_force_vectorize_loops = true;
2922 if (id->src_cfun->last_clique != 0)
2923 dest_loop->owned_clique
2924 = remap_dependence_clique (id,
2925 src_loop->owned_clique
2926 ? src_loop->owned_clique : 1);
2927
2928 /* Finally place it into the loop array and the loop tree. */
2929 place_new_loop (cfun, dest_loop);
2930 flow_loop_tree_node_add (dest_parent, dest_loop);
2931
2932 if (src_loop->simduid)
2933 {
2934 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2935 cfun->has_simduid_loops = true;
2936 }
2937
2938 /* Recurse. */
2939 copy_loops (id, dest_loop, src_loop);
2940 }
2941 src_loop = src_loop->next;
2942 }
2943 }
2944
2945 /* Call redirect_call_stmt_to_callee on all calls in BB. */
2946
2947 void
2948 redirect_all_calls (copy_body_data * id, basic_block bb)
2949 {
2950 gimple_stmt_iterator si;
2951 gimple *last = last_stmt (bb);
2952 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2953 {
2954 gimple *stmt = gsi_stmt (si);
2955 if (is_gimple_call (stmt))
2956 {
2957 tree old_lhs = gimple_call_lhs (stmt);
2958 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2959 if (edge)
2960 {
2961 gimple *new_stmt
2962 = cgraph_edge::redirect_call_stmt_to_callee (edge);
2963 /* If IPA-SRA transformation, run as part of edge redirection,
2964 removed the LHS because it is unused, save it to
2965 killed_new_ssa_names so that we can prune it from debug
2966 statements. */
2967 if (old_lhs
2968 && TREE_CODE (old_lhs) == SSA_NAME
2969 && !gimple_call_lhs (new_stmt))
2970 {
2971 if (!id->killed_new_ssa_names)
2972 id->killed_new_ssa_names = new hash_set<tree> (16);
2973 id->killed_new_ssa_names->add (old_lhs);
2974 }
2975
2976 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2977 gimple_purge_dead_eh_edges (bb);
2978 }
2979 }
2980 }
2981 }
2982
2983 /* Make a copy of the body of FN so that it can be inserted inline in
2984 another function. Walks FN via CFG, returns new fndecl. */
2985
2986 static tree
2987 copy_cfg_body (copy_body_data * id,
2988 basic_block entry_block_map, basic_block exit_block_map,
2989 basic_block new_entry)
2990 {
2991 tree callee_fndecl = id->src_fn;
2992 /* Original cfun for the callee, doesn't change. */
2993 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2994 struct function *cfun_to_copy;
2995 basic_block bb;
2996 tree new_fndecl = NULL;
2997 bool need_debug_cleanup = false;
2998 int last;
2999 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
3000 profile_count num = entry_block_map->count;
3001
3002 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
3003
3004 /* Register specific tree functions. */
3005 gimple_register_cfg_hooks ();
3006
3007 /* If we are inlining just region of the function, make sure to connect
3008 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
3009 part of loop, we must compute frequency and probability of
3010 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
3011 probabilities of edges incoming from nonduplicated region. */
3012 if (new_entry)
3013 {
3014 edge e;
3015 edge_iterator ei;
3016 den = profile_count::zero ();
3017
3018 FOR_EACH_EDGE (e, ei, new_entry->preds)
3019 if (!e->src->aux)
3020 den += e->count ();
3021 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
3022 }
3023
3024 profile_count::adjust_for_ipa_scaling (&num, &den);
3025
3026 /* Must have a CFG here at this point. */
3027 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
3028 (DECL_STRUCT_FUNCTION (callee_fndecl)));
3029
3030
3031 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
3032 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
3033 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
3034 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
3035
3036 /* Duplicate any exception-handling regions. */
3037 if (cfun->eh)
3038 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
3039 remap_decl_1, id);
3040
3041 /* Use aux pointers to map the original blocks to copy. */
3042 FOR_EACH_BB_FN (bb, cfun_to_copy)
3043 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
3044 {
3045 basic_block new_bb = copy_bb (id, bb, num, den);
3046 bb->aux = new_bb;
3047 new_bb->aux = bb;
3048 new_bb->loop_father = entry_block_map->loop_father;
3049 }
3050
3051 last = last_basic_block_for_fn (cfun);
3052
3053 /* Now that we've duplicated the blocks, duplicate their edges. */
3054 basic_block abnormal_goto_dest = NULL;
3055 if (id->call_stmt
3056 && stmt_can_make_abnormal_goto (id->call_stmt))
3057 {
3058 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
3059
3060 bb = gimple_bb (id->call_stmt);
3061 gsi_next (&gsi);
3062 if (gsi_end_p (gsi))
3063 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
3064 }
3065 FOR_ALL_BB_FN (bb, cfun_to_copy)
3066 if (!id->blocks_to_copy
3067 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3068 need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
3069 abnormal_goto_dest, id);
3070
3071 if (id->eh_landing_pad_dest)
3072 {
3073 add_clobbers_to_eh_landing_pad (id);
3074 id->eh_landing_pad_dest = NULL;
3075 }
3076
3077 if (new_entry)
3078 {
3079 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
3080 EDGE_FALLTHRU);
3081 e->probability = profile_probability::always ();
3082 }
3083
3084 /* Duplicate the loop tree, if available and wanted. */
3085 if (loops_for_fn (src_cfun) != NULL
3086 && current_loops != NULL)
3087 {
3088 copy_loops (id, entry_block_map->loop_father,
3089 get_loop (src_cfun, 0));
3090 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
3091 loops_state_set (LOOPS_NEED_FIXUP);
3092 }
3093
3094 /* If the loop tree in the source function needed fixup, mark the
3095 destination loop tree for fixup, too. */
3096 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
3097 loops_state_set (LOOPS_NEED_FIXUP);
3098
3099 if (gimple_in_ssa_p (cfun))
3100 FOR_ALL_BB_FN (bb, cfun_to_copy)
3101 if (!id->blocks_to_copy
3102 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
3103 copy_phis_for_bb (bb, id);
3104
3105 FOR_ALL_BB_FN (bb, cfun_to_copy)
3106 if (bb->aux)
3107 {
3108 if (need_debug_cleanup
3109 && bb->index != ENTRY_BLOCK
3110 && bb->index != EXIT_BLOCK)
3111 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
3112 /* Update call edge destinations. This cannot be done before loop
3113 info is updated, because we may split basic blocks. */
3114 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
3115 && bb->index != ENTRY_BLOCK
3116 && bb->index != EXIT_BLOCK)
3117 redirect_all_calls (id, (basic_block)bb->aux);
3118 ((basic_block)bb->aux)->aux = NULL;
3119 bb->aux = NULL;
3120 }
3121
3122 /* Zero out AUX fields of newly created block during EH edge
3123 insertion. */
3124 for (; last < last_basic_block_for_fn (cfun); last++)
3125 {
3126 if (need_debug_cleanup)
3127 maybe_move_debug_stmts_to_successors (id,
3128 BASIC_BLOCK_FOR_FN (cfun, last));
3129 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
3130 /* Update call edge destinations. This cannot be done before loop
3131 info is updated, because we may split basic blocks. */
3132 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
3133 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
3134 }
3135 entry_block_map->aux = NULL;
3136 exit_block_map->aux = NULL;
3137
3138 if (id->eh_map)
3139 {
3140 delete id->eh_map;
3141 id->eh_map = NULL;
3142 }
3143 if (id->dependence_map)
3144 {
3145 delete id->dependence_map;
3146 id->dependence_map = NULL;
3147 }
3148
3149 return new_fndecl;
3150 }
3151
3152 /* Copy the debug STMT using ID. We deal with these statements in a
3153 special way: if any variable in their VALUE expression wasn't
3154 remapped yet, we won't remap it, because that would get decl uids
3155 out of sync, causing codegen differences between -g and -g0. If
3156 this arises, we drop the VALUE expression altogether. */
3157
3158 static void
3159 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
3160 {
3161 tree t, *n;
3162 struct walk_stmt_info wi;
3163
3164 if (tree block = gimple_block (stmt))
3165 {
3166 n = id->decl_map->get (block);
3167 gimple_set_block (stmt, n ? *n : id->block);
3168 }
3169
3170 if (gimple_debug_nonbind_marker_p (stmt))
3171 {
3172 if (id->call_stmt && !gimple_block (stmt))
3173 {
3174 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3175 gsi_remove (&gsi, true);
3176 }
3177 return;
3178 }
3179
3180 /* Remap all the operands in COPY. */
3181 memset (&wi, 0, sizeof (wi));
3182 wi.info = id;
3183
3184 processing_debug_stmt = 1;
3185
3186 if (gimple_debug_source_bind_p (stmt))
3187 t = gimple_debug_source_bind_get_var (stmt);
3188 else if (gimple_debug_bind_p (stmt))
3189 t = gimple_debug_bind_get_var (stmt);
3190 else
3191 gcc_unreachable ();
3192
3193 if (TREE_CODE (t) == PARM_DECL && id->debug_map
3194 && (n = id->debug_map->get (t)))
3195 {
3196 gcc_assert (VAR_P (*n));
3197 t = *n;
3198 }
3199 else if (VAR_P (t) && !is_global_var (t) && !id->decl_map->get (t))
3200 /* T is a non-localized variable. */;
3201 else
3202 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
3203
3204 if (gimple_debug_bind_p (stmt))
3205 {
3206 gimple_debug_bind_set_var (stmt, t);
3207
3208 if (gimple_debug_bind_has_value_p (stmt))
3209 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
3210 remap_gimple_op_r, &wi, NULL);
3211
3212 /* Punt if any decl couldn't be remapped. */
3213 if (processing_debug_stmt < 0)
3214 gimple_debug_bind_reset_value (stmt);
3215 }
3216 else if (gimple_debug_source_bind_p (stmt))
3217 {
3218 gimple_debug_source_bind_set_var (stmt, t);
3219 /* When inlining and source bind refers to one of the optimized
3220 away parameters, change the source bind into normal debug bind
3221 referring to the corresponding DEBUG_EXPR_DECL that should have
3222 been bound before the call stmt. */
3223 t = gimple_debug_source_bind_get_value (stmt);
3224 if (t != NULL_TREE
3225 && TREE_CODE (t) == PARM_DECL
3226 && id->call_stmt)
3227 {
3228 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
3229 unsigned int i;
3230 if (debug_args != NULL)
3231 {
3232 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
3233 if ((**debug_args)[i] == DECL_ORIGIN (t)
3234 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
3235 {
3236 t = (**debug_args)[i + 1];
3237 stmt->subcode = GIMPLE_DEBUG_BIND;
3238 gimple_debug_bind_set_value (stmt, t);
3239 break;
3240 }
3241 }
3242 }
3243 if (gimple_debug_source_bind_p (stmt))
3244 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
3245 remap_gimple_op_r, &wi, NULL);
3246 }
3247
3248 processing_debug_stmt = 0;
3249
3250 update_stmt (stmt);
3251 }
3252
3253 /* Process deferred debug stmts. In order to give values better odds
3254 of being successfully remapped, we delay the processing of debug
3255 stmts until all other stmts that might require remapping are
3256 processed. */
3257
3258 static void
3259 copy_debug_stmts (copy_body_data *id)
3260 {
3261 size_t i;
3262 gdebug *stmt;
3263
3264 if (!id->debug_stmts.exists ())
3265 return;
3266
3267 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
3268 copy_debug_stmt (stmt, id);
3269
3270 id->debug_stmts.release ();
3271 }
3272
3273 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
3274 another function. */
3275
3276 static tree
3277 copy_tree_body (copy_body_data *id)
3278 {
3279 tree fndecl = id->src_fn;
3280 tree body = DECL_SAVED_TREE (fndecl);
3281
3282 walk_tree (&body, copy_tree_body_r, id, NULL);
3283
3284 return body;
3285 }
3286
3287 /* Make a copy of the body of FN so that it can be inserted inline in
3288 another function. */
3289
3290 static tree
3291 copy_body (copy_body_data *id,
3292 basic_block entry_block_map, basic_block exit_block_map,
3293 basic_block new_entry)
3294 {
3295 tree fndecl = id->src_fn;
3296 tree body;
3297
3298 /* If this body has a CFG, walk CFG and copy. */
3299 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
3300 body = copy_cfg_body (id, entry_block_map, exit_block_map,
3301 new_entry);
3302 copy_debug_stmts (id);
3303 delete id->killed_new_ssa_names;
3304 id->killed_new_ssa_names = NULL;
3305
3306 return body;
3307 }
3308
3309 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
3310 defined in function FN, or of a data member thereof. */
3311
3312 static bool
3313 self_inlining_addr_expr (tree value, tree fn)
3314 {
3315 tree var;
3316
3317 if (TREE_CODE (value) != ADDR_EXPR)
3318 return false;
3319
3320 var = get_base_address (TREE_OPERAND (value, 0));
3321
3322 return var && auto_var_in_fn_p (var, fn);
3323 }
3324
3325 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
3326 lexical block and line number information from base_stmt, if given,
3327 or from the last stmt of the block otherwise. */
3328
3329 static gimple *
3330 insert_init_debug_bind (copy_body_data *id,
3331 basic_block bb, tree var, tree value,
3332 gimple *base_stmt)
3333 {
3334 gimple *note;
3335 gimple_stmt_iterator gsi;
3336 tree tracked_var;
3337
3338 if (!gimple_in_ssa_p (id->src_cfun))
3339 return NULL;
3340
3341 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3342 return NULL;
3343
3344 tracked_var = target_for_debug_bind (var);
3345 if (!tracked_var)
3346 return NULL;
3347
3348 if (bb)
3349 {
3350 gsi = gsi_last_bb (bb);
3351 if (!base_stmt && !gsi_end_p (gsi))
3352 base_stmt = gsi_stmt (gsi);
3353 }
3354
3355 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3356
3357 if (bb)
3358 {
3359 if (!gsi_end_p (gsi))
3360 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3361 else
3362 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3363 }
3364
3365 return note;
3366 }
3367
3368 static void
3369 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3370 {
3371 /* If VAR represents a zero-sized variable, it's possible that the
3372 assignment statement may result in no gimple statements. */
3373 if (init_stmt)
3374 {
3375 gimple_stmt_iterator si = gsi_last_bb (bb);
3376
3377 /* We can end up with init statements that store to a non-register
3378 from a rhs with a conversion. Handle that here by forcing the
3379 rhs into a temporary. gimple_regimplify_operands is not
3380 prepared to do this for us. */
3381 if (!is_gimple_debug (init_stmt)
3382 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3383 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3384 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3385 {
3386 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3387 gimple_expr_type (init_stmt),
3388 gimple_assign_rhs1 (init_stmt));
3389 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3390 GSI_NEW_STMT);
3391 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3392 gimple_assign_set_rhs1 (init_stmt, rhs);
3393 }
3394 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3395 if (!is_gimple_debug (init_stmt))
3396 {
3397 gimple_regimplify_operands (init_stmt, &si);
3398
3399 tree def = gimple_assign_lhs (init_stmt);
3400 insert_init_debug_bind (id, bb, def, def, init_stmt);
3401 }
3402 }
3403 }
3404
3405 /* Deal with mismatched formal/actual parameters, in a rather brute-force way
3406 if need be (which should only be necessary for invalid programs). Attempt
3407 to convert VAL to TYPE and return the result if it is possible, just return
3408 a zero constant of the given type if it fails. */
3409
3410 tree
3411 force_value_to_type (tree type, tree value)
3412 {
3413 /* If we can match up types by promotion/demotion do so. */
3414 if (fold_convertible_p (type, value))
3415 return fold_convert (type, value);
3416
3417 /* ??? For valid programs we should not end up here.
3418 Still if we end up with truly mismatched types here, fall back
3419 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3420 GIMPLE to the following passes. */
3421 if (!is_gimple_reg_type (TREE_TYPE (value))
3422 || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
3423 return fold_build1 (VIEW_CONVERT_EXPR, type, value);
3424 else
3425 return build_zero_cst (type);
3426 }
3427
3428 /* Initialize parameter P with VALUE. If needed, produce init statement
3429 at the end of BB. When BB is NULL, we return init statement to be
3430 output later. */
3431 static gimple *
3432 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3433 basic_block bb, tree *vars)
3434 {
3435 gimple *init_stmt = NULL;
3436 tree var;
3437 tree rhs = value;
3438 tree def = (gimple_in_ssa_p (cfun)
3439 ? ssa_default_def (id->src_cfun, p) : NULL);
3440
3441 if (value
3442 && value != error_mark_node
3443 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3444 rhs = force_value_to_type (TREE_TYPE (p), value);
3445
3446 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3447 here since the type of this decl must be visible to the calling
3448 function. */
3449 var = copy_decl_to_var (p, id);
3450
3451 /* Declare this new variable. */
3452 DECL_CHAIN (var) = *vars;
3453 *vars = var;
3454
3455 /* Make gimplifier happy about this variable. */
3456 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3457
3458 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3459 we would not need to create a new variable here at all, if it
3460 weren't for debug info. Still, we can just use the argument
3461 value. */
3462 if (TREE_READONLY (p)
3463 && !TREE_ADDRESSABLE (p)
3464 && value && !TREE_SIDE_EFFECTS (value)
3465 && !def)
3466 {
3467 /* We may produce non-gimple trees by adding NOPs or introduce
3468 invalid sharing when operand is not really constant.
3469 It is not big deal to prohibit constant propagation here as
3470 we will constant propagate in DOM1 pass anyway. */
3471 if (is_gimple_min_invariant (value)
3472 && useless_type_conversion_p (TREE_TYPE (p),
3473 TREE_TYPE (value))
3474 /* We have to be very careful about ADDR_EXPR. Make sure
3475 the base variable isn't a local variable of the inlined
3476 function, e.g., when doing recursive inlining, direct or
3477 mutually-recursive or whatever, which is why we don't
3478 just test whether fn == current_function_decl. */
3479 && ! self_inlining_addr_expr (value, fn))
3480 {
3481 insert_decl_map (id, p, value);
3482 insert_debug_decl_map (id, p, var);
3483 return insert_init_debug_bind (id, bb, var, value, NULL);
3484 }
3485 }
3486
3487 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3488 that way, when the PARM_DECL is encountered, it will be
3489 automatically replaced by the VAR_DECL. */
3490 insert_decl_map (id, p, var);
3491
3492 /* Even if P was TREE_READONLY, the new VAR should not be.
3493 In the original code, we would have constructed a
3494 temporary, and then the function body would have never
3495 changed the value of P. However, now, we will be
3496 constructing VAR directly. The constructor body may
3497 change its value multiple times as it is being
3498 constructed. Therefore, it must not be TREE_READONLY;
3499 the back-end assumes that TREE_READONLY variable is
3500 assigned to only once. */
3501 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3502 TREE_READONLY (var) = 0;
3503
3504 /* If there is no setup required and we are in SSA, take the easy route
3505 replacing all SSA names representing the function parameter by the
3506 SSA name passed to function.
3507
3508 We need to construct map for the variable anyway as it might be used
3509 in different SSA names when parameter is set in function.
3510
3511 Do replacement at -O0 for const arguments replaced by constant.
3512 This is important for builtin_constant_p and other construct requiring
3513 constant argument to be visible in inlined function body. */
3514 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3515 && (optimize
3516 || (TREE_READONLY (p)
3517 && is_gimple_min_invariant (rhs)))
3518 && (TREE_CODE (rhs) == SSA_NAME
3519 || is_gimple_min_invariant (rhs))
3520 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3521 {
3522 insert_decl_map (id, def, rhs);
3523 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3524 }
3525
3526 /* If the value of argument is never used, don't care about initializing
3527 it. */
3528 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3529 {
3530 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3531 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3532 }
3533
3534 /* Initialize this VAR_DECL from the equivalent argument. Convert
3535 the argument to the proper type in case it was promoted. */
3536 if (value)
3537 {
3538 if (rhs == error_mark_node)
3539 {
3540 insert_decl_map (id, p, var);
3541 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3542 }
3543
3544 STRIP_USELESS_TYPE_CONVERSION (rhs);
3545
3546 /* If we are in SSA form properly remap the default definition
3547 or assign to a dummy SSA name if the parameter is unused and
3548 we are not optimizing. */
3549 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3550 {
3551 if (def)
3552 {
3553 def = remap_ssa_name (def, id);
3554 init_stmt = gimple_build_assign (def, rhs);
3555 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3556 set_ssa_default_def (cfun, var, NULL);
3557 }
3558 else if (!optimize)
3559 {
3560 def = make_ssa_name (var);
3561 init_stmt = gimple_build_assign (def, rhs);
3562 }
3563 }
3564 else
3565 init_stmt = gimple_build_assign (var, rhs);
3566
3567 if (bb && init_stmt)
3568 insert_init_stmt (id, bb, init_stmt);
3569 }
3570 return init_stmt;
3571 }
3572
3573 /* Generate code to initialize the parameters of the function at the
3574 top of the stack in ID from the GIMPLE_CALL STMT. */
3575
3576 static void
3577 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3578 tree fn, basic_block bb)
3579 {
3580 tree parms;
3581 size_t i;
3582 tree p;
3583 tree vars = NULL_TREE;
3584 tree static_chain = gimple_call_chain (stmt);
3585
3586 /* Figure out what the parameters are. */
3587 parms = DECL_ARGUMENTS (fn);
3588
3589 /* Loop through the parameter declarations, replacing each with an
3590 equivalent VAR_DECL, appropriately initialized. */
3591 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3592 {
3593 tree val;
3594 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3595 setup_one_parameter (id, p, val, fn, bb, &vars);
3596 }
3597 /* After remapping parameters remap their types. This has to be done
3598 in a second loop over all parameters to appropriately remap
3599 variable sized arrays when the size is specified in a
3600 parameter following the array. */
3601 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3602 {
3603 tree *varp = id->decl_map->get (p);
3604 if (varp && VAR_P (*varp))
3605 {
3606 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3607 ? ssa_default_def (id->src_cfun, p) : NULL);
3608 tree var = *varp;
3609 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3610 /* Also remap the default definition if it was remapped
3611 to the default definition of the parameter replacement
3612 by the parameter setup. */
3613 if (def)
3614 {
3615 tree *defp = id->decl_map->get (def);
3616 if (defp
3617 && TREE_CODE (*defp) == SSA_NAME
3618 && SSA_NAME_VAR (*defp) == var)
3619 TREE_TYPE (*defp) = TREE_TYPE (var);
3620 }
3621 }
3622 }
3623
3624 /* Initialize the static chain. */
3625 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3626 gcc_assert (fn != current_function_decl);
3627 if (p)
3628 {
3629 /* No static chain? Seems like a bug in tree-nested.c. */
3630 gcc_assert (static_chain);
3631
3632 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3633 }
3634
3635 declare_inline_vars (id->block, vars);
3636 }
3637
3638
3639 /* Declare a return variable to replace the RESULT_DECL for the
3640 function we are calling. An appropriate DECL_STMT is returned.
3641 The USE_STMT is filled to contain a use of the declaration to
3642 indicate the return value of the function.
3643
3644 RETURN_SLOT, if non-null is place where to store the result. It
3645 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3646 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3647
3648 The return value is a (possibly null) value that holds the result
3649 as seen by the caller. */
3650
3651 static tree
3652 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3653 basic_block entry_bb)
3654 {
3655 tree callee = id->src_fn;
3656 tree result = DECL_RESULT (callee);
3657 tree callee_type = TREE_TYPE (result);
3658 tree caller_type;
3659 tree var, use;
3660
3661 /* Handle type-mismatches in the function declaration return type
3662 vs. the call expression. */
3663 if (modify_dest)
3664 caller_type = TREE_TYPE (modify_dest);
3665 else if (return_slot)
3666 caller_type = TREE_TYPE (return_slot);
3667 else /* No LHS on the call. */
3668 caller_type = TREE_TYPE (TREE_TYPE (callee));
3669
3670 /* We don't need to do anything for functions that don't return anything. */
3671 if (VOID_TYPE_P (callee_type))
3672 return NULL_TREE;
3673
3674 /* If there was a return slot, then the return value is the
3675 dereferenced address of that object. */
3676 if (return_slot)
3677 {
3678 /* The front end shouldn't have used both return_slot and
3679 a modify expression. */
3680 gcc_assert (!modify_dest);
3681 if (DECL_BY_REFERENCE (result))
3682 {
3683 tree return_slot_addr = build_fold_addr_expr (return_slot);
3684 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3685
3686 /* We are going to construct *&return_slot and we can't do that
3687 for variables believed to be not addressable.
3688
3689 FIXME: This check possibly can match, because values returned
3690 via return slot optimization are not believed to have address
3691 taken by alias analysis. */
3692 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3693 var = return_slot_addr;
3694 mark_addressable (return_slot);
3695 }
3696 else
3697 {
3698 var = return_slot;
3699 gcc_assert (TREE_CODE (var) != SSA_NAME);
3700 if (TREE_ADDRESSABLE (result))
3701 mark_addressable (var);
3702 }
3703 if (DECL_NOT_GIMPLE_REG_P (result)
3704 && DECL_P (var))
3705 DECL_NOT_GIMPLE_REG_P (var) = 1;
3706
3707 if (!useless_type_conversion_p (callee_type, caller_type))
3708 var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
3709
3710 use = NULL;
3711 goto done;
3712 }
3713
3714 /* All types requiring non-trivial constructors should have been handled. */
3715 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3716
3717 /* Attempt to avoid creating a new temporary variable. */
3718 if (modify_dest
3719 && TREE_CODE (modify_dest) != SSA_NAME)
3720 {
3721 bool use_it = false;
3722
3723 /* We can't use MODIFY_DEST if there's type promotion involved. */
3724 if (!useless_type_conversion_p (callee_type, caller_type))
3725 use_it = false;
3726
3727 /* ??? If we're assigning to a variable sized type, then we must
3728 reuse the destination variable, because we've no good way to
3729 create variable sized temporaries at this point. */
3730 else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
3731 use_it = true;
3732
3733 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3734 reuse it as the result of the call directly. Don't do this if
3735 it would promote MODIFY_DEST to addressable. */
3736 else if (TREE_ADDRESSABLE (result))
3737 use_it = false;
3738 else
3739 {
3740 tree base_m = get_base_address (modify_dest);
3741
3742 /* If the base isn't a decl, then it's a pointer, and we don't
3743 know where that's going to go. */
3744 if (!DECL_P (base_m))
3745 use_it = false;
3746 else if (is_global_var (base_m))
3747 use_it = false;
3748 else if (DECL_NOT_GIMPLE_REG_P (result)
3749 && !DECL_NOT_GIMPLE_REG_P (base_m))
3750 use_it = false;
3751 else if (!TREE_ADDRESSABLE (base_m))
3752 use_it = true;
3753 }
3754
3755 if (use_it)
3756 {
3757 var = modify_dest;
3758 use = NULL;
3759 goto done;
3760 }
3761 }
3762
3763 gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
3764
3765 var = copy_result_decl_to_var (result, id);
3766 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3767
3768 /* Do not have the rest of GCC warn about this variable as it should
3769 not be visible to the user. */
3770 TREE_NO_WARNING (var) = 1;
3771
3772 declare_inline_vars (id->block, var);
3773
3774 /* Build the use expr. If the return type of the function was
3775 promoted, convert it back to the expected type. */
3776 use = var;
3777 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3778 {
3779 /* If we can match up types by promotion/demotion do so. */
3780 if (fold_convertible_p (caller_type, var))
3781 use = fold_convert (caller_type, var);
3782 else
3783 {
3784 /* ??? For valid programs we should not end up here.
3785 Still if we end up with truly mismatched types here, fall back
3786 to using a MEM_REF to not leak invalid GIMPLE to the following
3787 passes. */
3788 /* Prevent var from being written into SSA form. */
3789 if (is_gimple_reg_type (TREE_TYPE (var)))
3790 DECL_NOT_GIMPLE_REG_P (var) = true;
3791 use = fold_build2 (MEM_REF, caller_type,
3792 build_fold_addr_expr (var),
3793 build_int_cst (ptr_type_node, 0));
3794 }
3795 }
3796
3797 STRIP_USELESS_TYPE_CONVERSION (use);
3798
3799 if (DECL_BY_REFERENCE (result))
3800 {
3801 TREE_ADDRESSABLE (var) = 1;
3802 var = build_fold_addr_expr (var);
3803 }
3804
3805 done:
3806 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3807 way, when the RESULT_DECL is encountered, it will be
3808 automatically replaced by the VAR_DECL.
3809
3810 When returning by reference, ensure that RESULT_DECL remaps to
3811 gimple_val. */
3812 if (DECL_BY_REFERENCE (result)
3813 && !is_gimple_val (var))
3814 {
3815 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3816 insert_decl_map (id, result, temp);
3817 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3818 it's default_def SSA_NAME. */
3819 if (gimple_in_ssa_p (id->src_cfun)
3820 && is_gimple_reg (result))
3821 {
3822 temp = make_ssa_name (temp);
3823 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3824 }
3825 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3826 }
3827 else
3828 insert_decl_map (id, result, var);
3829
3830 /* Remember this so we can ignore it in remap_decls. */
3831 id->retvar = var;
3832 return use;
3833 }
3834
3835 /* Determine if the function can be copied. If so return NULL. If
3836 not return a string describng the reason for failure. */
3837
3838 const char *
3839 copy_forbidden (struct function *fun)
3840 {
3841 const char *reason = fun->cannot_be_copied_reason;
3842
3843 /* Only examine the function once. */
3844 if (fun->cannot_be_copied_set)
3845 return reason;
3846
3847 /* We cannot copy a function that receives a non-local goto
3848 because we cannot remap the destination label used in the
3849 function that is performing the non-local goto. */
3850 /* ??? Actually, this should be possible, if we work at it.
3851 No doubt there's just a handful of places that simply
3852 assume it doesn't happen and don't substitute properly. */
3853 if (fun->has_nonlocal_label)
3854 {
3855 reason = G_("function %q+F can never be copied "
3856 "because it receives a non-local goto");
3857 goto fail;
3858 }
3859
3860 if (fun->has_forced_label_in_static)
3861 {
3862 reason = G_("function %q+F can never be copied because it saves "
3863 "address of local label in a static variable");
3864 goto fail;
3865 }
3866
3867 fail:
3868 fun->cannot_be_copied_reason = reason;
3869 fun->cannot_be_copied_set = true;
3870 return reason;
3871 }
3872
3873
3874 static const char *inline_forbidden_reason;
3875
3876 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3877 iff a function cannot be inlined. Also sets the reason why. */
3878
3879 static tree
3880 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3881 struct walk_stmt_info *wip)
3882 {
3883 tree fn = (tree) wip->info;
3884 tree t;
3885 gimple *stmt = gsi_stmt (*gsi);
3886
3887 switch (gimple_code (stmt))
3888 {
3889 case GIMPLE_CALL:
3890 /* Refuse to inline alloca call unless user explicitly forced so as
3891 this may change program's memory overhead drastically when the
3892 function using alloca is called in loop. In GCC present in
3893 SPEC2000 inlining into schedule_block cause it to require 2GB of
3894 RAM instead of 256MB. Don't do so for alloca calls emitted for
3895 VLA objects as those can't cause unbounded growth (they're always
3896 wrapped inside stack_save/stack_restore regions. */
3897 if (gimple_maybe_alloca_call_p (stmt)
3898 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3899 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3900 {
3901 inline_forbidden_reason
3902 = G_("function %q+F can never be inlined because it uses "
3903 "alloca (override using the always_inline attribute)");
3904 *handled_ops_p = true;
3905 return fn;
3906 }
3907
3908 t = gimple_call_fndecl (stmt);
3909 if (t == NULL_TREE)
3910 break;
3911
3912 /* We cannot inline functions that call setjmp. */
3913 if (setjmp_call_p (t))
3914 {
3915 inline_forbidden_reason
3916 = G_("function %q+F can never be inlined because it uses setjmp");
3917 *handled_ops_p = true;
3918 return t;
3919 }
3920
3921 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3922 switch (DECL_FUNCTION_CODE (t))
3923 {
3924 /* We cannot inline functions that take a variable number of
3925 arguments. */
3926 case BUILT_IN_VA_START:
3927 case BUILT_IN_NEXT_ARG:
3928 case BUILT_IN_VA_END:
3929 inline_forbidden_reason
3930 = G_("function %q+F can never be inlined because it "
3931 "uses variable argument lists");
3932 *handled_ops_p = true;
3933 return t;
3934
3935 case BUILT_IN_LONGJMP:
3936 /* We can't inline functions that call __builtin_longjmp at
3937 all. The non-local goto machinery really requires the
3938 destination be in a different function. If we allow the
3939 function calling __builtin_longjmp to be inlined into the
3940 function calling __builtin_setjmp, Things will Go Awry. */
3941 inline_forbidden_reason
3942 = G_("function %q+F can never be inlined because "
3943 "it uses setjmp-longjmp exception handling");
3944 *handled_ops_p = true;
3945 return t;
3946
3947 case BUILT_IN_NONLOCAL_GOTO:
3948 /* Similarly. */
3949 inline_forbidden_reason
3950 = G_("function %q+F can never be inlined because "
3951 "it uses non-local goto");
3952 *handled_ops_p = true;
3953 return t;
3954
3955 case BUILT_IN_RETURN:
3956 case BUILT_IN_APPLY_ARGS:
3957 /* If a __builtin_apply_args caller would be inlined,
3958 it would be saving arguments of the function it has
3959 been inlined into. Similarly __builtin_return would
3960 return from the function the inline has been inlined into. */
3961 inline_forbidden_reason
3962 = G_("function %q+F can never be inlined because "
3963 "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
3964 *handled_ops_p = true;
3965 return t;
3966
3967 default:
3968 break;
3969 }
3970 break;
3971
3972 case GIMPLE_GOTO:
3973 t = gimple_goto_dest (stmt);
3974
3975 /* We will not inline a function which uses computed goto. The
3976 addresses of its local labels, which may be tucked into
3977 global storage, are of course not constant across
3978 instantiations, which causes unexpected behavior. */
3979 if (TREE_CODE (t) != LABEL_DECL)
3980 {
3981 inline_forbidden_reason
3982 = G_("function %q+F can never be inlined "
3983 "because it contains a computed goto");
3984 *handled_ops_p = true;
3985 return t;
3986 }
3987 break;
3988
3989 default:
3990 break;
3991 }
3992
3993 *handled_ops_p = false;
3994 return NULL_TREE;
3995 }
3996
3997 /* Return true if FNDECL is a function that cannot be inlined into
3998 another one. */
3999
4000 static bool
4001 inline_forbidden_p (tree fndecl)
4002 {
4003 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
4004 struct walk_stmt_info wi;
4005 basic_block bb;
4006 bool forbidden_p = false;
4007
4008 /* First check for shared reasons not to copy the code. */
4009 inline_forbidden_reason = copy_forbidden (fun);
4010 if (inline_forbidden_reason != NULL)
4011 return true;
4012
4013 /* Next, walk the statements of the function looking for
4014 constraucts we can't handle, or are non-optimal for inlining. */
4015 hash_set<tree> visited_nodes;
4016 memset (&wi, 0, sizeof (wi));
4017 wi.info = (void *) fndecl;
4018 wi.pset = &visited_nodes;
4019
4020 FOR_EACH_BB_FN (bb, fun)
4021 {
4022 gimple *ret;
4023 gimple_seq seq = bb_seq (bb);
4024 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
4025 forbidden_p = (ret != NULL);
4026 if (forbidden_p)
4027 break;
4028 }
4029
4030 return forbidden_p;
4031 }
4032 \f
4033 /* Return false if the function FNDECL cannot be inlined on account of its
4034 attributes, true otherwise. */
4035 static bool
4036 function_attribute_inlinable_p (const_tree fndecl)
4037 {
4038 if (targetm.attribute_table)
4039 {
4040 const_tree a;
4041
4042 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
4043 {
4044 const_tree name = get_attribute_name (a);
4045 int i;
4046
4047 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
4048 if (is_attribute_p (targetm.attribute_table[i].name, name))
4049 return targetm.function_attribute_inlinable_p (fndecl);
4050 }
4051 }
4052
4053 return true;
4054 }
4055
4056 /* Returns nonzero if FN is a function that does not have any
4057 fundamental inline blocking properties. */
4058
4059 bool
4060 tree_inlinable_function_p (tree fn)
4061 {
4062 bool inlinable = true;
4063 bool do_warning;
4064 tree always_inline;
4065
4066 /* If we've already decided this function shouldn't be inlined,
4067 there's no need to check again. */
4068 if (DECL_UNINLINABLE (fn))
4069 return false;
4070
4071 /* We only warn for functions declared `inline' by the user. */
4072 do_warning = (opt_for_fn (fn, warn_inline)
4073 && DECL_DECLARED_INLINE_P (fn)
4074 && !DECL_NO_INLINE_WARNING_P (fn)
4075 && !DECL_IN_SYSTEM_HEADER (fn));
4076
4077 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
4078
4079 if (flag_no_inline
4080 && always_inline == NULL)
4081 {
4082 if (do_warning)
4083 warning (OPT_Winline, "function %q+F can never be inlined because it "
4084 "is suppressed using %<-fno-inline%>", fn);
4085 inlinable = false;
4086 }
4087
4088 else if (!function_attribute_inlinable_p (fn))
4089 {
4090 if (do_warning)
4091 warning (OPT_Winline, "function %q+F can never be inlined because it "
4092 "uses attributes conflicting with inlining", fn);
4093 inlinable = false;
4094 }
4095
4096 else if (inline_forbidden_p (fn))
4097 {
4098 /* See if we should warn about uninlinable functions. Previously,
4099 some of these warnings would be issued while trying to expand
4100 the function inline, but that would cause multiple warnings
4101 about functions that would for example call alloca. But since
4102 this a property of the function, just one warning is enough.
4103 As a bonus we can now give more details about the reason why a
4104 function is not inlinable. */
4105 if (always_inline)
4106 error (inline_forbidden_reason, fn);
4107 else if (do_warning)
4108 warning (OPT_Winline, inline_forbidden_reason, fn);
4109
4110 inlinable = false;
4111 }
4112
4113 /* Squirrel away the result so that we don't have to check again. */
4114 DECL_UNINLINABLE (fn) = !inlinable;
4115
4116 return inlinable;
4117 }
4118
4119 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
4120 word size and take possible memcpy call into account and return
4121 cost based on whether optimizing for size or speed according to SPEED_P. */
4122
4123 int
4124 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
4125 {
4126 HOST_WIDE_INT size;
4127
4128 gcc_assert (!VOID_TYPE_P (type));
4129
4130 if (TREE_CODE (type) == VECTOR_TYPE)
4131 {
4132 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
4133 machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
4134 int orig_mode_size
4135 = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
4136 int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
4137 return ((orig_mode_size + simd_mode_size - 1)
4138 / simd_mode_size);
4139 }
4140
4141 size = int_size_in_bytes (type);
4142
4143 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
4144 /* Cost of a memcpy call, 3 arguments and the call. */
4145 return 4;
4146 else
4147 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
4148 }
4149
4150 /* Returns cost of operation CODE, according to WEIGHTS */
4151
4152 static int
4153 estimate_operator_cost (enum tree_code code, eni_weights *weights,
4154 tree op1 ATTRIBUTE_UNUSED, tree op2)
4155 {
4156 switch (code)
4157 {
4158 /* These are "free" conversions, or their presumed cost
4159 is folded into other operations. */
4160 case RANGE_EXPR:
4161 CASE_CONVERT:
4162 case COMPLEX_EXPR:
4163 case PAREN_EXPR:
4164 case VIEW_CONVERT_EXPR:
4165 return 0;
4166
4167 /* Assign cost of 1 to usual operations.
4168 ??? We may consider mapping RTL costs to this. */
4169 case COND_EXPR:
4170 case VEC_COND_EXPR:
4171 case VEC_PERM_EXPR:
4172
4173 case PLUS_EXPR:
4174 case POINTER_PLUS_EXPR:
4175 case POINTER_DIFF_EXPR:
4176 case MINUS_EXPR:
4177 case MULT_EXPR:
4178 case MULT_HIGHPART_EXPR:
4179
4180 case ADDR_SPACE_CONVERT_EXPR:
4181 case FIXED_CONVERT_EXPR:
4182 case FIX_TRUNC_EXPR:
4183
4184 case NEGATE_EXPR:
4185 case FLOAT_EXPR:
4186 case MIN_EXPR:
4187 case MAX_EXPR:
4188 case ABS_EXPR:
4189 case ABSU_EXPR:
4190
4191 case LSHIFT_EXPR:
4192 case RSHIFT_EXPR:
4193 case LROTATE_EXPR:
4194 case RROTATE_EXPR:
4195
4196 case BIT_IOR_EXPR:
4197 case BIT_XOR_EXPR:
4198 case BIT_AND_EXPR:
4199 case BIT_NOT_EXPR:
4200
4201 case TRUTH_ANDIF_EXPR:
4202 case TRUTH_ORIF_EXPR:
4203 case TRUTH_AND_EXPR:
4204 case TRUTH_OR_EXPR:
4205 case TRUTH_XOR_EXPR:
4206 case TRUTH_NOT_EXPR:
4207
4208 case LT_EXPR:
4209 case LE_EXPR:
4210 case GT_EXPR:
4211 case GE_EXPR:
4212 case EQ_EXPR:
4213 case NE_EXPR:
4214 case ORDERED_EXPR:
4215 case UNORDERED_EXPR:
4216
4217 case UNLT_EXPR:
4218 case UNLE_EXPR:
4219 case UNGT_EXPR:
4220 case UNGE_EXPR:
4221 case UNEQ_EXPR:
4222 case LTGT_EXPR:
4223
4224 case CONJ_EXPR:
4225
4226 case PREDECREMENT_EXPR:
4227 case PREINCREMENT_EXPR:
4228 case POSTDECREMENT_EXPR:
4229 case POSTINCREMENT_EXPR:
4230
4231 case REALIGN_LOAD_EXPR:
4232
4233 case WIDEN_PLUS_EXPR:
4234 case WIDEN_MINUS_EXPR:
4235 case WIDEN_SUM_EXPR:
4236 case WIDEN_MULT_EXPR:
4237 case DOT_PROD_EXPR:
4238 case SAD_EXPR:
4239 case WIDEN_MULT_PLUS_EXPR:
4240 case WIDEN_MULT_MINUS_EXPR:
4241 case WIDEN_LSHIFT_EXPR:
4242
4243 case VEC_WIDEN_PLUS_HI_EXPR:
4244 case VEC_WIDEN_PLUS_LO_EXPR:
4245 case VEC_WIDEN_MINUS_HI_EXPR:
4246 case VEC_WIDEN_MINUS_LO_EXPR:
4247 case VEC_WIDEN_MULT_HI_EXPR:
4248 case VEC_WIDEN_MULT_LO_EXPR:
4249 case VEC_WIDEN_MULT_EVEN_EXPR:
4250 case VEC_WIDEN_MULT_ODD_EXPR:
4251 case VEC_UNPACK_HI_EXPR:
4252 case VEC_UNPACK_LO_EXPR:
4253 case VEC_UNPACK_FLOAT_HI_EXPR:
4254 case VEC_UNPACK_FLOAT_LO_EXPR:
4255 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
4256 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
4257 case VEC_PACK_TRUNC_EXPR:
4258 case VEC_PACK_SAT_EXPR:
4259 case VEC_PACK_FIX_TRUNC_EXPR:
4260 case VEC_PACK_FLOAT_EXPR:
4261 case VEC_WIDEN_LSHIFT_HI_EXPR:
4262 case VEC_WIDEN_LSHIFT_LO_EXPR:
4263 case VEC_DUPLICATE_EXPR:
4264 case VEC_SERIES_EXPR:
4265
4266 return 1;
4267
4268 /* Few special cases of expensive operations. This is useful
4269 to avoid inlining on functions having too many of these. */
4270 case TRUNC_DIV_EXPR:
4271 case CEIL_DIV_EXPR:
4272 case FLOOR_DIV_EXPR:
4273 case ROUND_DIV_EXPR:
4274 case EXACT_DIV_EXPR:
4275 case TRUNC_MOD_EXPR:
4276 case CEIL_MOD_EXPR:
4277 case FLOOR_MOD_EXPR:
4278 case ROUND_MOD_EXPR:
4279 case RDIV_EXPR:
4280 if (TREE_CODE (op2) != INTEGER_CST)
4281 return weights->div_mod_cost;
4282 return 1;
4283
4284 /* Bit-field insertion needs several shift and mask operations. */
4285 case BIT_INSERT_EXPR:
4286 return 3;
4287
4288 default:
4289 /* We expect a copy assignment with no operator. */
4290 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
4291 return 0;
4292 }
4293 }
4294
4295
4296 /* Estimate number of instructions that will be created by expanding
4297 the statements in the statement sequence STMTS.
4298 WEIGHTS contains weights attributed to various constructs. */
4299
4300 int
4301 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
4302 {
4303 int cost;
4304 gimple_stmt_iterator gsi;
4305
4306 cost = 0;
4307 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4308 cost += estimate_num_insns (gsi_stmt (gsi), weights);
4309
4310 return cost;
4311 }
4312
4313
4314 /* Estimate number of instructions that will be created by expanding STMT.
4315 WEIGHTS contains weights attributed to various constructs. */
4316
4317 int
4318 estimate_num_insns (gimple *stmt, eni_weights *weights)
4319 {
4320 unsigned cost, i;
4321 enum gimple_code code = gimple_code (stmt);
4322 tree lhs;
4323 tree rhs;
4324
4325 switch (code)
4326 {
4327 case GIMPLE_ASSIGN:
4328 /* Try to estimate the cost of assignments. We have three cases to
4329 deal with:
4330 1) Simple assignments to registers;
4331 2) Stores to things that must live in memory. This includes
4332 "normal" stores to scalars, but also assignments of large
4333 structures, or constructors of big arrays;
4334
4335 Let us look at the first two cases, assuming we have "a = b + C":
4336 <GIMPLE_ASSIGN <var_decl "a">
4337 <plus_expr <var_decl "b"> <constant C>>
4338 If "a" is a GIMPLE register, the assignment to it is free on almost
4339 any target, because "a" usually ends up in a real register. Hence
4340 the only cost of this expression comes from the PLUS_EXPR, and we
4341 can ignore the GIMPLE_ASSIGN.
4342 If "a" is not a GIMPLE register, the assignment to "a" will most
4343 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4344 of moving something into "a", which we compute using the function
4345 estimate_move_cost. */
4346 if (gimple_clobber_p (stmt))
4347 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4348
4349 lhs = gimple_assign_lhs (stmt);
4350 rhs = gimple_assign_rhs1 (stmt);
4351
4352 cost = 0;
4353
4354 /* Account for the cost of moving to / from memory. */
4355 if (gimple_store_p (stmt))
4356 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4357 if (gimple_assign_load_p (stmt))
4358 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4359
4360 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4361 gimple_assign_rhs1 (stmt),
4362 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4363 == GIMPLE_BINARY_RHS
4364 ? gimple_assign_rhs2 (stmt) : NULL);
4365 break;
4366
4367 case GIMPLE_COND:
4368 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4369 gimple_op (stmt, 0),
4370 gimple_op (stmt, 1));
4371 break;
4372
4373 case GIMPLE_SWITCH:
4374 {
4375 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4376 /* Take into account cost of the switch + guess 2 conditional jumps for
4377 each case label.
4378
4379 TODO: once the switch expansion logic is sufficiently separated, we can
4380 do better job on estimating cost of the switch. */
4381 if (weights->time_based)
4382 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4383 else
4384 cost = gimple_switch_num_labels (switch_stmt) * 2;
4385 }
4386 break;
4387
4388 case GIMPLE_CALL:
4389 {
4390 tree decl;
4391
4392 if (gimple_call_internal_p (stmt))
4393 return 0;
4394 else if ((decl = gimple_call_fndecl (stmt))
4395 && fndecl_built_in_p (decl))
4396 {
4397 /* Do not special case builtins where we see the body.
4398 This just confuse inliner. */
4399 struct cgraph_node *node;
4400 if (!(node = cgraph_node::get (decl))
4401 || node->definition)
4402 ;
4403 /* For buitins that are likely expanded to nothing or
4404 inlined do not account operand costs. */
4405 else if (is_simple_builtin (decl))
4406 return 0;
4407 else if (is_inexpensive_builtin (decl))
4408 return weights->target_builtin_call_cost;
4409 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4410 {
4411 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4412 specialize the cheap expansion we do here.
4413 ??? This asks for a more general solution. */
4414 switch (DECL_FUNCTION_CODE (decl))
4415 {
4416 case BUILT_IN_POW:
4417 case BUILT_IN_POWF:
4418 case BUILT_IN_POWL:
4419 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4420 && (real_equal
4421 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4422 &dconst2)))
4423 return estimate_operator_cost
4424 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4425 gimple_call_arg (stmt, 0));
4426 break;
4427
4428 default:
4429 break;
4430 }
4431 }
4432 }
4433
4434 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4435 if (gimple_call_lhs (stmt))
4436 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4437 weights->time_based);
4438 for (i = 0; i < gimple_call_num_args (stmt); i++)
4439 {
4440 tree arg = gimple_call_arg (stmt, i);
4441 cost += estimate_move_cost (TREE_TYPE (arg),
4442 weights->time_based);
4443 }
4444 break;
4445 }
4446
4447 case GIMPLE_RETURN:
4448 return weights->return_cost;
4449
4450 case GIMPLE_GOTO:
4451 case GIMPLE_LABEL:
4452 case GIMPLE_NOP:
4453 case GIMPLE_PHI:
4454 case GIMPLE_PREDICT:
4455 case GIMPLE_DEBUG:
4456 return 0;
4457
4458 case GIMPLE_ASM:
4459 {
4460 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4461 /* 1000 means infinity. This avoids overflows later
4462 with very long asm statements. */
4463 if (count > 1000)
4464 count = 1000;
4465 /* If this asm is asm inline, count anything as minimum size. */
4466 if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
4467 count = MIN (1, count);
4468 return MAX (1, count);
4469 }
4470
4471 case GIMPLE_RESX:
4472 /* This is either going to be an external function call with one
4473 argument, or two register copy statements plus a goto. */
4474 return 2;
4475
4476 case GIMPLE_EH_DISPATCH:
4477 /* ??? This is going to turn into a switch statement. Ideally
4478 we'd have a look at the eh region and estimate the number of
4479 edges involved. */
4480 return 10;
4481
4482 case GIMPLE_BIND:
4483 return estimate_num_insns_seq (
4484 gimple_bind_body (as_a <gbind *> (stmt)),
4485 weights);
4486
4487 case GIMPLE_EH_FILTER:
4488 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4489
4490 case GIMPLE_CATCH:
4491 return estimate_num_insns_seq (gimple_catch_handler (
4492 as_a <gcatch *> (stmt)),
4493 weights);
4494
4495 case GIMPLE_TRY:
4496 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4497 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4498
4499 /* OMP directives are generally very expensive. */
4500
4501 case GIMPLE_OMP_RETURN:
4502 case GIMPLE_OMP_SECTIONS_SWITCH:
4503 case GIMPLE_OMP_ATOMIC_STORE:
4504 case GIMPLE_OMP_CONTINUE:
4505 /* ...except these, which are cheap. */
4506 return 0;
4507
4508 case GIMPLE_OMP_ATOMIC_LOAD:
4509 return weights->omp_cost;
4510
4511 case GIMPLE_OMP_FOR:
4512 return (weights->omp_cost
4513 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4514 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4515
4516 case GIMPLE_OMP_PARALLEL:
4517 case GIMPLE_OMP_TASK:
4518 case GIMPLE_OMP_CRITICAL:
4519 case GIMPLE_OMP_MASTER:
4520 case GIMPLE_OMP_TASKGROUP:
4521 case GIMPLE_OMP_ORDERED:
4522 case GIMPLE_OMP_SCAN:
4523 case GIMPLE_OMP_SECTION:
4524 case GIMPLE_OMP_SECTIONS:
4525 case GIMPLE_OMP_SINGLE:
4526 case GIMPLE_OMP_TARGET:
4527 case GIMPLE_OMP_TEAMS:
4528 return (weights->omp_cost
4529 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4530
4531 case GIMPLE_TRANSACTION:
4532 return (weights->tm_cost
4533 + estimate_num_insns_seq (gimple_transaction_body (
4534 as_a <gtransaction *> (stmt)),
4535 weights));
4536
4537 default:
4538 gcc_unreachable ();
4539 }
4540
4541 return cost;
4542 }
4543
4544 /* Estimate number of instructions that will be created by expanding
4545 function FNDECL. WEIGHTS contains weights attributed to various
4546 constructs. */
4547
4548 int
4549 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4550 {
4551 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4552 gimple_stmt_iterator bsi;
4553 basic_block bb;
4554 int n = 0;
4555
4556 gcc_assert (my_function && my_function->cfg);
4557 FOR_EACH_BB_FN (bb, my_function)
4558 {
4559 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4560 n += estimate_num_insns (gsi_stmt (bsi), weights);
4561 }
4562
4563 return n;
4564 }
4565
4566
4567 /* Initializes weights used by estimate_num_insns. */
4568
4569 void
4570 init_inline_once (void)
4571 {
4572 eni_size_weights.call_cost = 1;
4573 eni_size_weights.indirect_call_cost = 3;
4574 eni_size_weights.target_builtin_call_cost = 1;
4575 eni_size_weights.div_mod_cost = 1;
4576 eni_size_weights.omp_cost = 40;
4577 eni_size_weights.tm_cost = 10;
4578 eni_size_weights.time_based = false;
4579 eni_size_weights.return_cost = 1;
4580
4581 /* Estimating time for call is difficult, since we have no idea what the
4582 called function does. In the current uses of eni_time_weights,
4583 underestimating the cost does less harm than overestimating it, so
4584 we choose a rather small value here. */
4585 eni_time_weights.call_cost = 10;
4586 eni_time_weights.indirect_call_cost = 15;
4587 eni_time_weights.target_builtin_call_cost = 1;
4588 eni_time_weights.div_mod_cost = 10;
4589 eni_time_weights.omp_cost = 40;
4590 eni_time_weights.tm_cost = 40;
4591 eni_time_weights.time_based = true;
4592 eni_time_weights.return_cost = 2;
4593 }
4594
4595
4596 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4597
4598 static void
4599 prepend_lexical_block (tree current_block, tree new_block)
4600 {
4601 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4602 BLOCK_SUBBLOCKS (current_block) = new_block;
4603 BLOCK_SUPERCONTEXT (new_block) = current_block;
4604 }
4605
4606 /* Add local variables from CALLEE to CALLER. */
4607
4608 static inline void
4609 add_local_variables (struct function *callee, struct function *caller,
4610 copy_body_data *id)
4611 {
4612 tree var;
4613 unsigned ix;
4614
4615 FOR_EACH_LOCAL_DECL (callee, ix, var)
4616 if (!can_be_nonlocal (var, id))
4617 {
4618 tree new_var = remap_decl (var, id);
4619
4620 /* Remap debug-expressions. */
4621 if (VAR_P (new_var)
4622 && DECL_HAS_DEBUG_EXPR_P (var)
4623 && new_var != var)
4624 {
4625 tree tem = DECL_DEBUG_EXPR (var);
4626 bool old_regimplify = id->regimplify;
4627 id->remapping_type_depth++;
4628 walk_tree (&tem, copy_tree_body_r, id, NULL);
4629 id->remapping_type_depth--;
4630 id->regimplify = old_regimplify;
4631 SET_DECL_DEBUG_EXPR (new_var, tem);
4632 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4633 }
4634 add_local_decl (caller, new_var);
4635 }
4636 }
4637
4638 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4639 have brought in or introduced any debug stmts for SRCVAR. */
4640
4641 static inline void
4642 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4643 {
4644 tree *remappedvarp = id->decl_map->get (srcvar);
4645
4646 if (!remappedvarp)
4647 return;
4648
4649 if (!VAR_P (*remappedvarp))
4650 return;
4651
4652 if (*remappedvarp == id->retvar)
4653 return;
4654
4655 tree tvar = target_for_debug_bind (*remappedvarp);
4656 if (!tvar)
4657 return;
4658
4659 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4660 id->call_stmt);
4661 gimple_seq_add_stmt (bindings, stmt);
4662 }
4663
4664 /* For each inlined variable for which we may have debug bind stmts,
4665 add before GSI a final debug stmt resetting it, marking the end of
4666 its life, so that var-tracking knows it doesn't have to compute
4667 further locations for it. */
4668
4669 static inline void
4670 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4671 {
4672 tree var;
4673 unsigned ix;
4674 gimple_seq bindings = NULL;
4675
4676 if (!gimple_in_ssa_p (id->src_cfun))
4677 return;
4678
4679 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4680 return;
4681
4682 for (var = DECL_ARGUMENTS (id->src_fn);
4683 var; var = DECL_CHAIN (var))
4684 reset_debug_binding (id, var, &bindings);
4685
4686 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4687 reset_debug_binding (id, var, &bindings);
4688
4689 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4690 }
4691
4692 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4693
4694 static bool
4695 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
4696 bitmap to_purge)
4697 {
4698 tree use_retvar;
4699 tree fn;
4700 hash_map<tree, tree> *dst;
4701 hash_map<tree, tree> *st = NULL;
4702 tree return_slot;
4703 tree modify_dest;
4704 struct cgraph_edge *cg_edge;
4705 cgraph_inline_failed_t reason;
4706 basic_block return_block;
4707 edge e;
4708 gimple_stmt_iterator gsi, stmt_gsi;
4709 bool successfully_inlined = false;
4710 bool purge_dead_abnormal_edges;
4711 gcall *call_stmt;
4712 unsigned int prop_mask, src_properties;
4713 struct function *dst_cfun;
4714 tree simduid;
4715 use_operand_p use;
4716 gimple *simtenter_stmt = NULL;
4717 vec<tree> *simtvars_save;
4718 clone_info *info;
4719
4720 /* The gimplifier uses input_location in too many places, such as
4721 internal_get_tmp_var (). */
4722 location_t saved_location = input_location;
4723 input_location = gimple_location (stmt);
4724
4725 /* From here on, we're only interested in CALL_EXPRs. */
4726 call_stmt = dyn_cast <gcall *> (stmt);
4727 if (!call_stmt)
4728 goto egress;
4729
4730 cg_edge = id->dst_node->get_edge (stmt);
4731 gcc_checking_assert (cg_edge);
4732 /* First, see if we can figure out what function is being called.
4733 If we cannot, then there is no hope of inlining the function. */
4734 if (cg_edge->indirect_unknown_callee)
4735 goto egress;
4736 fn = cg_edge->callee->decl;
4737 gcc_checking_assert (fn);
4738
4739 /* If FN is a declaration of a function in a nested scope that was
4740 globally declared inline, we don't set its DECL_INITIAL.
4741 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4742 C++ front-end uses it for cdtors to refer to their internal
4743 declarations, that are not real functions. Fortunately those
4744 don't have trees to be saved, so we can tell by checking their
4745 gimple_body. */
4746 if (!DECL_INITIAL (fn)
4747 && DECL_ABSTRACT_ORIGIN (fn)
4748 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4749 fn = DECL_ABSTRACT_ORIGIN (fn);
4750
4751 /* Don't try to inline functions that are not well-suited to inlining. */
4752 if (cg_edge->inline_failed)
4753 {
4754 reason = cg_edge->inline_failed;
4755 /* If this call was originally indirect, we do not want to emit any
4756 inlining related warnings or sorry messages because there are no
4757 guarantees regarding those. */
4758 if (cg_edge->indirect_inlining_edge)
4759 goto egress;
4760
4761 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4762 /* For extern inline functions that get redefined we always
4763 silently ignored always_inline flag. Better behavior would
4764 be to be able to keep both bodies and use extern inline body
4765 for inlining, but we can't do that because frontends overwrite
4766 the body. */
4767 && !cg_edge->callee->redefined_extern_inline
4768 /* During early inline pass, report only when optimization is
4769 not turned on. */
4770 && (symtab->global_info_ready
4771 || !optimize
4772 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4773 /* PR 20090218-1_0.c. Body can be provided by another module. */
4774 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4775 {
4776 error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
4777 cgraph_inline_failed_string (reason));
4778 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4779 inform (gimple_location (stmt), "called from here");
4780 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4781 inform (DECL_SOURCE_LOCATION (cfun->decl),
4782 "called from this function");
4783 }
4784 else if (opt_for_fn (fn, warn_inline)
4785 && DECL_DECLARED_INLINE_P (fn)
4786 && !DECL_NO_INLINE_WARNING_P (fn)
4787 && !DECL_IN_SYSTEM_HEADER (fn)
4788 && reason != CIF_UNSPECIFIED
4789 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4790 /* Do not warn about not inlined recursive calls. */
4791 && !cg_edge->recursive_p ()
4792 /* Avoid warnings during early inline pass. */
4793 && symtab->global_info_ready)
4794 {
4795 auto_diagnostic_group d;
4796 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4797 fn, _(cgraph_inline_failed_string (reason))))
4798 {
4799 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4800 inform (gimple_location (stmt), "called from here");
4801 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4802 inform (DECL_SOURCE_LOCATION (cfun->decl),
4803 "called from this function");
4804 }
4805 }
4806 goto egress;
4807 }
4808 id->src_node = cg_edge->callee;
4809
4810 /* If callee is thunk, all we need is to adjust the THIS pointer
4811 and redirect to function being thunked. */
4812 if (id->src_node->thunk)
4813 {
4814 cgraph_edge *edge;
4815 tree virtual_offset = NULL;
4816 profile_count count = cg_edge->count;
4817 tree op;
4818 gimple_stmt_iterator iter = gsi_for_stmt (stmt);
4819 thunk_info *info = thunk_info::get (id->src_node);
4820
4821 cgraph_edge::remove (cg_edge);
4822 edge = id->src_node->callees->clone (id->dst_node, call_stmt,
4823 gimple_uid (stmt),
4824 profile_count::one (),
4825 profile_count::one (),
4826 true);
4827 edge->count = count;
4828 if (info->virtual_offset_p)
4829 virtual_offset = size_int (info->virtual_value);
4830 op = create_tmp_reg_fn (cfun, TREE_TYPE (gimple_call_arg (stmt, 0)),
4831 NULL);
4832 gsi_insert_before (&iter, gimple_build_assign (op,
4833 gimple_call_arg (stmt, 0)),
4834 GSI_NEW_STMT);
4835 gcc_assert (info->this_adjusting);
4836 op = thunk_adjust (&iter, op, 1, info->fixed_offset,
4837 virtual_offset, info->indirect_offset);
4838
4839 gimple_call_set_arg (stmt, 0, op);
4840 gimple_call_set_fndecl (stmt, edge->callee->decl);
4841 update_stmt (stmt);
4842 id->src_node->remove ();
4843 successfully_inlined = expand_call_inline (bb, stmt, id, to_purge);
4844 maybe_remove_unused_call_args (cfun, stmt);
4845 /* This used to return true even though we do fail to inline in
4846 some cases. See PR98525. */
4847 goto egress;
4848 }
4849 fn = cg_edge->callee->decl;
4850 cg_edge->callee->get_untransformed_body ();
4851
4852 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4853 cg_edge->callee->verify ();
4854
4855 /* We will be inlining this callee. */
4856 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4857
4858 /* Update the callers EH personality. */
4859 if (DECL_FUNCTION_PERSONALITY (fn))
4860 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4861 = DECL_FUNCTION_PERSONALITY (fn);
4862
4863 /* Split the block before the GIMPLE_CALL. */
4864 stmt_gsi = gsi_for_stmt (stmt);
4865 gsi_prev (&stmt_gsi);
4866 e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
4867 bb = e->src;
4868 return_block = e->dest;
4869 remove_edge (e);
4870
4871 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4872 been the source of abnormal edges. In this case, schedule
4873 the removal of dead abnormal edges. */
4874 gsi = gsi_start_bb (return_block);
4875 gsi_next (&gsi);
4876 purge_dead_abnormal_edges = gsi_end_p (gsi);
4877
4878 stmt_gsi = gsi_start_bb (return_block);
4879
4880 /* Build a block containing code to initialize the arguments, the
4881 actual inline expansion of the body, and a label for the return
4882 statements within the function to jump to. The type of the
4883 statement expression is the return type of the function call.
4884 ??? If the call does not have an associated block then we will
4885 remap all callee blocks to NULL, effectively dropping most of
4886 its debug information. This should only happen for calls to
4887 artificial decls inserted by the compiler itself. We need to
4888 either link the inlined blocks into the caller block tree or
4889 not refer to them in any way to not break GC for locations. */
4890 if (tree block = gimple_block (stmt))
4891 {
4892 /* We do want to assign a not UNKNOWN_LOCATION BLOCK_SOURCE_LOCATION
4893 to make inlined_function_outer_scope_p return true on this BLOCK. */
4894 location_t loc = LOCATION_LOCUS (gimple_location (stmt));
4895 if (loc == UNKNOWN_LOCATION)
4896 loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
4897 if (loc == UNKNOWN_LOCATION)
4898 loc = BUILTINS_LOCATION;
4899 id->block = make_node (BLOCK);
4900 BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
4901 BLOCK_SOURCE_LOCATION (id->block) = loc;
4902 prepend_lexical_block (block, id->block);
4903 }
4904
4905 /* Local declarations will be replaced by their equivalents in this map. */
4906 st = id->decl_map;
4907 id->decl_map = new hash_map<tree, tree>;
4908 dst = id->debug_map;
4909 id->debug_map = NULL;
4910 if (flag_stack_reuse != SR_NONE)
4911 id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
4912
4913 /* Record the function we are about to inline. */
4914 id->src_fn = fn;
4915 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4916 id->reset_location = DECL_IGNORED_P (fn);
4917 id->call_stmt = call_stmt;
4918
4919 /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
4920 variables to be added to IFN_GOMP_SIMT_ENTER argument list. */
4921 dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4922 simtvars_save = id->dst_simt_vars;
4923 if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
4924 && (simduid = bb->loop_father->simduid) != NULL_TREE
4925 && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
4926 && single_imm_use (simduid, &use, &simtenter_stmt)
4927 && is_gimple_call (simtenter_stmt)
4928 && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
4929 vec_alloc (id->dst_simt_vars, 0);
4930 else
4931 id->dst_simt_vars = NULL;
4932
4933 if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
4934 profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
4935
4936 /* If the src function contains an IFN_VA_ARG, then so will the dst
4937 function after inlining. Likewise for IFN_GOMP_USE_SIMT. */
4938 prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
4939 src_properties = id->src_cfun->curr_properties & prop_mask;
4940 if (src_properties != prop_mask)
4941 dst_cfun->curr_properties &= src_properties | ~prop_mask;
4942 dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
4943 id->dst_node->calls_declare_variant_alt
4944 |= id->src_node->calls_declare_variant_alt;
4945
4946 gcc_assert (!id->src_cfun->after_inlining);
4947
4948 id->entry_bb = bb;
4949 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4950 {
4951 gimple_stmt_iterator si = gsi_last_bb (bb);
4952 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4953 NOT_TAKEN),
4954 GSI_NEW_STMT);
4955 }
4956 initialize_inlined_parameters (id, stmt, fn, bb);
4957 if (debug_nonbind_markers_p && debug_inline_points && id->block
4958 && inlined_function_outer_scope_p (id->block))
4959 {
4960 gimple_stmt_iterator si = gsi_last_bb (bb);
4961 gsi_insert_after (&si, gimple_build_debug_inline_entry
4962 (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
4963 GSI_NEW_STMT);
4964 }
4965
4966 if (DECL_INITIAL (fn))
4967 {
4968 if (gimple_block (stmt))
4969 {
4970 tree *var;
4971
4972 prepend_lexical_block (id->block,
4973 remap_blocks (DECL_INITIAL (fn), id));
4974 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4975 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4976 == NULL_TREE));
4977 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4978 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4979 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4980 under it. The parameters can be then evaluated in the debugger,
4981 but don't show in backtraces. */
4982 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4983 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4984 {
4985 tree v = *var;
4986 *var = TREE_CHAIN (v);
4987 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4988 BLOCK_VARS (id->block) = v;
4989 }
4990 else
4991 var = &TREE_CHAIN (*var);
4992 }
4993 else
4994 remap_blocks_to_null (DECL_INITIAL (fn), id);
4995 }
4996
4997 /* Return statements in the function body will be replaced by jumps
4998 to the RET_LABEL. */
4999 gcc_assert (DECL_INITIAL (fn));
5000 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
5001
5002 /* Find the LHS to which the result of this call is assigned. */
5003 return_slot = NULL;
5004 if (gimple_call_lhs (stmt))
5005 {
5006 modify_dest = gimple_call_lhs (stmt);
5007
5008 /* The function which we are inlining might not return a value,
5009 in which case we should issue a warning that the function
5010 does not return a value. In that case the optimizers will
5011 see that the variable to which the value is assigned was not
5012 initialized. We do not want to issue a warning about that
5013 uninitialized variable. */
5014 if (DECL_P (modify_dest))
5015 TREE_NO_WARNING (modify_dest) = 1;
5016
5017 if (gimple_call_return_slot_opt_p (call_stmt))
5018 {
5019 return_slot = modify_dest;
5020 modify_dest = NULL;
5021 }
5022 }
5023 else
5024 modify_dest = NULL;
5025
5026 /* If we are inlining a call to the C++ operator new, we don't want
5027 to use type based alias analysis on the return value. Otherwise
5028 we may get confused if the compiler sees that the inlined new
5029 function returns a pointer which was just deleted. See bug
5030 33407. */
5031 if (DECL_IS_OPERATOR_NEW_P (fn))
5032 {
5033 return_slot = NULL;
5034 modify_dest = NULL;
5035 }
5036
5037 /* Declare the return variable for the function. */
5038 use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
5039
5040 /* Add local vars in this inlined callee to caller. */
5041 add_local_variables (id->src_cfun, cfun, id);
5042
5043 info = clone_info::get (id->src_node);
5044 if (info && info->performed_splits)
5045 {
5046 clone_info *dst_info = clone_info::get_create (id->dst_node);
5047 /* Any calls from the inlined function will be turned into calls from the
5048 function we inline into. We must preserve notes about how to split
5049 parameters such calls should be redirected/updated. */
5050 unsigned len = vec_safe_length (info->performed_splits);
5051 for (unsigned i = 0; i < len; i++)
5052 {
5053 ipa_param_performed_split ps
5054 = (*info->performed_splits)[i];
5055 ps.dummy_decl = remap_decl (ps.dummy_decl, id);
5056 vec_safe_push (dst_info->performed_splits, ps);
5057 }
5058
5059 if (flag_checking)
5060 {
5061 len = vec_safe_length (dst_info->performed_splits);
5062 for (unsigned i = 0; i < len; i++)
5063 {
5064 ipa_param_performed_split *ps1
5065 = &(*dst_info->performed_splits)[i];
5066 for (unsigned j = i + 1; j < len; j++)
5067 {
5068 ipa_param_performed_split *ps2
5069 = &(*dst_info->performed_splits)[j];
5070 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
5071 || ps1->unit_offset != ps2->unit_offset);
5072 }
5073 }
5074 }
5075 }
5076
5077 if (dump_enabled_p ())
5078 {
5079 char buf[128];
5080 snprintf (buf, sizeof(buf), "%4.2f",
5081 cg_edge->sreal_frequency ().to_double ());
5082 dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
5083 call_stmt,
5084 "Inlining %C to %C with frequency %s\n",
5085 id->src_node, id->dst_node, buf);
5086 if (dump_file && (dump_flags & TDF_DETAILS))
5087 {
5088 id->src_node->dump (dump_file);
5089 id->dst_node->dump (dump_file);
5090 }
5091 }
5092
5093 /* This is it. Duplicate the callee body. Assume callee is
5094 pre-gimplified. Note that we must not alter the caller
5095 function in any way before this point, as this CALL_EXPR may be
5096 a self-referential call; if we're calling ourselves, we need to
5097 duplicate our body before altering anything. */
5098 copy_body (id, bb, return_block, NULL);
5099
5100 reset_debug_bindings (id, stmt_gsi);
5101
5102 if (flag_stack_reuse != SR_NONE)
5103 for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
5104 if (!TREE_THIS_VOLATILE (p))
5105 {
5106 tree *varp = id->decl_map->get (p);
5107 if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
5108 {
5109 tree clobber = build_clobber (TREE_TYPE (*varp));
5110 gimple *clobber_stmt;
5111 clobber_stmt = gimple_build_assign (*varp, clobber);
5112 gimple_set_location (clobber_stmt, gimple_location (stmt));
5113 gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5114 }
5115 }
5116
5117 /* Reset the escaped solution. */
5118 if (cfun->gimple_df)
5119 pt_solution_reset (&cfun->gimple_df->escaped);
5120
5121 /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments. */
5122 if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
5123 {
5124 size_t nargs = gimple_call_num_args (simtenter_stmt);
5125 vec<tree> *vars = id->dst_simt_vars;
5126 auto_vec<tree> newargs (nargs + vars->length ());
5127 for (size_t i = 0; i < nargs; i++)
5128 newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
5129 for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
5130 {
5131 tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
5132 newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
5133 }
5134 gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
5135 gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
5136 gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
5137 gsi_replace (&gsi, g, false);
5138 }
5139 vec_free (id->dst_simt_vars);
5140 id->dst_simt_vars = simtvars_save;
5141
5142 /* Clean up. */
5143 if (id->debug_map)
5144 {
5145 delete id->debug_map;
5146 id->debug_map = dst;
5147 }
5148 delete id->decl_map;
5149 id->decl_map = st;
5150
5151 /* Unlink the calls virtual operands before replacing it. */
5152 unlink_stmt_vdef (stmt);
5153 if (gimple_vdef (stmt)
5154 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
5155 release_ssa_name (gimple_vdef (stmt));
5156
5157 /* If the inlined function returns a result that we care about,
5158 substitute the GIMPLE_CALL with an assignment of the return
5159 variable to the LHS of the call. That is, if STMT was
5160 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
5161 if (use_retvar && gimple_call_lhs (stmt))
5162 {
5163 gimple *old_stmt = stmt;
5164 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
5165 gimple_set_location (stmt, gimple_location (old_stmt));
5166 gsi_replace (&stmt_gsi, stmt, false);
5167 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
5168 /* Append a clobber for id->retvar if easily possible. */
5169 if (flag_stack_reuse != SR_NONE
5170 && id->retvar
5171 && VAR_P (id->retvar)
5172 && id->retvar != return_slot
5173 && id->retvar != modify_dest
5174 && !TREE_THIS_VOLATILE (id->retvar)
5175 && !is_gimple_reg (id->retvar)
5176 && !stmt_ends_bb_p (stmt))
5177 {
5178 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5179 gimple *clobber_stmt;
5180 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5181 gimple_set_location (clobber_stmt, gimple_location (old_stmt));
5182 gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
5183 }
5184 }
5185 else
5186 {
5187 /* Handle the case of inlining a function with no return
5188 statement, which causes the return value to become undefined. */
5189 if (gimple_call_lhs (stmt)
5190 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
5191 {
5192 tree name = gimple_call_lhs (stmt);
5193 tree var = SSA_NAME_VAR (name);
5194 tree def = var ? ssa_default_def (cfun, var) : NULL;
5195
5196 if (def)
5197 {
5198 /* If the variable is used undefined, make this name
5199 undefined via a move. */
5200 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
5201 gsi_replace (&stmt_gsi, stmt, true);
5202 }
5203 else
5204 {
5205 if (!var)
5206 {
5207 var = create_tmp_reg_fn (cfun, TREE_TYPE (name), NULL);
5208 SET_SSA_NAME_VAR_OR_IDENTIFIER (name, var);
5209 }
5210 /* Otherwise make this variable undefined. */
5211 gsi_remove (&stmt_gsi, true);
5212 set_ssa_default_def (cfun, var, name);
5213 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
5214 }
5215 }
5216 /* Replace with a clobber for id->retvar. */
5217 else if (flag_stack_reuse != SR_NONE
5218 && id->retvar
5219 && VAR_P (id->retvar)
5220 && id->retvar != return_slot
5221 && id->retvar != modify_dest
5222 && !TREE_THIS_VOLATILE (id->retvar)
5223 && !is_gimple_reg (id->retvar))
5224 {
5225 tree clobber = build_clobber (TREE_TYPE (id->retvar));
5226 gimple *clobber_stmt;
5227 clobber_stmt = gimple_build_assign (id->retvar, clobber);
5228 gimple_set_location (clobber_stmt, gimple_location (stmt));
5229 gsi_replace (&stmt_gsi, clobber_stmt, false);
5230 maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
5231 }
5232 else
5233 gsi_remove (&stmt_gsi, true);
5234 }
5235
5236 if (purge_dead_abnormal_edges)
5237 bitmap_set_bit (to_purge, return_block->index);
5238
5239 /* If the value of the new expression is ignored, that's OK. We
5240 don't warn about this for CALL_EXPRs, so we shouldn't warn about
5241 the equivalent inlined version either. */
5242 if (is_gimple_assign (stmt))
5243 {
5244 gcc_assert (gimple_assign_single_p (stmt)
5245 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
5246 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
5247 }
5248
5249 id->add_clobbers_to_eh_landing_pads = 0;
5250
5251 /* Output the inlining info for this abstract function, since it has been
5252 inlined. If we don't do this now, we can lose the information about the
5253 variables in the function when the blocks get blown away as soon as we
5254 remove the cgraph node. */
5255 if (gimple_block (stmt))
5256 (*debug_hooks->outlining_inline_function) (fn);
5257
5258 /* Update callgraph if needed. */
5259 cg_edge->callee->remove ();
5260
5261 id->block = NULL_TREE;
5262 id->retvar = NULL_TREE;
5263 successfully_inlined = true;
5264
5265 egress:
5266 input_location = saved_location;
5267 return successfully_inlined;
5268 }
5269
5270 /* Expand call statements reachable from STMT_P.
5271 We can only have CALL_EXPRs as the "toplevel" tree code or nested
5272 in a MODIFY_EXPR. */
5273
5274 static bool
5275 gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
5276 bitmap to_purge)
5277 {
5278 gimple_stmt_iterator gsi;
5279 bool inlined = false;
5280
5281 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
5282 {
5283 gimple *stmt = gsi_stmt (gsi);
5284 gsi_prev (&gsi);
5285
5286 if (is_gimple_call (stmt)
5287 && !gimple_call_internal_p (stmt))
5288 inlined |= expand_call_inline (bb, stmt, id, to_purge);
5289 }
5290
5291 return inlined;
5292 }
5293
5294
5295 /* Walk all basic blocks created after FIRST and try to fold every statement
5296 in the STATEMENTS pointer set. */
5297
5298 static void
5299 fold_marked_statements (int first, hash_set<gimple *> *statements)
5300 {
5301 auto_bitmap to_purge;
5302
5303 auto_vec<edge, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
5304 auto_sbitmap visited (last_basic_block_for_fn (cfun));
5305 bitmap_clear (visited);
5306
5307 stack.quick_push (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5308 while (!stack.is_empty ())
5309 {
5310 /* Look at the edge on the top of the stack. */
5311 edge e = stack.pop ();
5312 basic_block dest = e->dest;
5313
5314 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
5315 || bitmap_bit_p (visited, dest->index))
5316 continue;
5317
5318 bitmap_set_bit (visited, dest->index);
5319
5320 if (dest->index >= first)
5321 for (gimple_stmt_iterator gsi = gsi_start_bb (dest);
5322 !gsi_end_p (gsi); gsi_next (&gsi))
5323 {
5324 if (!statements->contains (gsi_stmt (gsi)))
5325 continue;
5326
5327 gimple *old_stmt = gsi_stmt (gsi);
5328 tree old_decl = (is_gimple_call (old_stmt)
5329 ? gimple_call_fndecl (old_stmt) : 0);
5330 if (old_decl && fndecl_built_in_p (old_decl))
5331 {
5332 /* Folding builtins can create multiple instructions,
5333 we need to look at all of them. */
5334 gimple_stmt_iterator i2 = gsi;
5335 gsi_prev (&i2);
5336 if (fold_stmt (&gsi))
5337 {
5338 gimple *new_stmt;
5339 /* If a builtin at the end of a bb folded into nothing,
5340 the following loop won't work. */
5341 if (gsi_end_p (gsi))
5342 {
5343 cgraph_update_edges_for_call_stmt (old_stmt,
5344 old_decl, NULL);
5345 break;
5346 }
5347 if (gsi_end_p (i2))
5348 i2 = gsi_start_bb (dest);
5349 else
5350 gsi_next (&i2);
5351 while (1)
5352 {
5353 new_stmt = gsi_stmt (i2);
5354 update_stmt (new_stmt);
5355 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5356 new_stmt);
5357
5358 if (new_stmt == gsi_stmt (gsi))
5359 {
5360 /* It is okay to check only for the very last
5361 of these statements. If it is a throwing
5362 statement nothing will change. If it isn't
5363 this can remove EH edges. If that weren't
5364 correct then because some intermediate stmts
5365 throw, but not the last one. That would mean
5366 we'd have to split the block, which we can't
5367 here and we'd loose anyway. And as builtins
5368 probably never throw, this all
5369 is mood anyway. */
5370 if (maybe_clean_or_replace_eh_stmt (old_stmt,
5371 new_stmt))
5372 bitmap_set_bit (to_purge, dest->index);
5373 break;
5374 }
5375 gsi_next (&i2);
5376 }
5377 }
5378 }
5379 else if (fold_stmt (&gsi))
5380 {
5381 /* Re-read the statement from GSI as fold_stmt() may
5382 have changed it. */
5383 gimple *new_stmt = gsi_stmt (gsi);
5384 update_stmt (new_stmt);
5385
5386 if (is_gimple_call (old_stmt)
5387 || is_gimple_call (new_stmt))
5388 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
5389 new_stmt);
5390
5391 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
5392 bitmap_set_bit (to_purge, dest->index);
5393 }
5394 }
5395
5396 if (EDGE_COUNT (dest->succs) > 0)
5397 {
5398 /* Avoid warnings emitted from folding statements that
5399 became unreachable because of inlined function parameter
5400 propagation. */
5401 e = find_taken_edge (dest, NULL_TREE);
5402 if (e)
5403 stack.quick_push (e);
5404 else
5405 {
5406 edge_iterator ei;
5407 FOR_EACH_EDGE (e, ei, dest->succs)
5408 stack.safe_push (e);
5409 }
5410 }
5411 }
5412
5413 gimple_purge_all_dead_eh_edges (to_purge);
5414 }
5415
5416 /* Expand calls to inline functions in the body of FN. */
5417
5418 unsigned int
5419 optimize_inline_calls (tree fn)
5420 {
5421 copy_body_data id;
5422 basic_block bb;
5423 int last = n_basic_blocks_for_fn (cfun);
5424 bool inlined_p = false;
5425
5426 /* Clear out ID. */
5427 memset (&id, 0, sizeof (id));
5428
5429 id.src_node = id.dst_node = cgraph_node::get (fn);
5430 gcc_assert (id.dst_node->definition);
5431 id.dst_fn = fn;
5432 /* Or any functions that aren't finished yet. */
5433 if (current_function_decl)
5434 id.dst_fn = current_function_decl;
5435
5436 id.copy_decl = copy_decl_maybe_to_var;
5437 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5438 id.transform_new_cfg = false;
5439 id.transform_return_to_modify = true;
5440 id.transform_parameter = true;
5441 id.transform_lang_insert_block = NULL;
5442 id.statements_to_fold = new hash_set<gimple *>;
5443
5444 push_gimplify_context ();
5445
5446 /* We make no attempts to keep dominance info up-to-date. */
5447 free_dominance_info (CDI_DOMINATORS);
5448 free_dominance_info (CDI_POST_DOMINATORS);
5449
5450 /* Register specific gimple functions. */
5451 gimple_register_cfg_hooks ();
5452
5453 /* Reach the trees by walking over the CFG, and note the
5454 enclosing basic-blocks in the call edges. */
5455 /* We walk the blocks going forward, because inlined function bodies
5456 will split id->current_basic_block, and the new blocks will
5457 follow it; we'll trudge through them, processing their CALL_EXPRs
5458 along the way. */
5459 auto_bitmap to_purge;
5460 FOR_EACH_BB_FN (bb, cfun)
5461 inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
5462
5463 pop_gimplify_context (NULL);
5464
5465 if (flag_checking)
5466 {
5467 struct cgraph_edge *e;
5468
5469 id.dst_node->verify ();
5470
5471 /* Double check that we inlined everything we are supposed to inline. */
5472 for (e = id.dst_node->callees; e; e = e->next_callee)
5473 gcc_assert (e->inline_failed);
5474 }
5475
5476 /* If we didn't inline into the function there is nothing to do. */
5477 if (!inlined_p)
5478 {
5479 delete id.statements_to_fold;
5480 return 0;
5481 }
5482
5483 /* Fold queued statements. */
5484 update_max_bb_count ();
5485 fold_marked_statements (last, id.statements_to_fold);
5486 delete id.statements_to_fold;
5487
5488 /* Finally purge EH and abnormal edges from the call stmts we inlined.
5489 We need to do this after fold_marked_statements since that may walk
5490 the SSA use-def chain. */
5491 unsigned i;
5492 bitmap_iterator bi;
5493 EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
5494 {
5495 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
5496 if (bb)
5497 {
5498 gimple_purge_dead_eh_edges (bb);
5499 gimple_purge_dead_abnormal_call_edges (bb);
5500 }
5501 }
5502
5503 gcc_assert (!id.debug_stmts.exists ());
5504
5505 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5506 number_blocks (fn);
5507
5508 delete_unreachable_blocks_update_callgraph (id.dst_node, false);
5509 id.dst_node->calls_comdat_local = id.dst_node->check_calls_comdat_local_p ();
5510
5511 if (flag_checking)
5512 id.dst_node->verify ();
5513
5514 /* It would be nice to check SSA/CFG/statement consistency here, but it is
5515 not possible yet - the IPA passes might make various functions to not
5516 throw and they don't care to proactively update local EH info. This is
5517 done later in fixup_cfg pass that also execute the verification. */
5518 return (TODO_update_ssa
5519 | TODO_cleanup_cfg
5520 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
5521 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
5522 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
5523 ? TODO_rebuild_frequencies : 0));
5524 }
5525
5526 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5527
5528 tree
5529 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5530 {
5531 enum tree_code code = TREE_CODE (*tp);
5532 enum tree_code_class cl = TREE_CODE_CLASS (code);
5533
5534 /* We make copies of most nodes. */
5535 if (IS_EXPR_CODE_CLASS (cl)
5536 || code == TREE_LIST
5537 || code == TREE_VEC
5538 || code == TYPE_DECL
5539 || code == OMP_CLAUSE)
5540 {
5541 /* Because the chain gets clobbered when we make a copy, we save it
5542 here. */
5543 tree chain = NULL_TREE, new_tree;
5544
5545 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5546 chain = TREE_CHAIN (*tp);
5547
5548 /* Copy the node. */
5549 new_tree = copy_node (*tp);
5550
5551 *tp = new_tree;
5552
5553 /* Now, restore the chain, if appropriate. That will cause
5554 walk_tree to walk into the chain as well. */
5555 if (code == PARM_DECL
5556 || code == TREE_LIST
5557 || code == OMP_CLAUSE)
5558 TREE_CHAIN (*tp) = chain;
5559
5560 /* For now, we don't update BLOCKs when we make copies. So, we
5561 have to nullify all BIND_EXPRs. */
5562 if (TREE_CODE (*tp) == BIND_EXPR)
5563 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5564 }
5565 else if (code == CONSTRUCTOR)
5566 {
5567 /* CONSTRUCTOR nodes need special handling because
5568 we need to duplicate the vector of elements. */
5569 tree new_tree;
5570
5571 new_tree = copy_node (*tp);
5572 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5573 *tp = new_tree;
5574 }
5575 else if (code == STATEMENT_LIST)
5576 /* We used to just abort on STATEMENT_LIST, but we can run into them
5577 with statement-expressions (c++/40975). */
5578 copy_statement_list (tp);
5579 else if (TREE_CODE_CLASS (code) == tcc_type)
5580 *walk_subtrees = 0;
5581 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5582 *walk_subtrees = 0;
5583 else if (TREE_CODE_CLASS (code) == tcc_constant)
5584 *walk_subtrees = 0;
5585 return NULL_TREE;
5586 }
5587
5588 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5589 information indicating to what new SAVE_EXPR this one should be mapped,
5590 use that one. Otherwise, create a new node and enter it in ST. FN is
5591 the function into which the copy will be placed. */
5592
5593 static void
5594 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5595 {
5596 tree *n;
5597 tree t;
5598
5599 /* See if we already encountered this SAVE_EXPR. */
5600 n = st->get (*tp);
5601
5602 /* If we didn't already remap this SAVE_EXPR, do so now. */
5603 if (!n)
5604 {
5605 t = copy_node (*tp);
5606
5607 /* Remember this SAVE_EXPR. */
5608 st->put (*tp, t);
5609 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5610 st->put (t, t);
5611 }
5612 else
5613 {
5614 /* We've already walked into this SAVE_EXPR; don't do it again. */
5615 *walk_subtrees = 0;
5616 t = *n;
5617 }
5618
5619 /* Replace this SAVE_EXPR with the copy. */
5620 *tp = t;
5621 }
5622
5623 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5624 label, copies the declaration and enters it in the splay_tree in DATA (which
5625 is really a 'copy_body_data *'. */
5626
5627 static tree
5628 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5629 bool *handled_ops_p ATTRIBUTE_UNUSED,
5630 struct walk_stmt_info *wi)
5631 {
5632 copy_body_data *id = (copy_body_data *) wi->info;
5633 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5634
5635 if (stmt)
5636 {
5637 tree decl = gimple_label_label (stmt);
5638
5639 /* Copy the decl and remember the copy. */
5640 insert_decl_map (id, decl, id->copy_decl (decl, id));
5641 }
5642
5643 return NULL_TREE;
5644 }
5645
5646 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5647 struct walk_stmt_info *wi);
5648
5649 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5650 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5651 remaps all local declarations to appropriate replacements in gimple
5652 operands. */
5653
5654 static tree
5655 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5656 {
5657 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5658 copy_body_data *id = (copy_body_data *) wi->info;
5659 hash_map<tree, tree> *st = id->decl_map;
5660 tree *n;
5661 tree expr = *tp;
5662
5663 /* For recursive invocations this is no longer the LHS itself. */
5664 bool is_lhs = wi->is_lhs;
5665 wi->is_lhs = false;
5666
5667 if (TREE_CODE (expr) == SSA_NAME)
5668 {
5669 *tp = remap_ssa_name (*tp, id);
5670 *walk_subtrees = 0;
5671 if (is_lhs)
5672 SSA_NAME_DEF_STMT (*tp) = gsi_stmt (wi->gsi);
5673 }
5674 /* Only a local declaration (variable or label). */
5675 else if ((VAR_P (expr) && !TREE_STATIC (expr))
5676 || TREE_CODE (expr) == LABEL_DECL)
5677 {
5678 /* Lookup the declaration. */
5679 n = st->get (expr);
5680
5681 /* If it's there, remap it. */
5682 if (n)
5683 *tp = *n;
5684 *walk_subtrees = 0;
5685 }
5686 else if (TREE_CODE (expr) == STATEMENT_LIST
5687 || TREE_CODE (expr) == BIND_EXPR
5688 || TREE_CODE (expr) == SAVE_EXPR)
5689 gcc_unreachable ();
5690 else if (TREE_CODE (expr) == TARGET_EXPR)
5691 {
5692 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5693 It's OK for this to happen if it was part of a subtree that
5694 isn't immediately expanded, such as operand 2 of another
5695 TARGET_EXPR. */
5696 if (!TREE_OPERAND (expr, 1))
5697 {
5698 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5699 TREE_OPERAND (expr, 3) = NULL_TREE;
5700 }
5701 }
5702 else if (TREE_CODE (expr) == OMP_CLAUSE)
5703 {
5704 /* Before the omplower pass completes, some OMP clauses can contain
5705 sequences that are neither copied by gimple_seq_copy nor walked by
5706 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5707 in those situations, we have to copy and process them explicitely. */
5708
5709 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5710 {
5711 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5712 seq = duplicate_remap_omp_clause_seq (seq, wi);
5713 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5714 }
5715 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5716 {
5717 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5718 seq = duplicate_remap_omp_clause_seq (seq, wi);
5719 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5720 }
5721 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5722 {
5723 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5724 seq = duplicate_remap_omp_clause_seq (seq, wi);
5725 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5726 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5727 seq = duplicate_remap_omp_clause_seq (seq, wi);
5728 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5729 }
5730 }
5731
5732 /* Keep iterating. */
5733 return NULL_TREE;
5734 }
5735
5736
5737 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5738 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5739 remaps all local declarations to appropriate replacements in gimple
5740 statements. */
5741
5742 static tree
5743 replace_locals_stmt (gimple_stmt_iterator *gsip,
5744 bool *handled_ops_p ATTRIBUTE_UNUSED,
5745 struct walk_stmt_info *wi)
5746 {
5747 copy_body_data *id = (copy_body_data *) wi->info;
5748 gimple *gs = gsi_stmt (*gsip);
5749
5750 if (gbind *stmt = dyn_cast <gbind *> (gs))
5751 {
5752 tree block = gimple_bind_block (stmt);
5753
5754 if (block)
5755 {
5756 remap_block (&block, id);
5757 gimple_bind_set_block (stmt, block);
5758 }
5759
5760 /* This will remap a lot of the same decls again, but this should be
5761 harmless. */
5762 if (gimple_bind_vars (stmt))
5763 {
5764 tree old_var, decls = gimple_bind_vars (stmt);
5765
5766 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5767 if (!can_be_nonlocal (old_var, id)
5768 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5769 remap_decl (old_var, id);
5770
5771 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5772 id->prevent_decl_creation_for_types = true;
5773 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5774 id->prevent_decl_creation_for_types = false;
5775 }
5776 }
5777
5778 /* Keep iterating. */
5779 return NULL_TREE;
5780 }
5781
5782 /* Create a copy of SEQ and remap all decls in it. */
5783
5784 static gimple_seq
5785 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5786 {
5787 if (!seq)
5788 return NULL;
5789
5790 /* If there are any labels in OMP sequences, they can be only referred to in
5791 the sequence itself and therefore we can do both here. */
5792 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5793 gimple_seq copy = gimple_seq_copy (seq);
5794 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5795 return copy;
5796 }
5797
5798 /* Copies everything in SEQ and replaces variables and labels local to
5799 current_function_decl. */
5800
5801 gimple_seq
5802 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5803 {
5804 copy_body_data id;
5805 struct walk_stmt_info wi;
5806 gimple_seq copy;
5807
5808 /* There's nothing to do for NULL_TREE. */
5809 if (seq == NULL)
5810 return seq;
5811
5812 /* Set up ID. */
5813 memset (&id, 0, sizeof (id));
5814 id.src_fn = current_function_decl;
5815 id.dst_fn = current_function_decl;
5816 id.src_cfun = cfun;
5817 id.decl_map = new hash_map<tree, tree>;
5818 id.debug_map = NULL;
5819
5820 id.copy_decl = copy_decl_no_change;
5821 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5822 id.transform_new_cfg = false;
5823 id.transform_return_to_modify = false;
5824 id.transform_parameter = false;
5825 id.transform_lang_insert_block = NULL;
5826
5827 /* Walk the tree once to find local labels. */
5828 memset (&wi, 0, sizeof (wi));
5829 hash_set<tree> visited;
5830 wi.info = &id;
5831 wi.pset = &visited;
5832 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5833
5834 copy = gimple_seq_copy (seq);
5835
5836 /* Walk the copy, remapping decls. */
5837 memset (&wi, 0, sizeof (wi));
5838 wi.info = &id;
5839 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5840
5841 /* Clean up. */
5842 delete id.decl_map;
5843 if (id.debug_map)
5844 delete id.debug_map;
5845 if (id.dependence_map)
5846 {
5847 delete id.dependence_map;
5848 id.dependence_map = NULL;
5849 }
5850
5851 return copy;
5852 }
5853
5854
5855 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5856
5857 static tree
5858 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5859 {
5860 if (*tp == data)
5861 return (tree) data;
5862 else
5863 return NULL;
5864 }
5865
5866 DEBUG_FUNCTION bool
5867 debug_find_tree (tree top, tree search)
5868 {
5869 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5870 }
5871
5872
5873 /* Declare the variables created by the inliner. Add all the variables in
5874 VARS to BIND_EXPR. */
5875
5876 static void
5877 declare_inline_vars (tree block, tree vars)
5878 {
5879 tree t;
5880 for (t = vars; t; t = DECL_CHAIN (t))
5881 {
5882 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5883 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5884 add_local_decl (cfun, t);
5885 }
5886
5887 if (block)
5888 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5889 }
5890
5891 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5892 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5893 VAR_DECL translation. */
5894
5895 tree
5896 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5897 {
5898 /* Don't generate debug information for the copy if we wouldn't have
5899 generated it for the copy either. */
5900 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5901 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5902
5903 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5904 declaration inspired this copy. */
5905 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5906
5907 /* The new variable/label has no RTL, yet. */
5908 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5909 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5910 SET_DECL_RTL (copy, 0);
5911 /* For vector typed decls make sure to update DECL_MODE according
5912 to the new function context. */
5913 if (VECTOR_TYPE_P (TREE_TYPE (copy)))
5914 SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
5915
5916 /* These args would always appear unused, if not for this. */
5917 TREE_USED (copy) = 1;
5918
5919 /* Set the context for the new declaration. */
5920 if (!DECL_CONTEXT (decl))
5921 /* Globals stay global. */
5922 ;
5923 else if (DECL_CONTEXT (decl) != id->src_fn)
5924 /* Things that weren't in the scope of the function we're inlining
5925 from aren't in the scope we're inlining to, either. */
5926 ;
5927 else if (TREE_STATIC (decl))
5928 /* Function-scoped static variables should stay in the original
5929 function. */
5930 ;
5931 else
5932 {
5933 /* Ordinary automatic local variables are now in the scope of the
5934 new function. */
5935 DECL_CONTEXT (copy) = id->dst_fn;
5936 if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
5937 {
5938 if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
5939 DECL_ATTRIBUTES (copy)
5940 = tree_cons (get_identifier ("omp simt private"), NULL,
5941 DECL_ATTRIBUTES (copy));
5942 id->dst_simt_vars->safe_push (copy);
5943 }
5944 }
5945
5946 return copy;
5947 }
5948
5949 /* Create a new VAR_DECL that is indentical in all respect to DECL except that
5950 DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL. The original
5951 DECL must come from ID->src_fn and the copy will be part of ID->dst_fn. */
5952
5953 tree
5954 copy_decl_to_var (tree decl, copy_body_data *id)
5955 {
5956 tree copy, type;
5957
5958 gcc_assert (TREE_CODE (decl) == PARM_DECL
5959 || TREE_CODE (decl) == RESULT_DECL);
5960
5961 type = TREE_TYPE (decl);
5962
5963 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5964 VAR_DECL, DECL_NAME (decl), type);
5965 if (DECL_PT_UID_SET_P (decl))
5966 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5967 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5968 TREE_READONLY (copy) = TREE_READONLY (decl);
5969 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5970 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
5971 DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
5972
5973 return copy_decl_for_dup_finish (id, decl, copy);
5974 }
5975
5976 /* Like copy_decl_to_var, but create a return slot object instead of a
5977 pointer variable for return by invisible reference. */
5978
5979 static tree
5980 copy_result_decl_to_var (tree decl, copy_body_data *id)
5981 {
5982 tree copy, type;
5983
5984 gcc_assert (TREE_CODE (decl) == PARM_DECL
5985 || TREE_CODE (decl) == RESULT_DECL);
5986
5987 type = TREE_TYPE (decl);
5988 if (DECL_BY_REFERENCE (decl))
5989 type = TREE_TYPE (type);
5990
5991 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5992 VAR_DECL, DECL_NAME (decl), type);
5993 if (DECL_PT_UID_SET_P (decl))
5994 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5995 TREE_READONLY (copy) = TREE_READONLY (decl);
5996 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5997 if (!DECL_BY_REFERENCE (decl))
5998 {
5999 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
6000 DECL_NOT_GIMPLE_REG_P (copy)
6001 = (DECL_NOT_GIMPLE_REG_P (decl)
6002 /* RESULT_DECLs are treated special by needs_to_live_in_memory,
6003 mirror that to the created VAR_DECL. */
6004 || (TREE_CODE (decl) == RESULT_DECL
6005 && aggregate_value_p (decl, id->src_fn)));
6006 }
6007
6008 return copy_decl_for_dup_finish (id, decl, copy);
6009 }
6010
6011 tree
6012 copy_decl_no_change (tree decl, copy_body_data *id)
6013 {
6014 tree copy;
6015
6016 copy = copy_node (decl);
6017
6018 /* The COPY is not abstract; it will be generated in DST_FN. */
6019 DECL_ABSTRACT_P (copy) = false;
6020 lang_hooks.dup_lang_specific_decl (copy);
6021
6022 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
6023 been taken; it's for internal bookkeeping in expand_goto_internal. */
6024 if (TREE_CODE (copy) == LABEL_DECL)
6025 {
6026 TREE_ADDRESSABLE (copy) = 0;
6027 LABEL_DECL_UID (copy) = -1;
6028 }
6029
6030 return copy_decl_for_dup_finish (id, decl, copy);
6031 }
6032
6033 static tree
6034 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
6035 {
6036 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
6037 return copy_decl_to_var (decl, id);
6038 else
6039 return copy_decl_no_change (decl, id);
6040 }
6041
6042 /* Return a copy of the function's argument tree without any modifications. */
6043
6044 static tree
6045 copy_arguments_nochange (tree orig_parm, copy_body_data * id)
6046 {
6047 tree arg, *parg;
6048 tree new_parm = NULL;
6049
6050 parg = &new_parm;
6051 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
6052 {
6053 tree new_tree = remap_decl (arg, id);
6054 if (TREE_CODE (new_tree) != PARM_DECL)
6055 new_tree = id->copy_decl (arg, id);
6056 lang_hooks.dup_lang_specific_decl (new_tree);
6057 *parg = new_tree;
6058 parg = &DECL_CHAIN (new_tree);
6059 }
6060 return new_parm;
6061 }
6062
6063 /* Return a copy of the function's static chain. */
6064 static tree
6065 copy_static_chain (tree static_chain, copy_body_data * id)
6066 {
6067 tree *chain_copy, *pvar;
6068
6069 chain_copy = &static_chain;
6070 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
6071 {
6072 tree new_tree = remap_decl (*pvar, id);
6073 lang_hooks.dup_lang_specific_decl (new_tree);
6074 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
6075 *pvar = new_tree;
6076 }
6077 return static_chain;
6078 }
6079
6080 /* Return true if the function is allowed to be versioned.
6081 This is a guard for the versioning functionality. */
6082
6083 bool
6084 tree_versionable_function_p (tree fndecl)
6085 {
6086 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
6087 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
6088 }
6089
6090 /* Update clone info after duplication. */
6091
6092 static void
6093 update_clone_info (copy_body_data * id)
6094 {
6095 clone_info *dst_info = clone_info::get (id->dst_node);
6096 vec<ipa_param_performed_split, va_gc> *cur_performed_splits
6097 = dst_info ? dst_info->performed_splits : NULL;
6098 if (cur_performed_splits)
6099 {
6100 unsigned len = cur_performed_splits->length ();
6101 for (unsigned i = 0; i < len; i++)
6102 {
6103 ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
6104 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6105 }
6106 }
6107
6108 struct cgraph_node *node;
6109 if (!id->dst_node->clones)
6110 return;
6111 for (node = id->dst_node->clones; node != id->dst_node;)
6112 {
6113 /* First update replace maps to match the new body. */
6114 clone_info *info = clone_info::get (node);
6115 if (info && info->tree_map)
6116 {
6117 unsigned int i;
6118 for (i = 0; i < vec_safe_length (info->tree_map); i++)
6119 {
6120 struct ipa_replace_map *replace_info;
6121 replace_info = (*info->tree_map)[i];
6122 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
6123 }
6124 }
6125 if (info && info->performed_splits)
6126 {
6127 unsigned len = vec_safe_length (info->performed_splits);
6128 for (unsigned i = 0; i < len; i++)
6129 {
6130 ipa_param_performed_split *ps
6131 = &(*info->performed_splits)[i];
6132 ps->dummy_decl = remap_decl (ps->dummy_decl, id);
6133 }
6134 }
6135 if (unsigned len = vec_safe_length (cur_performed_splits))
6136 {
6137 /* We do not want to add current performed splits when we are saving
6138 a copy of function body for later during inlining, that would just
6139 duplicate all entries. So let's have a look whether anything
6140 referring to the first dummy_decl is present. */
6141 if (!info)
6142 info = clone_info::get_create (node);
6143 unsigned dst_len = vec_safe_length (info->performed_splits);
6144 ipa_param_performed_split *first = &(*cur_performed_splits)[0];
6145 for (unsigned i = 0; i < dst_len; i++)
6146 if ((*info->performed_splits)[i].dummy_decl
6147 == first->dummy_decl)
6148 {
6149 len = 0;
6150 break;
6151 }
6152
6153 for (unsigned i = 0; i < len; i++)
6154 vec_safe_push (info->performed_splits,
6155 (*cur_performed_splits)[i]);
6156 if (flag_checking)
6157 {
6158 for (unsigned i = 0; i < dst_len; i++)
6159 {
6160 ipa_param_performed_split *ps1
6161 = &(*info->performed_splits)[i];
6162 for (unsigned j = i + 1; j < dst_len; j++)
6163 {
6164 ipa_param_performed_split *ps2
6165 = &(*info->performed_splits)[j];
6166 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
6167 || ps1->unit_offset != ps2->unit_offset);
6168 }
6169 }
6170 }
6171 }
6172
6173 if (node->clones)
6174 node = node->clones;
6175 else if (node->next_sibling_clone)
6176 node = node->next_sibling_clone;
6177 else
6178 {
6179 while (node != id->dst_node && !node->next_sibling_clone)
6180 node = node->clone_of;
6181 if (node != id->dst_node)
6182 node = node->next_sibling_clone;
6183 }
6184 }
6185 }
6186
6187 /* Create a copy of a function's tree.
6188 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
6189 of the original function and the new copied function
6190 respectively. In case we want to replace a DECL
6191 tree with another tree while duplicating the function's
6192 body, TREE_MAP represents the mapping between these
6193 trees. If UPDATE_CLONES is set, the call_stmt fields
6194 of edges of clones of the function will be updated.
6195
6196 If non-NULL PARAM_ADJUSTMENTS determines how function prototype (i.e. the
6197 function parameters and return value) should be modified).
6198 If non-NULL BLOCKS_TO_COPY determine what basic blocks to copy.
6199 If non_NULL NEW_ENTRY determine new entry BB of the clone.
6200 */
6201 void
6202 tree_function_versioning (tree old_decl, tree new_decl,
6203 vec<ipa_replace_map *, va_gc> *tree_map,
6204 ipa_param_adjustments *param_adjustments,
6205 bool update_clones, bitmap blocks_to_copy,
6206 basic_block new_entry)
6207 {
6208 struct cgraph_node *old_version_node;
6209 struct cgraph_node *new_version_node;
6210 copy_body_data id;
6211 tree p;
6212 unsigned i;
6213 struct ipa_replace_map *replace_info;
6214 basic_block old_entry_block, bb;
6215 auto_vec<gimple *, 10> init_stmts;
6216 tree vars = NULL_TREE;
6217
6218 /* We can get called recursively from expand_call_inline via clone
6219 materialization. While expand_call_inline maintains input_location
6220 we cannot tolerate it to leak into the materialized clone. */
6221 location_t saved_location = input_location;
6222 input_location = UNKNOWN_LOCATION;
6223
6224 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
6225 && TREE_CODE (new_decl) == FUNCTION_DECL);
6226 DECL_POSSIBLY_INLINED (old_decl) = 1;
6227
6228 old_version_node = cgraph_node::get (old_decl);
6229 gcc_checking_assert (old_version_node);
6230 new_version_node = cgraph_node::get (new_decl);
6231 gcc_checking_assert (new_version_node);
6232
6233 /* Copy over debug args. */
6234 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
6235 {
6236 vec<tree, va_gc> **new_debug_args, **old_debug_args;
6237 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
6238 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
6239 old_debug_args = decl_debug_args_lookup (old_decl);
6240 if (old_debug_args)
6241 {
6242 new_debug_args = decl_debug_args_insert (new_decl);
6243 *new_debug_args = vec_safe_copy (*old_debug_args);
6244 }
6245 }
6246
6247 /* Output the inlining info for this abstract function, since it has been
6248 inlined. If we don't do this now, we can lose the information about the
6249 variables in the function when the blocks get blown away as soon as we
6250 remove the cgraph node. */
6251 (*debug_hooks->outlining_inline_function) (old_decl);
6252
6253 DECL_ARTIFICIAL (new_decl) = 1;
6254 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
6255 if (DECL_ORIGIN (old_decl) == old_decl)
6256 old_version_node->used_as_abstract_origin = true;
6257 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
6258
6259 /* Prepare the data structures for the tree copy. */
6260 memset (&id, 0, sizeof (id));
6261
6262 /* Generate a new name for the new version. */
6263 id.statements_to_fold = new hash_set<gimple *>;
6264
6265 id.decl_map = new hash_map<tree, tree>;
6266 id.debug_map = NULL;
6267 id.src_fn = old_decl;
6268 id.dst_fn = new_decl;
6269 id.src_node = old_version_node;
6270 id.dst_node = new_version_node;
6271 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
6272 id.blocks_to_copy = blocks_to_copy;
6273
6274 id.copy_decl = copy_decl_no_change;
6275 id.transform_call_graph_edges
6276 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
6277 id.transform_new_cfg = true;
6278 id.transform_return_to_modify = false;
6279 id.transform_parameter = false;
6280 id.transform_lang_insert_block = NULL;
6281
6282 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
6283 (DECL_STRUCT_FUNCTION (old_decl));
6284 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
6285 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
6286 initialize_cfun (new_decl, old_decl,
6287 new_entry ? new_entry->count : old_entry_block->count);
6288 new_version_node->calls_declare_variant_alt
6289 = old_version_node->calls_declare_variant_alt;
6290 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
6291 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
6292 = id.src_cfun->gimple_df->ipa_pta;
6293
6294 /* Copy the function's static chain. */
6295 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
6296 if (p)
6297 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
6298 = copy_static_chain (p, &id);
6299
6300 auto_vec<int, 16> new_param_indices;
6301 clone_info *info = clone_info::get (old_version_node);
6302 ipa_param_adjustments *old_param_adjustments
6303 = info ? info->param_adjustments : NULL;
6304 if (old_param_adjustments)
6305 old_param_adjustments->get_updated_indices (&new_param_indices);
6306
6307 /* If there's a tree_map, prepare for substitution. */
6308 if (tree_map)
6309 for (i = 0; i < tree_map->length (); i++)
6310 {
6311 gimple *init;
6312 replace_info = (*tree_map)[i];
6313
6314 int p = replace_info->parm_num;
6315 if (old_param_adjustments)
6316 p = new_param_indices[p];
6317
6318 tree parm;
6319 for (parm = DECL_ARGUMENTS (old_decl); p;
6320 parm = DECL_CHAIN (parm))
6321 p--;
6322 gcc_assert (parm);
6323 init = setup_one_parameter (&id, parm, replace_info->new_tree,
6324 id.src_fn, NULL, &vars);
6325 if (init)
6326 init_stmts.safe_push (init);
6327 }
6328
6329 ipa_param_body_adjustments *param_body_adjs = NULL;
6330 if (param_adjustments)
6331 {
6332 param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
6333 new_decl, old_decl,
6334 &id, &vars, tree_map);
6335 id.param_body_adjs = param_body_adjs;
6336 DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
6337 }
6338 else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
6339 DECL_ARGUMENTS (new_decl)
6340 = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
6341
6342 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
6343 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
6344
6345 declare_inline_vars (DECL_INITIAL (new_decl), vars);
6346
6347 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
6348 /* Add local vars. */
6349 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
6350
6351 if (DECL_RESULT (old_decl) == NULL_TREE)
6352 ;
6353 else if (param_adjustments && param_adjustments->m_skip_return
6354 && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
6355 {
6356 tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
6357 &id);
6358 declare_inline_vars (NULL, resdecl_repl);
6359 insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
6360
6361 DECL_RESULT (new_decl)
6362 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
6363 RESULT_DECL, NULL_TREE, void_type_node);
6364 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
6365 DECL_IS_MALLOC (new_decl) = false;
6366 cfun->returns_struct = 0;
6367 cfun->returns_pcc_struct = 0;
6368 }
6369 else
6370 {
6371 tree old_name;
6372 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
6373 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
6374 if (gimple_in_ssa_p (id.src_cfun)
6375 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
6376 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
6377 {
6378 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
6379 insert_decl_map (&id, old_name, new_name);
6380 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
6381 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
6382 }
6383 }
6384
6385 /* Set up the destination functions loop tree. */
6386 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
6387 {
6388 cfun->curr_properties &= ~PROP_loops;
6389 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6390 cfun->curr_properties |= PROP_loops;
6391 }
6392
6393 /* Copy the Function's body. */
6394 copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
6395 new_entry);
6396
6397 /* Renumber the lexical scoping (non-code) blocks consecutively. */
6398 number_blocks (new_decl);
6399
6400 /* We want to create the BB unconditionally, so that the addition of
6401 debug stmts doesn't affect BB count, which may in the end cause
6402 codegen differences. */
6403 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6404 while (init_stmts.length ())
6405 insert_init_stmt (&id, bb, init_stmts.pop ());
6406 update_clone_info (&id);
6407
6408 /* Remap the nonlocal_goto_save_area, if any. */
6409 if (cfun->nonlocal_goto_save_area)
6410 {
6411 struct walk_stmt_info wi;
6412
6413 memset (&wi, 0, sizeof (wi));
6414 wi.info = &id;
6415 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
6416 }
6417
6418 /* Clean up. */
6419 delete id.decl_map;
6420 if (id.debug_map)
6421 delete id.debug_map;
6422 free_dominance_info (CDI_DOMINATORS);
6423 free_dominance_info (CDI_POST_DOMINATORS);
6424
6425 update_max_bb_count ();
6426 fold_marked_statements (0, id.statements_to_fold);
6427 delete id.statements_to_fold;
6428 delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
6429 if (id.dst_node->definition)
6430 cgraph_edge::rebuild_references ();
6431 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
6432 {
6433 calculate_dominance_info (CDI_DOMINATORS);
6434 fix_loop_structure (NULL);
6435 }
6436 update_ssa (TODO_update_ssa);
6437
6438 /* After partial cloning we need to rescale frequencies, so they are
6439 within proper range in the cloned function. */
6440 if (new_entry)
6441 {
6442 struct cgraph_edge *e;
6443 rebuild_frequencies ();
6444
6445 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
6446 for (e = new_version_node->callees; e; e = e->next_callee)
6447 {
6448 basic_block bb = gimple_bb (e->call_stmt);
6449 e->count = bb->count;
6450 }
6451 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
6452 {
6453 basic_block bb = gimple_bb (e->call_stmt);
6454 e->count = bb->count;
6455 }
6456 }
6457
6458 if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
6459 {
6460 vec<tree, va_gc> **debug_args = NULL;
6461 unsigned int len = 0;
6462 unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
6463
6464 for (i = 0; i < reset_len; i++)
6465 {
6466 tree parm = param_body_adjs->m_reset_debug_decls[i];
6467 gcc_assert (is_gimple_reg (parm));
6468 tree ddecl;
6469
6470 if (debug_args == NULL)
6471 {
6472 debug_args = decl_debug_args_insert (new_decl);
6473 len = vec_safe_length (*debug_args);
6474 }
6475 ddecl = make_node (DEBUG_EXPR_DECL);
6476 DECL_ARTIFICIAL (ddecl) = 1;
6477 TREE_TYPE (ddecl) = TREE_TYPE (parm);
6478 SET_DECL_MODE (ddecl, DECL_MODE (parm));
6479 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
6480 vec_safe_push (*debug_args, ddecl);
6481 }
6482 if (debug_args != NULL)
6483 {
6484 /* On the callee side, add
6485 DEBUG D#Y s=> parm
6486 DEBUG var => D#Y
6487 stmts to the first bb where var is a VAR_DECL created for the
6488 optimized away parameter in DECL_INITIAL block. This hints
6489 in the debug info that var (whole DECL_ORIGIN is the parm
6490 PARM_DECL) is optimized away, but could be looked up at the
6491 call site as value of D#X there. */
6492 tree vexpr;
6493 gimple_stmt_iterator cgsi
6494 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6495 gimple *def_temp;
6496 tree var = vars;
6497 i = vec_safe_length (*debug_args);
6498 do
6499 {
6500 i -= 2;
6501 while (var != NULL_TREE
6502 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6503 var = TREE_CHAIN (var);
6504 if (var == NULL_TREE)
6505 break;
6506 vexpr = make_node (DEBUG_EXPR_DECL);
6507 tree parm = (**debug_args)[i];
6508 DECL_ARTIFICIAL (vexpr) = 1;
6509 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6510 SET_DECL_MODE (vexpr, DECL_MODE (parm));
6511 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6512 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6513 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6514 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6515 }
6516 while (i > len);
6517 }
6518 }
6519 delete param_body_adjs;
6520 free_dominance_info (CDI_DOMINATORS);
6521 free_dominance_info (CDI_POST_DOMINATORS);
6522
6523 gcc_assert (!id.debug_stmts.exists ());
6524 pop_cfun ();
6525 input_location = saved_location;
6526 return;
6527 }
6528
6529 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6530 the callee and return the inlined body on success. */
6531
6532 tree
6533 maybe_inline_call_in_expr (tree exp)
6534 {
6535 tree fn = get_callee_fndecl (exp);
6536
6537 /* We can only try to inline "const" functions. */
6538 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6539 {
6540 call_expr_arg_iterator iter;
6541 copy_body_data id;
6542 tree param, arg, t;
6543 hash_map<tree, tree> decl_map;
6544
6545 /* Remap the parameters. */
6546 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6547 param;
6548 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6549 decl_map.put (param, arg);
6550
6551 memset (&id, 0, sizeof (id));
6552 id.src_fn = fn;
6553 id.dst_fn = current_function_decl;
6554 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6555 id.decl_map = &decl_map;
6556
6557 id.copy_decl = copy_decl_no_change;
6558 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6559 id.transform_new_cfg = false;
6560 id.transform_return_to_modify = true;
6561 id.transform_parameter = true;
6562 id.transform_lang_insert_block = NULL;
6563
6564 /* Make sure not to unshare trees behind the front-end's back
6565 since front-end specific mechanisms may rely on sharing. */
6566 id.regimplify = false;
6567 id.do_not_unshare = true;
6568
6569 /* We're not inside any EH region. */
6570 id.eh_lp_nr = 0;
6571
6572 t = copy_tree_body (&id);
6573
6574 /* We can only return something suitable for use in a GENERIC
6575 expression tree. */
6576 if (TREE_CODE (t) == MODIFY_EXPR)
6577 return TREE_OPERAND (t, 1);
6578 }
6579
6580 return NULL_TREE;
6581 }
6582
6583 /* Duplicate a type, fields and all. */
6584
6585 tree
6586 build_duplicate_type (tree type)
6587 {
6588 struct copy_body_data id;
6589
6590 memset (&id, 0, sizeof (id));
6591 id.src_fn = current_function_decl;
6592 id.dst_fn = current_function_decl;
6593 id.src_cfun = cfun;
6594 id.decl_map = new hash_map<tree, tree>;
6595 id.debug_map = NULL;
6596 id.copy_decl = copy_decl_no_change;
6597
6598 type = remap_type_1 (type, &id);
6599
6600 delete id.decl_map;
6601 if (id.debug_map)
6602 delete id.debug_map;
6603
6604 TYPE_CANONICAL (type) = type;
6605
6606 return type;
6607 }
6608
6609 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6610 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6611 evaluation. */
6612
6613 tree
6614 copy_fn (tree fn, tree& parms, tree& result)
6615 {
6616 copy_body_data id;
6617 tree param;
6618 hash_map<tree, tree> decl_map;
6619
6620 tree *p = &parms;
6621 *p = NULL_TREE;
6622
6623 memset (&id, 0, sizeof (id));
6624 id.src_fn = fn;
6625 id.dst_fn = current_function_decl;
6626 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6627 id.decl_map = &decl_map;
6628
6629 id.copy_decl = copy_decl_no_change;
6630 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6631 id.transform_new_cfg = false;
6632 id.transform_return_to_modify = false;
6633 id.transform_parameter = true;
6634 id.transform_lang_insert_block = NULL;
6635
6636 /* Make sure not to unshare trees behind the front-end's back
6637 since front-end specific mechanisms may rely on sharing. */
6638 id.regimplify = false;
6639 id.do_not_unshare = true;
6640 id.do_not_fold = true;
6641
6642 /* We're not inside any EH region. */
6643 id.eh_lp_nr = 0;
6644
6645 /* Remap the parameters and result and return them to the caller. */
6646 for (param = DECL_ARGUMENTS (fn);
6647 param;
6648 param = DECL_CHAIN (param))
6649 {
6650 *p = remap_decl (param, &id);
6651 p = &DECL_CHAIN (*p);
6652 }
6653
6654 if (DECL_RESULT (fn))
6655 result = remap_decl (DECL_RESULT (fn), &id);
6656 else
6657 result = NULL_TREE;
6658
6659 return copy_tree_body (&id);
6660 }