/* Liveness for SSA trees.
   Copyright (C) 2003-2021 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-iterator.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "debug.h"
#include "tree-ssa.h"
#include "ipa-utils.h"
#include "cfgloop.h"
#include "stringpool.h"
#include "attribs.h"
#include "optinfo.h"
#include "gimple-walk.h"
#include "cfganal.h"

static void verify_live_on_entry (tree_live_info_p);


/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */

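/* Illustrative only, not part of the original file: a hedged sketch of the
   typical out-of-ssa lifecycle of a var_map using the functions defined
   below.  NAME1 and NAME2 stand for hypothetical SSA_NAMEs from a caller.
   Coalesce copy-related names, compact to a dense 0..n-1 view, then query
   the partitioning:

     var_map map = init_var_map (num_ssa_names, NULL);
     var_union (map, name1, name2);
     partition_view_normal (map);
     int p = var_to_partition (map, name1);
     tree var = partition_to_var (map, p);
     delete_var_map (map);
*/
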
/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}
/* Create a variable partition map of SIZE for a region, initialize and
   return it.  The region is a loop if LOOP is non-NULL, otherwise it is
   the current function.  */

var_map
init_var_map (int size, class loop *loop)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  map->vec_bbs = vNULL;
  if (loop)
    {
      map->bmp_bbs = BITMAP_ALLOC (NULL);
      map->outofssa_p = false;
      basic_block *bbs = get_loop_body_in_dom_order (loop);
      for (unsigned i = 0; i < loop->num_nodes; ++i)
        {
          bitmap_set_bit (map->bmp_bbs, bbs[i]->index);
          map->vec_bbs.safe_push (bbs[i]);
        }
      free (bbs);
    }
  else
    {
      map->bmp_bbs = NULL;
      map->outofssa_p = true;
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
        map->vec_bbs.safe_push (bb);
    }
  return map;
}


/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  if (map->bmp_bbs)
    BITMAP_FREE (map->bmp_bbs);
  map->vec_bbs.release ();
  free (map);
}


/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}


/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */


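/* Illustrative only, not part of the original file: a hedged sketch of how
   view compaction composes.  INTERESTING stands for a hypothetical bitmap of
   SSA versions chosen by a caller; a reduced view can be used for a while
   and then recompacted to the full set of used partitions without
   disturbing the partitions themselves:

     partition_view_bitmap (map, interesting);
     ... work on the reduced view of map->num_partitions entries ...
     partition_view_normal (map);
*/
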
/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in
   the varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
          && (!has_zero_uses (ssa_name (tmp))
              || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
              || (SSA_NAME_VAR (ssa_name (tmp))
                  && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
        bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}


/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
        {
          map->partition_to_view[x] = i;
          map->view_to_partition[i] = x;
          i++;
        }
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}


/* Create a partition view which includes all the used partitions in MAP.  */

void
partition_view_normal (var_map map)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  var_map_base_fini (map);
}


/* Create a partition view in MAP which includes just the partitions which
   occur in the bitmap ONLY.  */

void
partition_view_bitmap (var_map map, bitmap only)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  var_map_base_fini (map);
}


static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
        return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (VAR_P (t))
    {
      /* When a global var becomes used for the first time also walk its
         initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
          && DECL_CONTEXT (t) == current_function_decl)
        mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks as unused so that they can
   possibly be eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Check if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and
        the block has the same abstract origin as the outer block and
        declares no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to save
   copying work done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with block_ultimate_origin
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block)
    {
      tree fn = block_ultimate_origin (scope);
      if (fn && TREE_CODE (fn) == FUNCTION_DECL)
        {
          in_ctor_dtor_block = false;
          unused = false;
        }
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of a nested function refers to the block of the
         function.  We might still call it even if all statements
         of the function it was nested into were eliminated.

         TODO: We can actually look into cgraph to see if the function
         will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
        unused = false;

      /* If a decl has a value expr, we need to instantiate it
         regardless of debug info generation, to avoid codegen
         differences in memory overlap tests.  update_equiv_regs() may
         indirectly call validate_equiv_mem() to test whether a
         SET_DEST overlaps with others, and if the value expr changes
         by virtual register instantiation, we may end up with
         different results.  */
      else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
        unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }

      /* When we are outputting debug info, we usually want to output
         info about optimized-out variables in the scope blocks.
         The exception are the scope blocks not containing any instructions
         at all, so the user can't get into those scopes in the first
         place.  */
      else if (is_used_p (*t))
        unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
        /* For labels that are still used in the IL, the decision to
           preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
           risk having different ordering in debug vs. non-debug builds
           during inlining or versioning.
           A label appearing here (we have already checked DECL_IGNORED_P)
           should not be used in the IL unless it has been explicitly used
           before, so we use TREE_USED as an approximation.  */
        /* In principle, we should do the same here as for the debug case
           below; however, when debugging, there might be additional nested
           levels that keep an upper level with a label live, so we have to
           force this block to be considered used, too.  */
        unused = false;

      /* When we are not doing full debug info, we however can keep around
         only the used variables for cfgexpand's memory packing, saving quite
         a lot of memory.

         For the sake of -g3, we keep around those vars but we don't count
         this as use of the block, so the innermost block with no used vars
         and no instructions can be considered dead.  We only want to keep
         around blocks the user can breakpoint into and ask about the value
         of optimized out variables.

         Similarly we need to keep around types at least until all
         variables of all nested blocks are gone.  We track no
         information on whether a given type is used or not, so we have
         to keep them even when not emitting debug information,
         otherwise we may end up remapping variables and their (local)
         types in different orders depending on whether debug
         information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
               || debug_info_level == DINFO_LEVEL_NORMAL
               || debug_info_level == DINFO_LEVEL_VERBOSE)
        ;
      else
        {
          *t = DECL_CHAIN (*t);
          next = t;
        }
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
        if (BLOCK_SUBBLOCKS (*t))
          {
            tree next = BLOCK_CHAIN (*t);
            tree supercontext = BLOCK_SUPERCONTEXT (*t);

            *t = BLOCK_SUBBLOCKS (*t);
            while (BLOCK_CHAIN (*t))
              {
                BLOCK_SUPERCONTEXT (*t) = supercontext;
                t = &BLOCK_CHAIN (*t);
              }
            BLOCK_CHAIN (*t) = next;
            BLOCK_SUPERCONTEXT (*t) = supercontext;
            t = &BLOCK_CHAIN (*t);
            nsubblocks++;
          }
        else
          *t = BLOCK_CHAIN (*t);
      }
    else
      {
        t = &BLOCK_CHAIN (*t);
        nsubblocks++;
      }


  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
           || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always
     be eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile
           && debug_info_level == DINFO_LEVEL_NONE
           && !optinfo_wants_inlining_info_p ())
    {
      /* Even for -g0 don't prune outer scopes from inlined functions,
         otherwise late diagnostics from such functions will not be
         emitted or suppressed properly.  */
      if (inlined_function_outer_scope_p (scope))
        {
          gcc_assert (TREE_CODE (BLOCK_ORIGIN (scope)) == FUNCTION_DECL);
          unused = false;
        }
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of an inlined
     function.  Inlined functions are always represented by blocks
     with block_ultimate_origin being set to a FUNCTION_DECL and
     DECL_SOURCE_LOCATION set, unless they expand to nothing...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
                == UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        unsigned i;
        tree b;
        gimple *stmt;

      next:
        stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
          continue;
        b = gimple_block (stmt);
        if (b && !TREE_USED (b))
          {
            /* Elide debug marker stmts that have an associated BLOCK from an
               inline instance that was removed together with the outermost
               scope BLOCK of that inline instance.  If the outermost scope
               BLOCK of that inline instance is preserved, use it in place of
               the removed BLOCK.  That keeps the marker associated with the
               correct inline instance (or with no inline instance in case it
               was not from an inline instance).  */
            if (gimple_debug_nonbind_marker_p (stmt)
                && BLOCK_ABSTRACT_ORIGIN (b))
              {
                while (TREE_CODE (b) == BLOCK
                       && !inlined_function_outer_scope_p (b))
                  b = BLOCK_SUPERCONTEXT (b);
                if (TREE_CODE (b) == BLOCK)
                  {
                    if (TREE_USED (b))
                      {
                        gimple_set_block (stmt, b);
                        continue;
                      }
                    gsi_remove (&gsi, true);
                    if (gsi_end_p (gsi))
                      break;
                    goto next;
                  }
              }
            gimple_set_block (stmt, NULL);
          }
        for (i = 0; i < gimple_num_ops (stmt); i++)
          walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
                     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s", indent, "", BLOCK_NUMBER (scope),
           TREE_USED (scope) ? "" : " (unused)");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
        {
          fprintf (file, " Originating from :");
          if (DECL_P (origin))
            print_generic_decl (file, origin, flags);
          else
            fprintf (file, "#%i", BLOCK_NUMBER (origin));
        }
    }
  if (BLOCK_FRAGMENT_ORIGIN (scope))
    fprintf (file, " Fragment of : #%i",
             BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
  else if (BLOCK_FRAGMENT_CHAIN (scope))
    {
      fprintf (file, " Fragment chain :");
      for (t = BLOCK_FRAGMENT_CHAIN (scope); t ;
           t = BLOCK_FRAGMENT_CHAIN (t))
        fprintf (file, " #%i", BLOCK_NUMBER (t));
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
                          flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t ; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, dump_flags_t flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}


/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, dump_flags_t flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}


/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (dump_flags_t flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);
  auto_bitmap useddebug;

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree b = gimple_block (stmt);

          /* If we wanted to mark the block referenced by the inline
             entry point marker as used, this would be a good spot to
             do it.  If the block is not otherwise used, the stmt will
             be cleaned up in clear_unused_block_pointer.  */
          if (is_gimple_debug (stmt))
            {
              if (gimple_debug_bind_p (stmt))
                {
                  tree var = gimple_debug_bind_get_var (stmt);
                  if (VAR_P (var))
                    {
                      if (!gimple_debug_bind_get_value (stmt))
                        /* Run the 2nd phase.  */
                        have_local_clobbers = true;
                      else
                        bitmap_set_bit (useddebug, DECL_UID (var));
                    }
                }
              continue;
            }

          if (gimple_clobber_p (stmt))
            {
              have_local_clobbers = true;
              continue;
            }

          if (b)
            TREE_USED (b) = true;

          for (i = 0; i < gimple_num_ops (stmt); i++)
            mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
        }

      for (gphi_iterator gpi = gsi_start_phis (bb);
           !gsi_end_p (gpi);
           gsi_next (&gpi))
        {
          use_operand_p arg_p;
          ssa_op_iter i;
          tree def;
          gphi *phi = gpi.phi ();

          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;

          def = gimple_phi_result (phi);
          mark_all_vars_used (&def);

          FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
            {
              tree arg = USE_FROM_PTR (arg_p);
              int index = PHI_ARG_INDEX_FROM_USE (arg_p);
              tree block =
                LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
              if (block != NULL)
                TREE_USED (block) = true;
              mark_all_vars_used (&arg);
            }
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (LOCATION_BLOCK (e->goto_locus) != NULL)
          TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We use a two-pass approach for the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other reference.  So the
     first pass ignores them, and the second pass (if there were any)
     tries to remove them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
        gimple_stmt_iterator gsi;

        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);
            tree b = gimple_block (stmt);

            if (gimple_clobber_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree base = get_base_address (lhs);
                /* Remove clobbers referencing unused vars, or clobbers
                   with MEM_REF lhs referencing uninitialized pointers.  */
                if ((VAR_P (base) && !is_used_p (base))
                    || (TREE_CODE (lhs) == MEM_REF
                        && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
                        && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
                        && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
                            != PARM_DECL)))
                  {
                    unlink_stmt_vdef (stmt);
                    gsi_remove (&gsi, true);
                    release_defs (stmt);
                    continue;
                  }
                if (b)
                  TREE_USED (b) = true;
              }
            else if (gimple_debug_bind_p (stmt))
              {
                tree var = gimple_debug_bind_get_var (stmt);
                if (VAR_P (var)
                    && !bitmap_bit_p (useddebug, DECL_UID (var))
                    && !is_used_p (var))
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Dead debug bind reset to %u\n",
                               DECL_UID (var));
                    gsi_remove (&gsi, true);
                    continue;
                  }
              }
            gsi_next (&gsi);
          }
      }

  if (cfun->has_simduid_loops)
    {
      class loop *loop;
      FOR_EACH_LOOP (loop, 0)
        if (loop->simduid && !is_used_p (loop->simduid))
          loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (VAR_P (var))
        {
          if (!is_used_p (var))
            {
              tree def;
              if (cfun->nonlocal_goto_save_area
                  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
                cfun->nonlocal_goto_save_area = NULL;
              /* Release any default def associated with var.  */
              if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
                {
                  set_ssa_default_def (cfun, var, NULL_TREE);
                  release_ssa_name (def);
                }
              continue;
            }
        }
      if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
        cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
        (*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
                               polymorphic_ctor_dtor_p (current_function_decl,
                                                        true) != NULL_TREE);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);

  live->livein = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  live->liveout = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  for (unsigned i = 0; map->vec_bbs.iterate (i, &bb); ++i)
    {
      bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
      bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
    }

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}


/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}


/* Visit basic block BB and propagate any required live-on-entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (!region_contains_p (live->map, pred_bb))
        continue;
      /* Variables live-on-entry from BB that aren't defined in the
         predecessor block.  This should be the live-on-entry vars to pred.
         Note that liveout is the DEFs in a block while live-on-entry is
         being calculated.
         Add these bits to the live-on-entry set for the pred.  If there are
         any changes, and pred_bb has been visited already, add it to the
         revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
                                          loe, &live->liveout[pred_bb->index]);
      if (change
          && bitmap_bit_p (visited, pred_bb->index))
        {
          bitmap_clear_bit (visited, pred_bb->index);
          *(live->stack_top)++ = pred_bb->index;
        }
    }
}
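
/* Illustrative only, not part of the original file: the propagation step
   above, restated as the per-edge dataflow equation it implements, where
   liveout temporarily holds the partitions defined in PRED while
   live-on-entry is being computed:

     live_on_entry (pred) |= live_on_entry (bb) & ~liveout (pred)

   which is exactly what bitmap_ior_and_compl_into computes.  */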


/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit the region's blocks in reverse order and propagate live-on-entry
     values into the predecessor blocks.  */
  for (unsigned i = live->map->vec_bbs.length () - 1;
       live->map->vec_bbs.iterate (i, &bb); --i)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }
}


/* Calculate the initial live-on-entry vector for SSA_NAME using
   immediate_use links.  Set the live-on-entry fields in LIVE.  Defs are
   marked temporarily in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb && region_contains_p (live->map, def_bb))
        bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live-on-entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
        {
          /* Uses in PHIs are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it
             is defined in that block, or whether it's live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
          edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
          if (e->src != def_bb && region_contains_p (live->map, e->src))
            add_block = e->src;
        }
      else if (is_gimple_debug (use_stmt))
        continue;
      else
        {
          /* If it's not defined in this block, it's live on entry.  */
          basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb && region_contains_p (live->map, use_bb))
            add_block = use_bb;
        }

      /* If there was a live-on-entry use, set the bit.  */
      if (add_block)
        {
          global = true;
          bitmap_set_bit (&live->livein[add_block->index], p);
        }
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the
     live-on-entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}


/* Calculate the live-on-exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live-on-entry calculations used the liveout vectors for defs; clear
     them.  */
  for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (virtual_operand_p (gimple_phi_result (phi)))
            continue;
          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree t = PHI_ARG_DEF (phi, i);
              int p;

              if (TREE_CODE (t) != SSA_NAME)
                continue;

              p = var_to_partition (liveinfo->map, t);
              if (p == NO_PARTITION)
                continue;
              e = gimple_phi_arg_edge (phi, i);
              if (region_contains_p (liveinfo->map, e->src))
                bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
            }
        }

      if (!region_contains_p (liveinfo->map, bb))
        continue;

      /* Add each successor's live-on-entry set to this block's
         live-on-exit set.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (region_contains_p (liveinfo->map, e->dest))
          bitmap_ior_into (&liveinfo->liveout[bb->index],
                           live_on_entry (liveinfo, e->dest));
    }
}


/* Given partition map MAP, calculate all the live-on-entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
        set_var_live_on_entry (var, live);
    }

  live_worklist (live);

  if (flag_checking)
    verify_live_on_entry (live);

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
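
/* Illustrative only, not part of the original file: a hedged sketch of
   driving the live range computation for the current function.  SOME_BB
   stands for a hypothetical basic block of interest; the coalescing work
   between map creation and compaction is elided:

     var_map map = init_var_map (num_ssa_names, NULL);
     partition_view_normal (map);
     tree_live_info_p live = calculate_live_ranges (map, true);
     bitmap loe = live_on_entry (live, some_bb);
     ... use loe ...
     delete_tree_live_info (live);
     delete_var_map (map);
*/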
\f
/* Data structure for compute_live_vars* functions.  */

struct compute_live_vars_data {
  /* Vector of bitmaps for live vars indices at the end of basic blocks,
     indexed by bb->index.  ACTIVE[ENTRY_BLOCK] must be an empty bitmap,
     ACTIVE[EXIT_BLOCK] is used for STOP_AFTER.  */
  vec<bitmap_head> active;
  /* Work bitmap of currently live variables.  */
  bitmap work;
  /* Set of interesting variables.  Variables with uids not in this
     hash_map are not tracked.  */
  live_vars_map *vars;
};

/* Callback for walk_stmt_load_store_addr_ops.  If OP is a VAR_DECL with
   uid set in DATA->vars, enter its corresponding index into bitmap
   DATA->work.  */

static bool
compute_live_vars_visit (gimple *, tree op, tree, void *pdata)
{
  compute_live_vars_data *data = (compute_live_vars_data *) pdata;
  op = get_base_address (op);
  if (op && VAR_P (op))
    if (unsigned int *v = data->vars->get (DECL_UID (op)))
      bitmap_set_bit (data->work, *v);
  return false;
}

/* Helper routine for compute_live_vars, calculating the sets of live
   variables at the end of BB, leaving the result in DATA->work.
   If STOP_AFTER is non-NULL, stop processing after that stmt.  */

static void
compute_live_vars_1 (basic_block bb, compute_live_vars_data *data,
                     gimple *stop_after)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit = compute_live_vars_visit;

  bitmap_clear (data->work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (data->work, &data->active[e->src->index]);

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), data, NULL, NULL, visit);
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          if (VAR_P (lhs))
            if (unsigned int *v = data->vars->get (DECL_UID (lhs)))
              bitmap_clear_bit (data->work, *v);
        }
      else if (!is_gimple_debug (stmt))
        walk_stmt_load_store_addr_ops (stmt, data, visit, visit, visit);
      if (stmt == stop_after)
        break;
    }
}

/* For function FN and VARS, a live_vars_map (hash map from DECL_UIDs to a
   dense set of indexes of automatic variables), compute which of those
   variables are (might be) live at the end of each basic block.  */

vec<bitmap_head>
compute_live_vars (struct function *fn, live_vars_map *vars)
{
  vec<bitmap_head> active;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  active.create (last_basic_block_for_fn (fn));
  active.quick_grow (last_basic_block_for_fn (fn));
  for (int i = 0; i < last_basic_block_for_fn (fn); i++)
    bitmap_initialize (&active[i], &bitmap_default_obstack);

  bitmap work = BITMAP_ALLOC (NULL);

  int *rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  int n_bbs = pre_and_rev_post_order_compute_fn (fn, NULL, rpo, false);

  bool changed = true;
  compute_live_vars_data data = { active, work, vars };
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          compute_live_vars_1 (bb, &data, NULL);
          if (bitmap_ior_into (&active[bb->index], work))
            changed = true;
        }
    }

  free (rpo);
  BITMAP_FREE (work);

  return active;
}

/* For ACTIVE computed by compute_live_vars, compute a bitmap of variables
   live after the STOP_AFTER statement and return that bitmap.  */

bitmap
live_vars_at_stmt (vec<bitmap_head> &active, live_vars_map *vars,
                   gimple *stop_after)
{
  bitmap work = BITMAP_ALLOC (NULL);
  compute_live_vars_data data = { active, work, vars };
  basic_block bb = gimple_bb (stop_after);
  compute_live_vars_1 (bb, &data, stop_after);
  return work;
}

/* Destroy what compute_live_vars has returned when it is no longer needed.  */

void
destroy_live_vars (vec<bitmap_head> &active)
{
  unsigned len = active.length ();
  for (unsigned i = 0; i < len; i++)
    bitmap_clear (&active[i]);

  active.release ();
}
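
/* Illustrative only, not part of the original file: a hedged sketch of a
   client of the compute_live_vars API.  VAR1 and STMT are hypothetical; the
   live_vars_map assigns each tracked DECL_UID a dense index, which is how
   the functions above address bits in the returned bitmaps:

     live_vars_map vars;
     vars.put (DECL_UID (var1), 0);
     vec<bitmap_head> active = compute_live_vars (cfun, &vars);
     bitmap live = live_vars_at_stmt (active, &vars, stmt);
     bool var1_live = bitmap_bit_p (live, 0);
     BITMAP_FREE (live);
     destroy_live_vars (active);
*/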
\f
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
        p = map->view_to_partition[x];
      else
        p = x;

      if (ssa_name (p) == NULL_TREE
          || virtual_operand_p (ssa_name (p)))
        continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
        {
          p = partition_find (map->var_partition, y);
          if (map->partition_to_view)
            p = map->partition_to_view[p];
          if (p == (int)x)
            {
              if (t++ == 0)
                {
                  fprintf (f, "Partition %d (", x);
                  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
                  fprintf (f, " - ");
                }
              fprintf (f, "%d ", y);
            }
        }
      if (t != 0)
        fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on entry to BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
        {
          fprintf (f, "\nLive on exit from BB%d : ", bb->index);
          EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
            {
              print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
              fprintf (f, "  ");
            }
          fprintf (f, "\n");
        }
    }
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live-on-entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (!region_contains_p (live->map, e->dest))
        continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
        {
          basic_block tmp;
          tree d = NULL_TREE;
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
          tmp = gimple_bb (stmt);
          if (SSA_NAME_VAR (var))
            d = ssa_default_def (cfun, SSA_NAME_VAR (var));

          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
              if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
                  fprintf (stderr, " is defined ");
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
                  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
                }
              else
                {
                  if (d != var)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
                      fprintf (stderr, " is live-on-entry to BB%d ",
                               entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
                          print_generic_expr (stderr, d, TDF_SLIM);
                          fprintf (stderr, "\n");
                        }
                      else
                        fprintf (stderr, " and there is no default def.\n");
                    }
                }
            }
          else
            if (d == var)
              {
                /* An undefined local variable does not need to be very
                   alive.  */
                if (ssa_undefined_value_p (var, false))
                  continue;

                /* The only way this var shouldn't be marked live on entry is
                   if it occurs in a PHI argument of the block.  */
                size_t z;
                bool ok = false;
                gphi_iterator gsi;
                for (gsi = gsi_start_phis (e->dest);
                     !gsi_end_p (gsi) && !ok;
                     gsi_next (&gsi))
                  {
                    gphi *phi = gsi.phi ();
                    if (virtual_operand_p (gimple_phi_result (phi)))
                      continue;
                    for (z = 0; z < gimple_phi_num_args (phi); z++)
                      if (var == gimple_phi_arg_def (phi, z))
                        {
                          ok = true;
                          break;
                        }
                  }
                if (ok)
                  continue;
                /* Expand adds unused default defs for PARM_DECLs and
                   RESULT_DECLs.  They're ok.  */
                if (has_zero_uses (var)
                    && SSA_NAME_VAR (var)
                    && !VAR_P (SSA_NAME_VAR (var)))
                  continue;
                num++;
                print_generic_expr (stderr, var, TDF_SLIM);
                fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
                         entry_block);
                fprintf (stderr, "but it is a default def so it should be.\n");
              }
        }
    }
  gcc_assert (num <= 0);
}