/* Liveness for SSA trees.
   Copyright (C) 2003-2021 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "gimple-iterator.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "debug.h"
#include "tree-ssa.h"
#include "ipa-utils.h"
#include "cfgloop.h"
#include "stringpool.h"
#include "attribs.h"
#include "optinfo.h"
#include "gimple-walk.h"
#include "cfganal.h"

static void verify_live_on_entry (tree_live_info_p);


/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
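
/* A minimal lifecycle sketch (illustrative only; NAME1 and NAME2 stand for
   arbitrary non-virtual SSA_NAMEs and are not defined here):

     var_map map = init_var_map (num_ssa_names, NULL);
     var_union (map, name1, name2);      (try to coalesce two names)
     partition_view_normal (map);        (compact to the used partitions)
     int p = var_to_partition (map, name1);
     tree rep = partition_to_var (map, p);
     delete_var_map (map);
*/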


/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE for region, initialize and return
   it.  The region is a loop if LOOP is non-NULL, otherwise it is the current
   function.  */

var_map
init_var_map (int size, class loop *loop)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  map->vec_bbs = vNULL;
  if (loop)
    {
      map->bmp_bbs = BITMAP_ALLOC (NULL);
      map->outofssa_p = false;
      basic_block *bbs = get_loop_body_in_dom_order (loop);
      for (unsigned i = 0; i < loop->num_nodes; ++i)
	{
	  bitmap_set_bit (map->bmp_bbs, bbs[i]->index);
	  map->vec_bbs.safe_push (bbs[i]);
	}
      free (bbs);
    }
  else
    {
      map->bmp_bbs = NULL;
      map->outofssa_p = true;
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	map->vec_bbs.safe_push (bb);
    }
  return map;
}


/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  if (map->bmp_bbs)
    BITMAP_FREE (map->bmp_bbs);
  map->vec_bbs.release ();
  free (map);
}


/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}


/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
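
/* For instance (an illustrative sketch): if only partitions 3 and 7 survive
   partition_view_init, partition_view_fini maps them to view indices 0 and 1:

     partition_to_view[3] == 0, partition_to_view[7] == 1
     view_to_partition[0] == 3, view_to_partition[1] == 7

   partition_find on var_partition is unaffected; only the external
   numbering changes.  */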


/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in the
   varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned int x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))
	      || (SSA_NAME_VAR (ssa_name (tmp))
		  && !VAR_P (SSA_NAME_VAR (ssa_name (tmp))))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}


/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *)xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *)xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}


/* Create a partition view which includes all the used partitions in MAP.  */

void
partition_view_normal (var_map map)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  var_map_base_fini (map);
}


/* Create a partition view in MAP which includes just partitions which occur
   in the bitmap ONLY.  */

void
partition_view_bitmap (var_map map, bitmap only)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);
  /* USED is not consumed by partition_view_fini (which frees SELECTED),
     so release it here.  */
  BITMAP_FREE (used);

  var_map_base_fini (map);
}
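
/* A possible use (sketch, hypothetical caller): restrict the view to a set
   of SSA versions gathered elsewhere, e.g. the names appearing in copies a
   coalescer wants to consider (NAME1 and NAME2 are stand-ins):

     bitmap interesting = BITMAP_ALLOC (NULL);
     bitmap_set_bit (interesting, SSA_NAME_VERSION (name1));
     bitmap_set_bit (interesting, SSA_NAME_VERSION (name2));
     partition_view_bitmap (map, interesting);
     BITMAP_FREE (interesting);
*/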


static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFs, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLs; parameters and return results are not
     eliminated as unused.  */
  if (VAR_P (t))
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non-global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and
	the block has the same abstract origin as the outer block and
	declares no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to save
   copying work done by the inliner.  */

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with block_ultimate_origin
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block)
    {
      tree fn = block_ultimate_origin (scope);
      if (fn && TREE_CODE (fn) == FUNCTION_DECL)
	{
	  in_ctor_dtor_block = false;
	  unused = false;
	}
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may get end up with
	 different results.  */
      else if (VAR_P (*t) && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exception are the scope blocks not containing any instructions
	 at all so the user can't get into the scopes in the first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing, saving quite
	 a lot of memory.

	 For the sake of -g3, we keep around those vars but we don't count
	 this as use of block, so innermost block with no used vars and no
	 instructions can be considered dead.  We only want to keep around
	 blocks the user can breakpoint into and ask about the value of
	 optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can be always
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE
	   && !optinfo_wants_inlining_info_p ())
    {
      /* Even for -g0 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = BLOCK_ORIGIN (scope);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined
     function.  Inlined functions are always represented by blocks
     with block_ultimate_origin being set to FUNCTION_DECL and
     DECL_SOURCE_LOCATION set, unless they expand to nothing...  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLs under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple *stmt;

      next:
	stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  {
	    /* Elide debug marker stmts that have an associated BLOCK from an
	       inline instance removed with also the outermost scope BLOCK of
	       said inline instance removed.  If the outermost scope BLOCK of
	       said inline instance is preserved use that in place of the
	       removed BLOCK.  That keeps the marker associated to the correct
	       inline instance (or no inline instance in case it was not from
	       an inline instance).  */
	    if (gimple_debug_nonbind_marker_p (stmt)
		&& BLOCK_ABSTRACT_ORIGIN (b))
	      {
		while (TREE_CODE (b) == BLOCK
		       && !inlined_function_outer_scope_p (b))
		  b = BLOCK_SUPERCONTEXT (b);
		if (TREE_CODE (b) == BLOCK)
		  {
		    if (TREE_USED (b))
		      {
			gimple_set_block (stmt, b);
			continue;
		      }
		    gsi_remove (&gsi, true);
		    if (gsi_end_p (gsi))
		      break;
		    goto next;
		  }
	      }
	    gimple_set_block (stmt, NULL);
	  }
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

static void
dump_scope_block (FILE *file, int indent, tree scope, dump_flags_t flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s", indent, "", BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  if (BLOCK_FRAGMENT_ORIGIN (scope))
    fprintf (file, " Fragment of : #%i",
	     BLOCK_NUMBER (BLOCK_FRAGMENT_ORIGIN (scope)));
  else if (BLOCK_FRAGMENT_CHAIN (scope))
    {
      fprintf (file, " Fragment chain :");
      for (t = BLOCK_FRAGMENT_CHAIN (scope); t;
	   t = BLOCK_FRAGMENT_CHAIN (t))
	fprintf (file, " #%i", BLOCK_NUMBER (t));
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, dump_flags_t flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}


/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, dump_flags_t flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}


/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (dump_flags_t flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);
  auto_bitmap useddebug;

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  /* If we wanted to mark the block referenced by the inline
	     entry point marker as used, this would be a good spot to
	     do it.  If the block is not otherwise used, the stmt will
	     be cleaned up in clear_unused_block_pointer.  */
	  if (is_gimple_debug (stmt))
	    {
	      if (gimple_debug_bind_p (stmt))
		{
		  tree var = gimple_debug_bind_get_var (stmt);
		  if (VAR_P (var))
		    {
		      if (!gimple_debug_bind_get_value (stmt))
			/* Run the 2nd phase.  */
			have_local_clobbers = true;
		      else
			bitmap_set_bit (useddebug, DECL_UID (var));
		    }
		}
	      continue;
	    }

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((VAR_P (base) && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    else if (gimple_debug_bind_p (stmt))
	      {
		tree var = gimple_debug_bind_get_var (stmt);
		if (VAR_P (var)
		    && !bitmap_bit_p (useddebug, DECL_UID (var))
		    && !is_used_p (var))
		  {
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      fprintf (dump_file, "Dead debug bind reset to %u\n",
			       DECL_UID (var));
		    gsi_remove (&gsi, true);
		    continue;
		  }
	      }
	    gsi_next (&gsi);
	  }
      }

  if (cfun->has_simduid_loops)
    {
      class loop *loop;
      FOR_EACH_LOOP (loop, 0)
	if (loop->simduid && !is_used_p (loop->simduid))
	  loop->simduid = NULL_TREE;
    }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (VAR_P (var))
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (VAR_P (var) && DECL_HARD_REGISTER (var) && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl),
			       polymorphic_ctor_dtor_p (current_function_decl,
							true) != NULL_TREE);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);

  live->livein = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  live->liveout = XCNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  for (unsigned i = 0; map->vec_bbs.iterate (i, &bb); ++i)
    {
      bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);
      bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);
    }

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}


/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}


/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (!region_contains_p (live->map, pred_bb))
	continue;
      /* Variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.
	 Add these bits to live-on-entry for the pred.  If there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
					  loe, &live->liveout[pred_bb->index]);
      if (change
	  && bitmap_bit_p (visited, pred_bb->index))
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}


/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  unsigned b;
  basic_block bb;
  auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit region's blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  for (unsigned i = live->map->vec_bbs.length () - 1;
       live->map->vec_bbs.iterate (i, &bb); --i)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }
}
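
/* Illustration (sketch): for a diamond CFG bb1 -> {bb2, bb3} -> bb4 where a
   name is defined in bb1 and used only in bb4, the reverse sweep starts from
   the bit set in livein(bb4) and propagates it into livein(bb2) and
   livein(bb3); the bit does not enter livein(bb1) because the def in bb1 is
   recorded in liveout(bb1) at this stage and bitmap_ior_and_compl_into masks
   it out.  Back edges in loops are what push already-visited blocks onto the
   revisit stack.  */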


/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple *stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb && region_contains_p (live->map, def_bb))
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple *use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHIs are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != def_bb && region_contains_p (live->map, e->src))
	    add_block = e->src;
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb && region_contains_p (live->map, use_bb))
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}
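
/* Example of the PHI-use rule above (sketch):

     bb2: x_1 = ...
     bb4: x_3 = PHI <x_1 (bb2), x_2 (bb3)>

   the use of x_1 in the PHI is treated as live at exit from bb2, not live
   on entry to bb4, since out-of-ssa would place the copy at the end of bb2.
   Here x_1 gets no live-on-entry bit at all, because the PHI argument's
   source block and the def block are both bb2.  */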


/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* live on entry calculations used liveout vectors for defs, clear them.  */
  for (unsigned i = 0; liveinfo->map->vec_bbs.iterate (i, &bb); ++i)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (region_contains_p (liveinfo->map, e->src))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      if (!region_contains_p (liveinfo->map, bb))
	continue;

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (region_contains_p (liveinfo->map, e->dest))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}
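
/* In dataflow terms, the above computes (sketch):

     liveout(bb) = (union over successors s of livein(s))
		   union { p : partition p used by a PHI arg on edge bb->s }

   livein itself is already a fixed point after live_worklist, so a single
   pass over the blocks suffices here.  */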


/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

  if (flag_checking)
    verify_live_on_entry (live);

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}
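
/* Typical usage (illustrative sketch; BB and P stand for a block and a
   partition index of interest):

     tree_live_info_p live = calculate_live_ranges (map, true);
     if (bitmap_bit_p (live_on_entry (live, bb), p))
       ... partition P is live on entry to BB ...
     if (bitmap_bit_p (live_on_exit (live, bb), p))
       ... and/or live on exit ...
     delete_tree_live_info (live);

   Pass false for WANT_LIVEIN when only the live-on-exit sets are needed;
   the livein vectors are then released before returning.  */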
\f
/* Data structure for compute_live_vars* functions.  */

struct compute_live_vars_data {
  /* Vector of bitmaps for live vars indices at the end of basic blocks,
     indexed by bb->index.  ACTIVE[ENTRY_BLOCK] must be an empty bitmap,
     ACTIVE[EXIT_BLOCK] is used for STOP_AFTER.  */
  vec<bitmap_head> active;
  /* Work bitmap of currently live variables.  */
  bitmap work;
  /* Set of interesting variables.  Variables with uids not in this
     hash_map are not tracked.  */
  live_vars_map *vars;
};

/* Callback for walk_stmt_load_store_addr_ops.  If OP is a VAR_DECL with
   uid set in DATA->vars, enter its corresponding index into bitmap
   DATA->work.  */

static bool
compute_live_vars_visit (gimple *, tree op, tree, void *pdata)
{
  compute_live_vars_data *data = (compute_live_vars_data *) pdata;
  op = get_base_address (op);
  if (op && VAR_P (op))
    if (unsigned int *v = data->vars->get (DECL_UID (op)))
      bitmap_set_bit (data->work, *v);
  return false;
}

/* Helper routine for compute_live_vars, calculating the sets of live
   variables at the end of BB, leaving the result in DATA->work.
   If STOP_AFTER is non-NULL, stop processing after that stmt.  */

static void
compute_live_vars_1 (basic_block bb, compute_live_vars_data *data,
		     gimple *stop_after)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit = compute_live_vars_visit;

  bitmap_clear (data->work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (data->work, &data->active[e->src->index]);

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), data, NULL, NULL, visit);
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  if (VAR_P (lhs))
	    if (unsigned int *v = data->vars->get (DECL_UID (lhs)))
	      bitmap_clear_bit (data->work, *v);
	}
      else if (!is_gimple_debug (stmt))
	walk_stmt_load_store_addr_ops (stmt, data, visit, visit, visit);
      if (stmt == stop_after)
	break;
    }
}

/* For function FN and live_vars_map (hash map from DECL_UIDs to a dense set
   of indexes of automatic variables) VARS, compute which of those variables
   are (might be) live at the end of each basic block.  */

vec<bitmap_head>
compute_live_vars (struct function *fn, live_vars_map *vars)
{
  vec<bitmap_head> active;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  active.create (last_basic_block_for_fn (fn));
  active.quick_grow (last_basic_block_for_fn (fn));
  for (int i = 0; i < last_basic_block_for_fn (fn); i++)
    bitmap_initialize (&active[i], &bitmap_default_obstack);

  bitmap work = BITMAP_ALLOC (NULL);

  int *rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  int n_bbs = pre_and_rev_post_order_compute_fn (fn, NULL, rpo, false);

  bool changed = true;
  compute_live_vars_data data = { active, work, vars };
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  compute_live_vars_1 (bb, &data, NULL);
	  if (bitmap_ior_into (&active[bb->index], work))
	    changed = true;
	}
    }

  free (rpo);
  BITMAP_FREE (work);

  return active;
}
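
/* Usage sketch (hypothetical caller; VAR1/VAR2 stand for automatic
   VAR_DECLs of interest, STMT for a statement in cfun):

     live_vars_map vars;
     unsigned int num = 0;
     vars.put (DECL_UID (var1), num++);
     vars.put (DECL_UID (var2), num++);
     vec<bitmap_head> active = compute_live_vars (cfun, &vars);
     bitmap live = live_vars_at_stmt (active, &vars, stmt);
     ... test bits 0 and 1 of LIVE ...
     BITMAP_FREE (live);
     destroy_live_vars (active);

   live_vars_at_stmt returns a freshly allocated bitmap, so the caller
   frees it separately from ACTIVE.  */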

/* For ACTIVE computed by compute_live_vars, compute a bitmap of variables
   live after the STOP_AFTER statement and return that bitmap.  */

bitmap
live_vars_at_stmt (vec<bitmap_head> &active, live_vars_map *vars,
		   gimple *stop_after)
{
  bitmap work = BITMAP_ALLOC (NULL);
  compute_live_vars_data data = { active, work, vars };
  basic_block bb = gimple_bb (stop_after);
  compute_live_vars_1 (bb, &data, stop_after);
  return work;
}

/* Destroy what compute_live_vars has returned when it is no longer needed.  */

void
destroy_live_vars (vec<bitmap_head> &active)
{
  unsigned len = active.length ();
  for (unsigned i = 0; i < len; i++)
    bitmap_clear (&active[i]);

  active.release ();
}
\f
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int)x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, " ");
	    }
	  fprintf (f, "\n");
	}
    }
}


/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}


/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple *stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (!region_contains_p (live->map, e->dest))
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else
		{
		  if (d != var)
		    {
		      num++;
		      print_generic_expr (stderr, var, TDF_SLIM);
		      fprintf (stderr, " is live-on-entry to BB%d ",
			       entry_block);
		      if (d)
			{
			  fprintf (stderr, " but is not the default def of ");
			  print_generic_expr (stderr, d, TDF_SLIM);
			  fprintf (stderr, "\n");
			}
		      else
			fprintf (stderr, " and there is no default def.\n");
		    }
		}
	    }
	  else
	    if (d == var)
	      {
		/* An undefined local variable does not need to be very
		   alive.  */
		if (ssa_undefined_value_p (var, false))
		  continue;

		/* The only way this var shouldn't be marked live on entry is
		   if it occurs in a PHI argument of the block.  */
		size_t z;
		bool ok = false;
		gphi_iterator gsi;
		for (gsi = gsi_start_phis (e->dest);
		     !gsi_end_p (gsi) && !ok;
		     gsi_next (&gsi))
		  {
		    gphi *phi = gsi.phi ();
		    if (virtual_operand_p (gimple_phi_result (phi)))
		      continue;
		    for (z = 0; z < gimple_phi_num_args (phi); z++)
		      if (var == gimple_phi_arg_def (phi, z))
			{
			  ok = true;
			  break;
			}
		  }
		if (ok)
		  continue;
		/* Expand adds unused default defs for PARM_DECLs and
		   RESULT_DECLs.  They're ok.  */
		if (has_zero_uses (var)
		    && SSA_NAME_VAR (var)
		    && !VAR_P (SSA_NAME_VAR (var)))
		  continue;
		num++;
		print_generic_expr (stderr, var, TDF_SLIM);
		fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
			 entry_block);
		fprintf (stderr, "but it is a default def so it should be.\n");
	      }
	}
    }
  gcc_assert (num <= 0);
}