gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2021 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

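/* For instance, for

     switch (x) { case 1: case 2: goto L; default: break; }

   the single edge to L's block maps to the chain "case 1 -> case 2"
   linked through CASE_CHAIN, so redirecting that edge can update both
   case labels without rescanning the whole switch.  (Illustrative
   sketch of the mapping.)  */
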
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_unroll_kind:
          loop->unroll
            = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
          cfun->has_unroll = true;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        case annot_expr_parallel_kind:
          loop->can_be_parallel = true;
          loop->safelen = INT_MAX;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

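/* As an illustration, "#pragma GCC ivdep" on a loop leaves the loop
   condition wrapped in an internal call, roughly:

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
     if (_2 != 0) goto <loop body>; else goto <exit>;

   The function above sets loop->safelen = INT_MAX for it and then
   replaces the call with the plain copy "_2 = _1".  (Sketch; the exact
   dump spelling of the operands varies.)  */
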
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_unroll_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
            case annot_expr_parallel_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

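/* For example, the C fragment

     void *p = &&lab;
     goto *p;

   becomes a GIMPLE_GOTO whose destination is a pointer value rather
   than a LABEL_DECL, so computed_goto_p returns true for it, while a
   plain "goto lab;" yields false.  (Illustrative C fragment.)  */
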
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
         optimize away those calls, but rather turn them into
         __ubsan_handle_builtin_unreachable () or __builtin_trap ()
         later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

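/* For example, a call to longjmp (ECF_NORETURN), or a setjmp-style call
   that can make an abnormal goto, gets the flag set here, so make_blocks_1
   terminates the basic block right after the call.  (Illustrative callers;
   the predicate chain above is the authoritative list.)  */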

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
         stmt is before nondebug stmts.  Once stmt reaches a nondebug
         nonlabel, prev_stmt will be set to it, so that
         stmt_starts_bb_p will know to start a new block if a label is
         found.  However, if stmt was a label after debug stmts only,
         keep the label in prev_stmt even if we find further debug
         stmts, for there may be other labels after them, and they
         should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
        prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
          prev_stmt = NULL;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
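          /* E.g. "x = setjmp (&buf);" is rewritten as

               tmp = setjmp (&buf);   <- ends this block
               x = tmp;               <- starts the fallthru block

             so the abnormal edge leaving the call still sees the old
             value of X.  (Illustrative; any statement that can make an
             abnormal goto is treated this way.)  */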
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* If this is the first label we encounter (latest in SEQ)
             before nondebug stmts, record its position.  */
          if (is_a <glabel *> (stmt))
            {
              if (gsi_end_p (label))
                label = i;
              continue;
            }

          /* Without a recorded label position to move debug stmts to,
             there's nothing to do.  */
          if (gsi_end_p (label))
            continue;

          /* Move the debug stmt at I after LABEL.  */
          if (is_gimple_debug (stmt))
            {
              gcc_assert (gimple_debug_nonbind_marker_p (stmt));
              /* As STMT is removed, I advances to the stmt after
                 STMT, so the gsi_prev in the for "increment"
                 expression gets us to the stmt we're to visit after
                 STMT.  LABEL, however, would advance to the moved
                 stmt if we passed it to gsi_move_after, so pass it a
                 copy instead, so as to keep LABEL pointing to the
                 LABEL.  */
              gimple_stmt_iterator copy = label;
              gsi_move_after (&i, &copy);
              continue;
            }

          /* There aren't any (more?) debug stmts before label, so
             there isn't anything else to move after it.  */
          label = gsi_none ();
        }
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return that dispatcher's
   basic block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if
   COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

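/* For instance, two computed gotos "goto *p;" and "goto *q;" get factored
   into

     gotovar = p;  goto <dispatcher>;   ...   gotovar = q;  goto <dispatcher>;
     <dispatcher>:  goto *gotovar;

   keeping the number of edges linear; the factoring is undone after the
   conversion back to normal form.  (Sketch of the transformation
   performed above.)  */
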
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (cfun, label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count ().initialized_p ())
            cnt += e->count ();
          else
            all = false;
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, &locus_e,
                                     gimple_location (first)))
              || (last && same_line_p (locus, &locus_e,
                                       gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator
                  = next_discriminator_for_locus (locus_e.line);
              else
                e->dest->discriminator
                  = next_discriminator_for_locus (locus_e.line);
            }
        }
    }
}
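
/* For example, in "if (p) q (); else r ();" written on a single source
   line, both arms share that line; assign_discriminators gives one of
   the two blocks a nonzero discriminator so that sample-based profilers
   (e.g. AutoFDO) can attribute samples to the right arm.  (Illustrative
   source fragment.)  */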

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
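
/* E.g. for "if (a_1 < b_2) goto <L1>; else goto <L2>;" this creates an
   EDGE_TRUE_VALUE edge to L1's block and an EDGE_FALSE_VALUE edge to
   L2's block, and then drops the two labels from the GIMPLE_COND.
   (Illustrative GIMPLE fragment.)  */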


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad, label_for_bb);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab, label_for_bb);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab, label_for_bb);
        break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
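
/* For example, if a block starts with a user label L1 followed by an
   artificial label <D.2345>, step 1 selects L1 as the leading label,
   step 2 redirects any reference to <D.2345> (e.g. from a goto or a
   switch case) to L1, and step 3 then deletes the unused artificial
   label.  (Illustrative; D.2345 is a made-up artificial label name.)  */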

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
                                         last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label, label_for_bb);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *txn = as_a <gtransaction *> (stmt);

            label = gimple_transaction_label_norm (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_norm (txn, new_label);
              }

            label = gimple_transaction_label_uninst (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_uninst (txn, new_label);
              }

            label = gimple_transaction_label_over (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_over (txn, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;
  hash_set<tree> *removed_labels = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
         whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL
          || base_bb == default_bb
          || (removed_labels
              && removed_labels->contains (CASE_LABEL (base_case))))
        {
          i++;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (next_index < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, next_index);
          basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
          wide_int bhp1 = wi::to_wide (base_high) + 1;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && (removed_labels == NULL
                  || !removed_labels->contains (CASE_LABEL (merge_case)))
              && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
            {
              base_high
                = (CASE_HIGH (merge_case)
                   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
              CASE_HIGH (base_case) = base_high;
              next_index++;
            }
          else
            break;
        }

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
          && gimple_seq_unreachable_p (bb_seq (base_bb))
          /* Don't optimize this if __builtin_unreachable () is the
             implicitly added one by the C++ FE too early, before
             -Wreturn-type can be diagnosed.  We'll optimize it later
             during switchconv pass or any other cfg cleanup.  */
          && (gimple_in_ssa_p (cfun)
              || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
                  != BUILTINS_LOCATION)))
        {
          edge base_edge = find_edge (gimple_bb (stmt), base_bb);
          if (base_edge != NULL)
            {
              for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
                  {
                    if (FORCED_LABEL (gimple_label_label (stmt))
                        || DECL_NONLOCAL (gimple_label_label (stmt)))
                      {
                        /* Forced/non-local labels aren't going to be removed,
                           but they will be moved to some neighbouring basic
                           block.  If some later case label refers to one of
                           those labels, we should throw that case away rather
                           than keeping it around and referring to some random
                           other basic block without an edge to it.  */
1810 if (removed_labels == NULL)
1811 removed_labels = new hash_set<tree>;
1812 removed_labels->add (gimple_label_label (stmt));
1813 }
1814 }
1815 else
1816 break;
1817 remove_edge_and_dominated_blocks (base_edge);
1818 }
1819 i = next_index;
1820 continue;
1821 }
1822
1823 if (new_size < i)
1824 gimple_switch_set_label (stmt, new_size,
1825 gimple_switch_label (stmt, i));
1826 i = next_index;
1827 new_size++;
1828 }
1829
1830 gcc_assert (new_size <= old_size);
1831
1832 if (new_size < old_size)
1833 gimple_switch_set_num_labels (stmt, new_size);
1834
1835 delete removed_labels;
1836 return new_size < old_size;
1837 }
1838
1839 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1840 and scan the sorted vector of cases. Combine the ones jumping to the
1841 same label. */
1842
1843 bool
1844 group_case_labels (void)
1845 {
1846 basic_block bb;
1847 bool changed = false;
1848
1849 FOR_EACH_BB_FN (bb, cfun)
1850 {
1851 gimple *stmt = last_stmt (bb);
1852 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1853 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1854 }
1855
1856 return changed;
1857 }
1858
1859 /* Checks whether we can merge block B into block A. */
1860
1861 static bool
1862 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1863 {
1864 gimple *stmt;
1865
1866 if (!single_succ_p (a))
1867 return false;
1868
1869 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1870 return false;
1871
1872 if (single_succ (a) != b)
1873 return false;
1874
1875 if (!single_pred_p (b))
1876 return false;
1877
1878 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1879 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1880 return false;
1881
1882 /* If A ends by a statement causing exceptions or something similar, we
1883 cannot merge the blocks. */
1884 stmt = last_stmt (a);
1885 if (stmt && stmt_ends_bb_p (stmt))
1886 return false;
1887
1888 /* Do not allow a block with only a non-local label to be merged. */
1889 if (stmt)
1890 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1891 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1892 return false;
1893
1894 /* Examine the labels at the beginning of B. */
1895 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1896 gsi_next (&gsi))
1897 {
1898 tree lab;
1899 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1900 if (!label_stmt)
1901 break;
1902 lab = gimple_label_label (label_stmt);
1903
1904 /* Do not remove user-forced labels or, at -O0, any user labels. */
1905 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1906 return false;
1907 }
1908
1909 /* Protect simple loop latches. We only want to avoid merging
1910 the latch with the loop header or with a block in another
1911 loop in this case. */
1912 if (current_loops
1913 && b->loop_father->latch == b
1914 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1915 && (b->loop_father->header == a
1916 || b->loop_father != a->loop_father))
1917 return false;
1918
1919 /* It must be possible to eliminate all phi nodes in B. If ssa form
1920 is not up-to-date and a name-mapping is registered, we cannot eliminate
1921 any phis. Symbols marked for renaming are never a problem though. */
1922 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1923 gsi_next (&gsi))
1924 {
1925 gphi *phi = gsi.phi ();
1926 /* Technically only new names matter. */
1927 if (name_registered_for_update_p (PHI_RESULT (phi)))
1928 return false;
1929 }
1930
1931 /* When not optimizing, don't merge if we'd lose goto_locus. */
1932 if (!optimize
1933 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1934 {
1935 location_t goto_locus = single_succ_edge (a)->goto_locus;
1936 gimple_stmt_iterator prev, next;
1937 prev = gsi_last_nondebug_bb (a);
1938 next = gsi_after_labels (b);
1939 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1940 gsi_next_nondebug (&next);
1941 if ((gsi_end_p (prev)
1942 || gimple_location (gsi_stmt (prev)) != goto_locus)
1943 && (gsi_end_p (next)
1944 || gimple_location (gsi_stmt (next)) != goto_locus))
1945 return false;
1946 }
1947
1948 return true;
1949 }
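
/* A minimal sketch of the only CFG shape that qualifies, assuming no
   EH or abnormal edges, nonlocal labels, protected loop latches or
   pending PHI updates are involved:

       A
       |  (single, non-complex fallthru edge)
       v        ==>   A'  (A's statements followed by B's)
       B

   i.e. A has exactly one successor, B, and B has exactly one
   predecessor, A.  */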
1950
1951 /* Replaces all uses of NAME by VAL. */
1952
1953 void
1954 replace_uses_by (tree name, tree val)
1955 {
1956 imm_use_iterator imm_iter;
1957 use_operand_p use;
1958 gimple *stmt;
1959 edge e;
1960
1961 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1962 {
1963 /* Mark the block if we change the last stmt in it. */
1964 if (cfgcleanup_altered_bbs
1965 && stmt_ends_bb_p (stmt))
1966 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1967
1968 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1969 {
1970 replace_exp (use, val);
1971
1972 if (gimple_code (stmt) == GIMPLE_PHI)
1973 {
1974 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1975 PHI_ARG_INDEX_FROM_USE (use));
1976 if (e->flags & EDGE_ABNORMAL
1977 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1978 {
1979 /* This can only occur for virtual operands, since
1980 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1981 would prevent replacement. */
1982 gcc_checking_assert (virtual_operand_p (name));
1983 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1984 }
1985 }
1986 }
1987
1988 if (gimple_code (stmt) != GIMPLE_PHI)
1989 {
1990 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1991 gimple *orig_stmt = stmt;
1992 size_t i;
1993
1994 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1995 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1996 only change something from non-invariant to invariant, and only
1997 when propagating constants. */
1998 if (is_gimple_min_invariant (val))
1999 for (i = 0; i < gimple_num_ops (stmt); i++)
2000 {
2001 tree op = gimple_op (stmt, i);
2002 /* Operands may be empty here. For example, the labels
2003 of a GIMPLE_COND are nulled out following the creation
2004 of the corresponding CFG edges. */
2005 if (op && TREE_CODE (op) == ADDR_EXPR)
2006 recompute_tree_invariant_for_addr_expr (op);
2007 }
2008
2009 if (fold_stmt (&gsi))
2010 stmt = gsi_stmt (gsi);
2011
2012 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2013 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2014
2015 update_stmt (stmt);
2016 }
2017 }
2018
2019 gcc_checking_assert (has_zero_uses (name));
2020
2021 /* Also update the trees stored in loop structures. */
2022 if (current_loops)
2023 {
2024 class loop *loop;
2025
2026 FOR_EACH_LOOP (loop, 0)
2027 {
2028 substitute_in_loop_info (loop, name, val);
2029 }
2030 }
2031 }
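
/* For illustration (SSA names hypothetical), after

     replace_uses_by (x_1, build_int_cst (integer_type_node, 4));

   a use such as

     y_2 = x_1 + 1;

   has its operand replaced and is then refolded to

     y_2 = 5;

   leaving x_1 with zero remaining uses, as asserted above.  */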
2032
2033 /* Merge block B into block A. */
2034
2035 static void
2036 gimple_merge_blocks (basic_block a, basic_block b)
2037 {
2038 gimple_stmt_iterator last, gsi;
2039 gphi_iterator psi;
2040
2041 if (dump_file)
2042 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2043
2044 /* Remove all single-valued PHI nodes from block B of the form
2045 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2046 gsi = gsi_last_bb (a);
2047 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2048 {
2049 gimple *phi = gsi_stmt (psi);
2050 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2051 gimple *copy;
2052 bool may_replace_uses = (virtual_operand_p (def)
2053 || may_propagate_copy (def, use));
2054
2055 /* If we maintain loop-closed SSA form, do not propagate arguments
2056 of loop exit phi nodes. */
2057 if (current_loops
2058 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2059 && !virtual_operand_p (def)
2060 && TREE_CODE (use) == SSA_NAME
2061 && a->loop_father != b->loop_father)
2062 may_replace_uses = false;
2063
2064 if (!may_replace_uses)
2065 {
2066 gcc_assert (!virtual_operand_p (def));
2067
2068 /* Note that just emitting the copies is fine -- there is no problem
2069 with ordering of phi nodes. This is because A is the single
2070 predecessor of B, therefore results of the phi nodes cannot
2071 appear as arguments of the phi nodes. */
2072 copy = gimple_build_assign (def, use);
2073 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2074 remove_phi_node (&psi, false);
2075 }
2076 else
2077 {
2078 /* If we deal with a PHI for virtual operands, we can simply
2079 propagate these without fussing with folding or updating
2080 the stmt. */
2081 if (virtual_operand_p (def))
2082 {
2083 imm_use_iterator iter;
2084 use_operand_p use_p;
2085 gimple *stmt;
2086
2087 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2088 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2089 SET_USE (use_p, use);
2090
2091 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2092 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2093 }
2094 else
2095 replace_uses_by (def, use);
2096
2097 remove_phi_node (&psi, true);
2098 }
2099 }
2100
2101 /* Ensure that B follows A. */
2102 move_block_after (b, a);
2103
2104 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2105 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2106
2107 /* Remove labels from B and set gimple_bb to A for other statements. */
2108 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2109 {
2110 gimple *stmt = gsi_stmt (gsi);
2111 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2112 {
2113 tree label = gimple_label_label (label_stmt);
2114 int lp_nr;
2115
2116 gsi_remove (&gsi, false);
2117
2118 /* Now that we can thread computed gotos, we might have
2119 a situation where we have a forced label in block B.
2120 However, the label at the start of block B might still be
2121 used in other ways (think about the runtime checking for
2122 Fortran assigned gotos). So we cannot just delete the
2123 label. Instead we move the label to the start of block A. */
2124 if (FORCED_LABEL (label))
2125 {
2126 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2127 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2128 }
2129 /* Other user labels are kept around in the form of a debug stmt. */
2130 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2131 {
2132 gimple *dbg = gimple_build_debug_bind (label,
2133 integer_zero_node,
2134 stmt);
2135 gimple_debug_bind_reset_value (dbg);
2136 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2137 }
2138
2139 lp_nr = EH_LANDING_PAD_NR (label);
2140 if (lp_nr)
2141 {
2142 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2143 lp->post_landing_pad = NULL;
2144 }
2145 }
2146 else
2147 {
2148 gimple_set_bb (stmt, a);
2149 gsi_next (&gsi);
2150 }
2151 }
2152
2153 /* When merging two BBs, if their counts are different, the larger count
2154 is selected as the new bb count. This is to handle inconsistent
2155 profiles. */
2156 if (a->loop_father == b->loop_father)
2157 {
2158 a->count = a->count.merge (b->count);
2159 }
2160
2161 /* Merge the sequences. */
2162 last = gsi_last_bb (a);
2163 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2164 set_bb_seq (b, NULL);
2165
2166 if (cfgcleanup_altered_bbs)
2167 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2168 }
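
/* An illustrative before/after (SSA names hypothetical):

     <bb A>:                         <bb A>:
       x_1 = f ();                     x_1 = f ();
                            ==>        use (x_1);
     <bb B>:
       y_2 = PHI <x_1(A)>
       use (y_2);

   Here the degenerate PHI is removed and x_1 propagated into the use;
   when propagation is not allowed, a copy y_2 = x_1 is emitted at the
   end of A instead.  */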
2169
2170
2171 /* Return the one of the two successors of BB that is not reachable by a
2172 complex edge, if there is one; else, return BB. We use
2173 this in optimizations that use post-dominators for their heuristics,
2174 to catch the cases in C++ where function calls are involved. */
2175
2176 basic_block
2177 single_noncomplex_succ (basic_block bb)
2178 {
2179 edge e0, e1;
2180 if (EDGE_COUNT (bb->succs) != 2)
2181 return bb;
2182
2183 e0 = EDGE_SUCC (bb, 0);
2184 e1 = EDGE_SUCC (bb, 1);
2185 if (e0->flags & EDGE_COMPLEX)
2186 return e1->dest;
2187 if (e1->flags & EDGE_COMPLEX)
2188 return e0->dest;
2189
2190 return bb;
2191 }
2192
2193 /* CALL is a gimple call statement. Set the current_function_calls_* flags. */
2194
2195 void
2196 notice_special_calls (gcall *call)
2197 {
2198 int flags = gimple_call_flags (call);
2199
2200 if (flags & ECF_MAY_BE_ALLOCA)
2201 cfun->calls_alloca = true;
2202 if (flags & ECF_RETURNS_TWICE)
2203 cfun->calls_setjmp = true;
2204 }
2205
2206
2207 /* Clear flags set by notice_special_calls. Used by dead code removal
2208 to update the flags. */
2209
2210 void
2211 clear_special_calls (void)
2212 {
2213 cfun->calls_alloca = false;
2214 cfun->calls_setjmp = false;
2215 }
2216
2217 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2218
2219 static void
2220 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2221 {
2222 /* Since this block is no longer reachable, we can just delete all
2223 of its PHI nodes. */
2224 remove_phi_nodes (bb);
2225
2226 /* Remove edges to BB's successors. */
2227 while (EDGE_COUNT (bb->succs) > 0)
2228 remove_edge (EDGE_SUCC (bb, 0));
2229 }
2230
2231
2232 /* Remove statements of basic block BB. */
2233
2234 static void
2235 remove_bb (basic_block bb)
2236 {
2237 gimple_stmt_iterator i;
2238
2239 if (dump_file)
2240 {
2241 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2242 if (dump_flags & TDF_DETAILS)
2243 {
2244 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2245 fprintf (dump_file, "\n");
2246 }
2247 }
2248
2249 if (current_loops)
2250 {
2251 class loop *loop = bb->loop_father;
2252
2253 /* If a loop gets removed, clean up the information associated
2254 with it. */
2255 if (loop->latch == bb
2256 || loop->header == bb)
2257 free_numbers_of_iterations_estimates (loop);
2258 }
2259
2260 /* Remove all the instructions in the block. */
2261 if (bb_seq (bb) != NULL)
2262 {
2263 /* Walk backwards so as to get a chance to substitute all
2264 released DEFs into debug stmts. See
2265 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2266 details. */
2267 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2268 {
2269 gimple *stmt = gsi_stmt (i);
2270 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2271 if (label_stmt
2272 && (FORCED_LABEL (gimple_label_label (label_stmt))
2273 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2274 {
2275 basic_block new_bb;
2276 gimple_stmt_iterator new_gsi;
2277
2278 /* A non-reachable non-local label may still be referenced.
2279 But it no longer needs to carry the extra semantics of
2280 non-locality. */
2281 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2282 {
2283 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2284 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2285 }
2286
2287 new_bb = bb->prev_bb;
2288 /* Don't move any labels into ENTRY block. */
2289 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2290 {
2291 new_bb = single_succ (new_bb);
2292 gcc_assert (new_bb != bb);
2293 }
2294 new_gsi = gsi_after_labels (new_bb);
2295 gsi_remove (&i, false);
2296 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2297 }
2298 else
2299 {
2300 /* Release SSA definitions. */
2301 release_defs (stmt);
2302 gsi_remove (&i, true);
2303 }
2304
2305 if (gsi_end_p (i))
2306 i = gsi_last_bb (bb);
2307 else
2308 gsi_prev (&i);
2309 }
2310 }
2311
2312 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2313 bb->il.gimple.seq = NULL;
2314 bb->il.gimple.phi_nodes = NULL;
2315 }
2316
2317
2318 /* Given a basic block BB and a value VAL for use in the final statement
2319 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2320 the edge that will be taken out of the block.
2321 If VAL is NULL_TREE, then the current value of the final statement's
2322 predicate or index is used.
2323 If the value does not match a unique edge, NULL is returned. */
2324
2325 edge
2326 find_taken_edge (basic_block bb, tree val)
2327 {
2328 gimple *stmt;
2329
2330 stmt = last_stmt (bb);
2331
2332 /* Handle ENTRY and EXIT. */
2333 if (!stmt)
2334 return NULL;
2335
2336 if (gimple_code (stmt) == GIMPLE_COND)
2337 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2338
2339 if (gimple_code (stmt) == GIMPLE_SWITCH)
2340 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2341
2342 if (computed_goto_p (stmt))
2343 {
2344 /* Only optimize if the argument is a label; if the argument is
2345 not a label then we cannot construct a proper CFG.
2346
2347 It may be the case that we only need to allow the LABEL_REF to
2348 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2349 appear inside a LABEL_EXPR just to be safe. */
2350 if (val
2351 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2352 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2353 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2354 }
2355
2356 /* Otherwise we only know the taken successor edge if it's unique. */
2357 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2358 }
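
/* Typical use during CFG cleanup, assuming BB ends in a GIMPLE_COND:

     edge taken = find_taken_edge (bb, boolean_false_node);

   yields the false edge; passing NULL_TREE lets the statement's
   current (possibly already folded) predicate or index decide, and a
   NULL result means no unique successor edge can be determined.  */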
2359
2360 /* Given a constant value VAL and the basic block BB ending in a computed
2361 GOTO_EXPR statement, determine which of the outgoing edges will be taken
2362 out of the block. Return NULL if the taken edge cannot be determined. */
2363
2364 static edge
2365 find_taken_edge_computed_goto (basic_block bb, tree val)
2366 {
2367 basic_block dest;
2368 edge e = NULL;
2369
2370 dest = label_to_block (cfun, val);
2371 if (dest)
2372 e = find_edge (bb, dest);
2373
2374 /* It's possible for find_edge to return NULL here on invalid code
2375 that abuses the labels-as-values extension (e.g. code that attempts to
2376 jump *between* functions via stored labels-as-values; PR 84136).
2377 If so, then we simply return that NULL for the edge.
2378 We don't currently have a way of detecting such invalid code, so we
2379 can't assert that it was the case when a NULL edge occurs here. */
2380
2381 return e;
2382 }
2383
2384 /* Given COND_STMT and a constant value VAL for use as the predicate,
2385 determine which of the two edges will be taken out of
2386 the statement's block. Return NULL if either edge may be taken.
2387 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2388 is used. */
2389
2390 static edge
2391 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2392 {
2393 edge true_edge, false_edge;
2394
2395 if (val == NULL_TREE)
2396 {
2397 /* Use the current value of the predicate. */
2398 if (gimple_cond_true_p (cond_stmt))
2399 val = integer_one_node;
2400 else if (gimple_cond_false_p (cond_stmt))
2401 val = integer_zero_node;
2402 else
2403 return NULL;
2404 }
2405 else if (TREE_CODE (val) != INTEGER_CST)
2406 return NULL;
2407
2408 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2409 &true_edge, &false_edge);
2410
2411 return (integer_zerop (val) ? false_edge : true_edge);
2412 }
2413
2414 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2415 which edge will be taken out of the statement's block. Return NULL if any
2416 edge may be taken.
2417 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2418 is used. */
2419
2420 edge
2421 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2422 {
2423 basic_block dest_bb;
2424 edge e;
2425 tree taken_case;
2426
2427 if (gimple_switch_num_labels (switch_stmt) == 1)
2428 taken_case = gimple_switch_default_label (switch_stmt);
2429 else
2430 {
2431 if (val == NULL_TREE)
2432 val = gimple_switch_index (switch_stmt);
2433 if (TREE_CODE (val) != INTEGER_CST)
2434 return NULL;
2435 else
2436 taken_case = find_case_label_for_value (switch_stmt, val);
2437 }
2438 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2439
2440 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2441 gcc_assert (e);
2442 return e;
2443 }
2444
2445
2446 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2447 We can make optimal use here of the fact that the case labels are
2448 sorted: We can do a binary search for a case matching VAL. */
2449
2450 tree
2451 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2452 {
2453 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2454 tree default_case = gimple_switch_default_label (switch_stmt);
2455
2456 for (low = 0, high = n; high - low > 1; )
2457 {
2458 size_t i = (high + low) / 2;
2459 tree t = gimple_switch_label (switch_stmt, i);
2460 int cmp;
2461
2462 /* Cache the result of comparing CASE_LOW and val. */
2463 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2464
2465 if (cmp > 0)
2466 high = i;
2467 else
2468 low = i;
2469
2470 if (CASE_HIGH (t) == NULL)
2471 {
2472 /* A single-valued case label. */
2473 if (cmp == 0)
2474 return t;
2475 }
2476 else
2477 {
2478 /* A case range. We can only handle integer ranges. */
2479 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2480 return t;
2481 }
2482 }
2483
2484 return default_case;
2485 }
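
/* A worked example (label vector indices shown; index 0 holds the
   default label):

     [0] default   [1] case 1   [2] case 5 ... 9   [3] case 42

   For VAL == 7 the search narrows [low, high) from [0, 4) onto the
   range label at index 2, where CASE_LOW <= 7 <= CASE_HIGH holds, and
   returns it.  For VAL == 3 no label matches and the default label is
   returned.  */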
2486
2487
2488 /* Dump a basic block on stderr. */
2489
2490 void
2491 gimple_debug_bb (basic_block bb)
2492 {
2493 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2494 }
2495
2496
2497 /* Dump basic block with index N on stderr. */
2498
2499 basic_block
2500 gimple_debug_bb_n (int n)
2501 {
2502 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2503 return BASIC_BLOCK_FOR_FN (cfun, n);
2504 }
2505
2506
2507 /* Dump the CFG on stderr.
2508
2509 FLAGS are the same as those used by the tree dumping functions
2510 (see TDF_* in dumpfile.h). */
2511
2512 void
2513 gimple_debug_cfg (dump_flags_t flags)
2514 {
2515 gimple_dump_cfg (stderr, flags);
2516 }
2517
2518
2519 /* Dump the program showing basic block boundaries on the given FILE.
2520
2521 FLAGS are the same as those used by the tree dumping functions (see TDF_*
2522 in dumpfile.h). */
2523
2524 void
2525 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2526 {
2527 if (flags & TDF_DETAILS)
2528 {
2529 dump_function_header (file, current_function_decl, flags);
2530 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2531 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2532 last_basic_block_for_fn (cfun));
2533
2534 brief_dump_cfg (file, flags);
2535 fprintf (file, "\n");
2536 }
2537
2538 if (flags & TDF_STATS)
2539 dump_cfg_stats (file);
2540
2541 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2542 }
2543
2544
2545 /* Dump CFG statistics on FILE. */
2546
2547 void
2548 dump_cfg_stats (FILE *file)
2549 {
2550 static long max_num_merged_labels = 0;
2551 unsigned long size, total = 0;
2552 long num_edges;
2553 basic_block bb;
2554 const char * const fmt_str = "%-30s%-13s%12s\n";
2555 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2556 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2557 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2558 const char *funcname = current_function_name ();
2559
2560 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2561
2562 fprintf (file, "---------------------------------------------------------\n");
2563 fprintf (file, fmt_str, "", " Number of ", "Memory");
2564 fprintf (file, fmt_str, "", " instances ", "used ");
2565 fprintf (file, "---------------------------------------------------------\n");
2566
2567 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2568 total += size;
2569 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2570 SIZE_AMOUNT (size));
2571
2572 num_edges = 0;
2573 FOR_EACH_BB_FN (bb, cfun)
2574 num_edges += EDGE_COUNT (bb->succs);
2575 size = num_edges * sizeof (class edge_def);
2576 total += size;
2577 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2578
2579 fprintf (file, "---------------------------------------------------------\n");
2580 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2581 SIZE_AMOUNT (total));
2582 fprintf (file, "---------------------------------------------------------\n");
2583 fprintf (file, "\n");
2584
2585 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2586 max_num_merged_labels = cfg_stats.num_merged_labels;
2587
2588 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2589 cfg_stats.num_merged_labels, max_num_merged_labels);
2590
2591 fprintf (file, "\n");
2592 }
2593
2594
2595 /* Dump CFG statistics on stderr. Keep extern so that it's always
2596 linked in the final executable. */
2597
2598 DEBUG_FUNCTION void
2599 debug_cfg_stats (void)
2600 {
2601 dump_cfg_stats (stderr);
2602 }
2603
2604 /*---------------------------------------------------------------------------
2605 Miscellaneous helpers
2606 ---------------------------------------------------------------------------*/
2607
2608 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2609 flow. Transfers of control flow associated with EH are excluded. */
2610
2611 static bool
2612 call_can_make_abnormal_goto (gimple *t)
2613 {
2614 /* If the function has no non-local labels and makes no setjmp-like calls,
2615 then a call cannot make an abnormal transfer of control. */
2616 if (!cfun->has_nonlocal_label
2617 && !cfun->calls_setjmp)
2618 return false;
2619
2620 /* Likewise if the call has no side effects. */
2621 if (!gimple_has_side_effects (t))
2622 return false;
2623
2624 /* Likewise if the called function is leaf. */
2625 if (gimple_call_flags (t) & ECF_LEAF)
2626 return false;
2627
2628 return true;
2629 }
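
/* A sketch of the situation this guards against:

     jmp_buf env;
     ...
     if (setjmp (env) == 0)
       foo ();   -- if foo () eventually calls longjmp (env, 1),
                 -- control returns to the setjmp receiver abnormally

   so in a function that calls setjmp or contains non-local labels, a
   side-effecting, non-leaf call must be assumed to have an abnormal
   outgoing edge.  */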
2630
2631
2632 /* Return true if T can make an abnormal transfer of control flow.
2633 Transfers of control flow associated with EH are excluded. */
2634
2635 bool
2636 stmt_can_make_abnormal_goto (gimple *t)
2637 {
2638 if (computed_goto_p (t))
2639 return true;
2640 if (is_gimple_call (t))
2641 return call_can_make_abnormal_goto (t);
2642 return false;
2643 }
2644
2645
2646 /* Return true if T represents a stmt that always transfers control. */
2647
2648 bool
2649 is_ctrl_stmt (gimple *t)
2650 {
2651 switch (gimple_code (t))
2652 {
2653 case GIMPLE_COND:
2654 case GIMPLE_SWITCH:
2655 case GIMPLE_GOTO:
2656 case GIMPLE_RETURN:
2657 case GIMPLE_RESX:
2658 return true;
2659 default:
2660 return false;
2661 }
2662 }
2663
2664
2665 /* Return true if T is a statement that may alter the flow of control
2666 (e.g., a call to a non-returning function). */
2667
2668 bool
2669 is_ctrl_altering_stmt (gimple *t)
2670 {
2671 gcc_assert (t);
2672
2673 switch (gimple_code (t))
2674 {
2675 case GIMPLE_CALL:
2676 /* The per-stmt call flag indicates whether the call could alter
2677 control flow. */
2678 if (gimple_call_ctrl_altering_p (t))
2679 return true;
2680 break;
2681
2682 case GIMPLE_EH_DISPATCH:
2683 /* EH_DISPATCH branches to the individual catch handlers at
2684 this level of a try or allowed-exceptions region. It can
2685 fallthru to the next statement as well. */
2686 return true;
2687
2688 case GIMPLE_ASM:
2689 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2690 return true;
2691 break;
2692
2693 CASE_GIMPLE_OMP:
2694 /* OpenMP directives alter control flow. */
2695 return true;
2696
2697 case GIMPLE_TRANSACTION:
2698 /* A transaction start alters control flow. */
2699 return true;
2700
2701 default:
2702 break;
2703 }
2704
2705 /* If a statement can throw, it alters control flow. */
2706 return stmt_can_throw_internal (cfun, t);
2707 }
2708
2709
2710 /* Return true if T is a simple local goto. */
2711
2712 bool
2713 simple_goto_p (gimple *t)
2714 {
2715 return (gimple_code (t) == GIMPLE_GOTO
2716 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2717 }
2718
2719
2720 /* Return true if STMT should start a new basic block. PREV_STMT is
2721 the statement preceding STMT. It is used when STMT is a label or a
2722 case label. Labels should only start a new basic block if their
2723 previous statement wasn't a label. Otherwise, a sequence of labels
2724 would generate unnecessary basic blocks that only contain a single
2725 label. */
2726
2727 static inline bool
2728 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2729 {
2730 if (stmt == NULL)
2731 return false;
2732
2733 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2734 any nondebug stmts in the block. We don't want to start another
2735 block in this case: the debug stmt will already have started the
2736 one STMT would start if we weren't outputting debug stmts. */
2737 if (prev_stmt && is_gimple_debug (prev_stmt))
2738 return false;
2739
2740 /* Labels start a new basic block only if the preceding statement
2741 wasn't a label of the same type. This prevents the creation of
2742 consecutive blocks that have nothing but a single label. */
2743 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2744 {
2745 /* Nonlocal and computed GOTO targets always start a new block. */
2746 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2747 || FORCED_LABEL (gimple_label_label (label_stmt)))
2748 return true;
2749
2750 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2751 {
2752 if (DECL_NONLOCAL (gimple_label_label (plabel))
2753 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2754 return true;
2755
2756 cfg_stats.num_merged_labels++;
2757 return false;
2758 }
2759 else
2760 return true;
2761 }
2762 else if (gimple_code (stmt) == GIMPLE_CALL)
2763 {
2764 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2765 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2766 start a new block. */
2767 return true;
2768 if (gimple_call_internal_p (stmt, IFN_PHI)
2769 && prev_stmt
2770 && gimple_code (prev_stmt) != GIMPLE_LABEL
2771 && (gimple_code (prev_stmt) != GIMPLE_CALL
2772 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2773 /* PHI nodes start a new block unless preceded by a label
2774 or another PHI. */
2775 return true;
2776 }
2777
2778 return false;
2779 }
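
/* For example, in a GIMPLE sequence such as

     <D.1234>:
     <D.1235>:
       x = 1;

   (both labels artificial) only the first label opens a new basic
   block; the second is merged into it and counted in
   cfg_stats.num_merged_labels.  A nonlocal or FORCED_LABEL, or any
   label following a user label, still starts its own block.  */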
2780
2781
2782 /* Return true if T should end a basic block. */
2783
2784 bool
2785 stmt_ends_bb_p (gimple *t)
2786 {
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2788 }
2789
2790 /* Remove block annotations and other data structures. */
2791
2792 void
2793 delete_tree_cfg_annotations (struct function *fn)
2794 {
2795 vec_free (label_to_block_map_for_fn (fn));
2796 }
2797
2798 /* Return the virtual PHI node in BB, or NULL if there is none. */
2799
2800 gphi *
2801 get_virtual_phi (basic_block bb)
2802 {
2803 for (gphi_iterator gsi = gsi_start_phis (bb);
2804 !gsi_end_p (gsi);
2805 gsi_next (&gsi))
2806 {
2807 gphi *phi = gsi.phi ();
2808
2809 if (virtual_operand_p (PHI_RESULT (phi)))
2810 return phi;
2811 }
2812
2813 return NULL;
2814 }
2815
2816 /* Return the first statement in basic block BB. */
2817
2818 gimple *
2819 first_stmt (basic_block bb)
2820 {
2821 gimple_stmt_iterator i = gsi_start_bb (bb);
2822 gimple *stmt = NULL;
2823
2824 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2825 {
2826 gsi_next (&i);
2827 stmt = NULL;
2828 }
2829 return stmt;
2830 }
2831
2832 /* Return the first non-label statement in basic block BB. */
2833
2834 static gimple *
2835 first_non_label_stmt (basic_block bb)
2836 {
2837 gimple_stmt_iterator i = gsi_start_bb (bb);
2838 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2839 gsi_next (&i);
2840 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2841 }
2842
2843 /* Return the last statement in basic block BB. */
2844
2845 gimple *
2846 last_stmt (basic_block bb)
2847 {
2848 gimple_stmt_iterator i = gsi_last_bb (bb);
2849 gimple *stmt = NULL;
2850
2851 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2852 {
2853 gsi_prev (&i);
2854 stmt = NULL;
2855 }
2856 return stmt;
2857 }
2858
2859 /* Return the last statement of an otherwise empty block. Return NULL
2860 if the block is totally empty, or if it contains more than one
2861 statement. */
2862
2863 gimple *
2864 last_and_only_stmt (basic_block bb)
2865 {
2866 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2867 gimple *last, *prev;
2868
2869 if (gsi_end_p (i))
2870 return NULL;
2871
2872 last = gsi_stmt (i);
2873 gsi_prev_nondebug (&i);
2874 if (gsi_end_p (i))
2875 return last;
2876
2877 /* Empty statements should no longer appear in the instruction stream.
2878 Everything that might have appeared before should be deleted by
2879 remove_useless_stmts, and the optimizers should just gsi_remove
2880 instead of smashing with build_empty_stmt.
2881
2882 Thus the only thing that should appear here in a block containing
2883 one executable statement is a label. */
2884 prev = gsi_stmt (i);
2885 if (gimple_code (prev) == GIMPLE_LABEL)
2886 return last;
2887 else
2888 return NULL;
2889 }
2890
2891 /* Returns the basic block after which the new basic block created
2892 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2893 near its "logical" location. This is of most help to humans looking
2894 at debugging dumps. */
2895
2896 basic_block
2897 split_edge_bb_loc (edge edge_in)
2898 {
2899 basic_block dest = edge_in->dest;
2900 basic_block dest_prev = dest->prev_bb;
2901
2902 if (dest_prev)
2903 {
2904 edge e = find_edge (dest_prev, dest);
2905 if (e && !(e->flags & EDGE_COMPLEX))
2906 return edge_in->src;
2907 }
2908 return dest_prev;
2909 }
2910
2911 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2912 Abort on abnormal edges. */
2913
2914 static basic_block
2915 gimple_split_edge (edge edge_in)
2916 {
2917 basic_block new_bb, after_bb, dest;
2918 edge new_edge, e;
2919
2920 /* Abnormal edges cannot be split. */
2921 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2922
2923 dest = edge_in->dest;
2924
2925 after_bb = split_edge_bb_loc (edge_in);
2926
2927 new_bb = create_empty_bb (after_bb);
2928 new_bb->count = edge_in->count ();
2929
2930 /* We want to avoid re-allocating PHIs when we first
2931 add the fallthru edge from new_bb to dest, but we also
2932 want to avoid changing PHI argument order when
2933 redirecting edge_in away from dest. Adding the
2934 fallthru edge first appends its PHI arguments last,
2935 and the subsequent redirection then swaps them back
2936 into place by means of an unordered remove.
2937 So hack around things by temporarily removing all PHIs
2938 from the destination during the edge redirection and then
2939 making sure the edges stay in order. */
2940 gimple_seq saved_phis = phi_nodes (dest);
2941 unsigned old_dest_idx = edge_in->dest_idx;
2942 set_phi_nodes (dest, NULL);
2943 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2944 e = redirect_edge_and_branch (edge_in, new_bb);
2945 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
2946 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
2947 dest->il.gimple.phi_nodes = saved_phis;
2948
2949 return new_bb;
2950 }
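
/* Illustrative sketch: splitting the edge E from A to D

       A ---E---> D    ==>    A ---> NEW ---> D

   creates the empty block NEW carrying E's count, into which code can
   later be inserted without affecting A's other successors or D's
   other predecessors.  */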
2951
2952
2953 /* Verify properties of the address expression T whose base should be
2954 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2955
2956 static bool
2957 verify_address (tree t, bool verify_addressable)
2958 {
2959 bool old_constant;
2960 bool old_side_effects;
2961 bool new_constant;
2962 bool new_side_effects;
2963
2964 old_constant = TREE_CONSTANT (t);
2965 old_side_effects = TREE_SIDE_EFFECTS (t);
2966
2967 recompute_tree_invariant_for_addr_expr (t);
2968 new_side_effects = TREE_SIDE_EFFECTS (t);
2969 new_constant = TREE_CONSTANT (t);
2970
2971 if (old_constant != new_constant)
2972 {
2973 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2974 return true;
2975 }
2976 if (old_side_effects != new_side_effects)
2977 {
2978 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2979 return true;
2980 }
2981
2982 tree base = TREE_OPERAND (t, 0);
2983 while (handled_component_p (base))
2984 base = TREE_OPERAND (base, 0);
2985
2986 if (!(VAR_P (base)
2987 || TREE_CODE (base) == PARM_DECL
2988 || TREE_CODE (base) == RESULT_DECL))
2989 return false;
2990
2991 if (verify_addressable && !TREE_ADDRESSABLE (base))
2992 {
2993 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
2994 return true;
2995 }
2996
2997 return false;
2998 }
2999
3000
3001 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3002 Returns true if there is an error, otherwise false. */
3003
3004 static bool
3005 verify_types_in_gimple_min_lval (tree expr)
3006 {
3007 tree op;
3008
3009 if (is_gimple_id (expr))
3010 return false;
3011
3012 if (TREE_CODE (expr) != TARGET_MEM_REF
3013 && TREE_CODE (expr) != MEM_REF)
3014 {
3015 error ("invalid expression for min lvalue");
3016 return true;
3017 }
3018
3019 /* TARGET_MEM_REFs are strange beasts. */
3020 if (TREE_CODE (expr) == TARGET_MEM_REF)
3021 return false;
3022
3023 op = TREE_OPERAND (expr, 0);
3024 if (!is_gimple_val (op))
3025 {
3026 error ("invalid operand in indirect reference");
3027 debug_generic_stmt (op);
3028 return true;
3029 }
3030 /* Memory references now generally can involve a value conversion. */
3031
3032 return false;
3033 }
3034
3035 /* Verify if EXPR is a valid GIMPLE reference expression. If
3036 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3037 if there is an error, otherwise false. */
3038
3039 static bool
3040 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3041 {
3042 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3043
3044 if (TREE_CODE (expr) == REALPART_EXPR
3045 || TREE_CODE (expr) == IMAGPART_EXPR
3046 || TREE_CODE (expr) == BIT_FIELD_REF)
3047 {
3048 tree op = TREE_OPERAND (expr, 0);
3049 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3050 {
3051 error ("non-scalar %qs", code_name);
3052 return true;
3053 }
3054
3055 if (TREE_CODE (expr) == BIT_FIELD_REF)
3056 {
3057 tree t1 = TREE_OPERAND (expr, 1);
3058 tree t2 = TREE_OPERAND (expr, 2);
3059 poly_uint64 size, bitpos;
3060 if (!poly_int_tree_p (t1, &size)
3061 || !poly_int_tree_p (t2, &bitpos)
3062 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3063 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3064 {
3065 error ("invalid position or size operand to %qs", code_name);
3066 return true;
3067 }
3068 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3069 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3070 {
3071 error ("integral result type precision does not match "
3072 "field size of %qs", code_name);
3073 return true;
3074 }
3075 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3076 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3077 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3078 size))
3079 {
3080 error ("mode size of non-integral result does not "
3081 "match field size of %qs",
3082 code_name);
3083 return true;
3084 }
3085 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3086 && !type_has_mode_precision_p (TREE_TYPE (op)))
3087 {
3088 error ("%qs of non-mode-precision operand", code_name);
3089 return true;
3090 }
3091 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3092 && maybe_gt (size + bitpos,
3093 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3094 {
3095 error ("position plus size exceeds size of referenced object in "
3096 "%qs", code_name);
3097 return true;
3098 }
3099 }
3100
3101 if ((TREE_CODE (expr) == REALPART_EXPR
3102 || TREE_CODE (expr) == IMAGPART_EXPR)
3103 && !useless_type_conversion_p (TREE_TYPE (expr),
3104 TREE_TYPE (TREE_TYPE (op))))
3105 {
3106 error ("type mismatch in %qs reference", code_name);
3107 debug_generic_stmt (TREE_TYPE (expr));
3108 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3109 return true;
3110 }
3111 expr = op;
3112 }
3113
3114 while (handled_component_p (expr))
3115 {
3116 code_name = get_tree_code_name (TREE_CODE (expr));
3117
3118 if (TREE_CODE (expr) == REALPART_EXPR
3119 || TREE_CODE (expr) == IMAGPART_EXPR
3120 || TREE_CODE (expr) == BIT_FIELD_REF)
3121 {
3122 error ("non-top-level %qs", code_name);
3123 return true;
3124 }
3125
3126 tree op = TREE_OPERAND (expr, 0);
3127
3128 if (TREE_CODE (expr) == ARRAY_REF
3129 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3130 {
3131 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3132 || (TREE_OPERAND (expr, 2)
3133 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3134 || (TREE_OPERAND (expr, 3)
3135 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3136 {
3137 error ("invalid operands to %qs", code_name);
3138 debug_generic_stmt (expr);
3139 return true;
3140 }
3141 }
3142
3143 /* Verify if the reference array element types are compatible. */
3144 if (TREE_CODE (expr) == ARRAY_REF
3145 && !useless_type_conversion_p (TREE_TYPE (expr),
3146 TREE_TYPE (TREE_TYPE (op))))
3147 {
3148 error ("type mismatch in %qs", code_name);
3149 debug_generic_stmt (TREE_TYPE (expr));
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3151 return true;
3152 }
3153 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3154 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3155 TREE_TYPE (TREE_TYPE (op))))
3156 {
3157 error ("type mismatch in %qs", code_name);
3158 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3159 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3160 return true;
3161 }
3162
3163 if (TREE_CODE (expr) == COMPONENT_REF)
3164 {
3165 if (TREE_OPERAND (expr, 2)
3166 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3167 {
3168 error ("invalid %qs offset operator", code_name);
3169 return true;
3170 }
3171 if (!useless_type_conversion_p (TREE_TYPE (expr),
3172 TREE_TYPE (TREE_OPERAND (expr, 1))))
3173 {
3174 error ("type mismatch in %qs", code_name);
3175 debug_generic_stmt (TREE_TYPE (expr));
3176 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3177 return true;
3178 }
3179 }
3180
3181 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3182 {
3183 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3184 that their operand is not an SSA name or an invariant when
3185 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3186 bug). Otherwise there is nothing to verify, gross mismatches at
3187 most invoke undefined behavior. */
3188 if (require_lvalue
3189 && (TREE_CODE (op) == SSA_NAME
3190 || is_gimple_min_invariant (op)))
3191 {
3192 error ("conversion of %qs on the left hand side of %qs",
3193 get_tree_code_name (TREE_CODE (op)), code_name);
3194 debug_generic_stmt (expr);
3195 return true;
3196 }
3197 else if (TREE_CODE (op) == SSA_NAME
3198 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3199 {
3200 error ("conversion of register to a different size in %qs",
3201 code_name);
3202 debug_generic_stmt (expr);
3203 return true;
3204 }
3205 else if (!handled_component_p (op))
3206 return false;
3207 }
3208
3209 expr = op;
3210 }
3211
3212 code_name = get_tree_code_name (TREE_CODE (expr));
3213
3214 if (TREE_CODE (expr) == MEM_REF)
3215 {
3216 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3217 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3218 && verify_address (TREE_OPERAND (expr, 0), false)))
3219 {
3220 error ("invalid address operand in %qs", code_name);
3221 debug_generic_stmt (expr);
3222 return true;
3223 }
3224 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3225 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3226 {
3227 error ("invalid offset operand in %qs", code_name);
3228 debug_generic_stmt (expr);
3229 return true;
3230 }
3231 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3232 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3233 {
3234 error ("invalid clique in %qs", code_name);
3235 debug_generic_stmt (expr);
3236 return true;
3237 }
3238 }
3239 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3240 {
3241 if (!TMR_BASE (expr)
3242 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3243 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3244 && verify_address (TMR_BASE (expr), false)))
3245 {
3246 error ("invalid address operand in %qs", code_name);
3247 return true;
3248 }
3249 if (!TMR_OFFSET (expr)
3250 || !poly_int_tree_p (TMR_OFFSET (expr))
3251 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3252 {
3253 error ("invalid offset operand in %qs", code_name);
3254 debug_generic_stmt (expr);
3255 return true;
3256 }
3257 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3258 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3259 {
3260 error ("invalid clique in %qs", code_name);
3261 debug_generic_stmt (expr);
3262 return true;
3263 }
3264 }
3265 else if (TREE_CODE (expr) == INDIRECT_REF)
3266 {
3267 error ("%qs in gimple IL", code_name);
3268 debug_generic_stmt (expr);
3269 return true;
3270 }
3271
3272 return ((require_lvalue || !is_gimple_min_invariant (expr))
3273 && verify_types_in_gimple_min_lval (expr));
3274 }
3275
3276 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3277 list of pointer-to types that is trivially convertible to DEST. */
3278
3279 static bool
3280 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3281 {
3282 tree src;
3283
3284 if (!TYPE_POINTER_TO (src_obj))
3285 return true;
3286
3287 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3288 if (useless_type_conversion_p (dest, src))
3289 return true;
3290
3291 return false;
3292 }
3293
3294 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3295 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3296
3297 static bool
3298 valid_fixed_convert_types_p (tree type1, tree type2)
3299 {
3300 return (FIXED_POINT_TYPE_P (type1)
3301 && (INTEGRAL_TYPE_P (type2)
3302 || SCALAR_FLOAT_TYPE_P (type2)
3303 || FIXED_POINT_TYPE_P (type2)));
3304 }
3305
3306 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3307 is a problem, otherwise false. */
3308
3309 static bool
3310 verify_gimple_call (gcall *stmt)
3311 {
3312 tree fn = gimple_call_fn (stmt);
3313 tree fntype, fndecl;
3314 unsigned i;
3315
3316 if (gimple_call_internal_p (stmt))
3317 {
3318 if (fn)
3319 {
3320 error ("gimple call has two targets");
3321 debug_generic_stmt (fn);
3322 return true;
3323 }
3324 }
3325 else
3326 {
3327 if (!fn)
3328 {
3329 error ("gimple call has no target");
3330 return true;
3331 }
3332 }
3333
3334 if (fn && !is_gimple_call_addr (fn))
3335 {
3336 error ("invalid function in gimple call");
3337 debug_generic_stmt (fn);
3338 return true;
3339 }
3340
3341 if (fn
3342 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3343 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3344 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3345 {
3346 error ("non-function in gimple call");
3347 return true;
3348 }
3349
3350 fndecl = gimple_call_fndecl (stmt);
3351 if (fndecl
3352 && TREE_CODE (fndecl) == FUNCTION_DECL
3353 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3354 && !DECL_PURE_P (fndecl)
3355 && !TREE_READONLY (fndecl))
3356 {
3357 error ("invalid pure const state for function");
3358 return true;
3359 }
3360
3361 tree lhs = gimple_call_lhs (stmt);
3362 if (lhs
3363 && (!is_gimple_lvalue (lhs)
3364 || verify_types_in_gimple_reference (lhs, true)))
3365 {
3366 error ("invalid LHS in gimple call");
3367 return true;
3368 }
3369
3370 if (gimple_call_ctrl_altering_p (stmt)
3371 && gimple_call_noreturn_p (stmt)
3372 && should_remove_lhs_p (lhs))
3373 {
3374 error ("LHS in %<noreturn%> call");
3375 return true;
3376 }
3377
3378 fntype = gimple_call_fntype (stmt);
3379 if (fntype
3380 && lhs
3381 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3382 /* ??? At least C++ misses conversions at assignments from
3383 void * call results.
3384 For now simply allow arbitrary pointer type conversions. */
3385 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3386 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3387 {
3388 error ("invalid conversion in gimple call");
3389 debug_generic_stmt (TREE_TYPE (lhs));
3390 debug_generic_stmt (TREE_TYPE (fntype));
3391 return true;
3392 }
3393
3394 if (gimple_call_chain (stmt)
3395 && !is_gimple_val (gimple_call_chain (stmt)))
3396 {
3397 error ("invalid static chain in gimple call");
3398 debug_generic_stmt (gimple_call_chain (stmt));
3399 return true;
3400 }
3401
3402 /* If there is a static chain argument, the call should either be
3403 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3404 if (gimple_call_chain (stmt)
3405 && fndecl
3406 && !DECL_STATIC_CHAIN (fndecl))
3407 {
3408 error ("static chain with function that doesn%'t use one");
3409 return true;
3410 }
3411
3412 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3413 {
3414 switch (DECL_FUNCTION_CODE (fndecl))
3415 {
3416 case BUILT_IN_UNREACHABLE:
3417 case BUILT_IN_TRAP:
3418 if (gimple_call_num_args (stmt) > 0)
3419 {
3420 /* Built-in unreachable with parameters might not be caught by
3421 the undefined behavior sanitizer. Front ends do check that
3422 users do not call them that way, but we also produce calls to
3423 __builtin_unreachable internally, for example when IPA figures
3424 out that a call cannot happen in a legal program. In such
3425 cases, we must make sure arguments are stripped off. */
3426 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3427 "with arguments");
3428 return true;
3429 }
3430 break;
3431 default:
3432 break;
3433 }
3434 }
3435
3436 /* ??? The C frontend passes unpromoted arguments in case it
3437 didn't see a function declaration before the call. So for now
3438 leave the call arguments mostly unverified. Once we gimplify
3439 unit-at-a-time we have a chance to fix this. */
3440
3441 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3442 {
3443 tree arg = gimple_call_arg (stmt, i);
3444 if ((is_gimple_reg_type (TREE_TYPE (arg))
3445 && !is_gimple_val (arg))
3446 || (!is_gimple_reg_type (TREE_TYPE (arg))
3447 && !is_gimple_lvalue (arg)))
3448 {
3449 error ("invalid argument to gimple call");
3450 debug_generic_expr (arg);
3451 return true;
3452 }
3453 }
3454
3455 return false;
3456 }
3457
3458 /* Verifies the gimple comparison with the result type TYPE and
3459 the operands OP0 and OP1; the comparison code is CODE. */
3460
3461 static bool
3462 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3463 {
3464 tree op0_type = TREE_TYPE (op0);
3465 tree op1_type = TREE_TYPE (op1);
3466
3467 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3468 {
3469 error ("invalid operands in gimple comparison");
3470 return true;
3471 }
3472
3473 /* For comparisons we do not record an operation type giving the
3474 effective type the comparison is carried out in. Instead
3475 we require that either the first operand is trivially
3476 convertible into the second, or the other way around. */
3477 if (!useless_type_conversion_p (op0_type, op1_type)
3478 && !useless_type_conversion_p (op1_type, op0_type))
3479 {
3480 error ("mismatching comparison operand types");
3481 debug_generic_expr (op0_type);
3482 debug_generic_expr (op1_type);
3483 return true;
3484 }
3485
3486 /* The resulting type of a comparison may be an effective boolean type. */
3487 if (INTEGRAL_TYPE_P (type)
3488 && (TREE_CODE (type) == BOOLEAN_TYPE
3489 || TYPE_PRECISION (type) == 1))
3490 {
3491 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3492 || TREE_CODE (op1_type) == VECTOR_TYPE)
3493 && code != EQ_EXPR && code != NE_EXPR
3494 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3495 && !VECTOR_INTEGER_TYPE_P (op0_type))
3496 {
3497 error ("unsupported operation or type for vector comparison"
3498 " returning a boolean");
3499 debug_generic_expr (op0_type);
3500 debug_generic_expr (op1_type);
3501 return true;
3502 }
3503 }
3504 /* Or a boolean vector type with the same element count
3505 as the comparison operand types. */
3506 else if (TREE_CODE (type) == VECTOR_TYPE
3507 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3508 {
3509 if (TREE_CODE (op0_type) != VECTOR_TYPE
3510 || TREE_CODE (op1_type) != VECTOR_TYPE)
3511 {
3512 error ("non-vector operands in vector comparison");
3513 debug_generic_expr (op0_type);
3514 debug_generic_expr (op1_type);
3515 return true;
3516 }
3517
3518 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3519 TYPE_VECTOR_SUBPARTS (op0_type)))
3520 {
3521 error ("invalid vector comparison resulting type");
3522 debug_generic_expr (type);
3523 return true;
3524 }
3525 }
3526 else
3527 {
3528 error ("bogus comparison result type");
3529 debug_generic_expr (type);
3530 return true;
3531 }
3532
3533 return false;
3534 }
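
/* A sketch of the result shapes this accepts (type names hypothetical):

     _Bool b;  b = x_1 < y_2;    -- scalar effective boolean result
     vbool m;  m = v_1 == v_2;   -- boolean vector result whose element
                                 -- count matches the vector operands
 */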
3535
3536 /* Verify a gimple assignment statement STMT with a unary rhs.
3537 Returns true if anything is wrong. */
3538
3539 static bool
3540 verify_gimple_assign_unary (gassign *stmt)
3541 {
3542 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3543 tree lhs = gimple_assign_lhs (stmt);
3544 tree lhs_type = TREE_TYPE (lhs);
3545 tree rhs1 = gimple_assign_rhs1 (stmt);
3546 tree rhs1_type = TREE_TYPE (rhs1);
3547
3548 if (!is_gimple_reg (lhs))
3549 {
3550 error ("non-register as LHS of unary operation");
3551 return true;
3552 }
3553
3554 if (!is_gimple_val (rhs1))
3555 {
3556 error ("invalid operand in unary operation");
3557 return true;
3558 }
3559
3560 const char* const code_name = get_tree_code_name (rhs_code);
3561
3562 /* First handle conversions. */
3563 switch (rhs_code)
3564 {
3565 CASE_CONVERT:
3566 {
3567 /* Allow conversions between vectors with the same number of elements,
3568 provided that the conversion is OK for the element types too. */
3569 if (VECTOR_TYPE_P (lhs_type)
3570 && VECTOR_TYPE_P (rhs1_type)
3571 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3572 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3573 {
3574 lhs_type = TREE_TYPE (lhs_type);
3575 rhs1_type = TREE_TYPE (rhs1_type);
3576 }
3577 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3578 {
3579 error ("invalid vector types in nop conversion");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 return true;
3583 }
3584
3585 /* Allow conversions from pointer type to integral type only if
3586 there is no sign or zero extension involved.
3587 For targets where the precision of ptrofftype doesn't match that
3588 of pointers we allow conversions to types where
3589 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3590 if ((POINTER_TYPE_P (lhs_type)
3591 && INTEGRAL_TYPE_P (rhs1_type))
3592 || (POINTER_TYPE_P (rhs1_type)
3593 && INTEGRAL_TYPE_P (lhs_type)
3594 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3595 #if defined(POINTERS_EXTEND_UNSIGNED)
3596 || (TYPE_MODE (rhs1_type) == ptr_mode
3597 && (TYPE_PRECISION (lhs_type)
3598 == BITS_PER_WORD /* word_mode */
3599 || (TYPE_PRECISION (lhs_type)
3600 == GET_MODE_PRECISION (Pmode))))
3601 #endif
3602 )))
3603 return false;
3604
3605 /* Allow conversion from integral to offset type and vice versa. */
3606 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3607 && INTEGRAL_TYPE_P (rhs1_type))
3608 || (INTEGRAL_TYPE_P (lhs_type)
3609 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3610 return false;
3611
3612 /* Otherwise assert we are converting between types of the
3613 same kind. */
3614 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3615 {
3616 error ("invalid types in nop conversion");
3617 debug_generic_expr (lhs_type);
3618 debug_generic_expr (rhs1_type);
3619 return true;
3620 }
3621
3622 return false;
3623 }
3624
3625 case ADDR_SPACE_CONVERT_EXPR:
3626 {
3627 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3628 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3629 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3630 {
3631 error ("invalid types in address space conversion");
3632 debug_generic_expr (lhs_type);
3633 debug_generic_expr (rhs1_type);
3634 return true;
3635 }
3636
3637 return false;
3638 }
3639
3640 case FIXED_CONVERT_EXPR:
3641 {
3642 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3643 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3644 {
3645 error ("invalid types in fixed-point conversion");
3646 debug_generic_expr (lhs_type);
3647 debug_generic_expr (rhs1_type);
3648 return true;
3649 }
3650
3651 return false;
3652 }
3653
3654 case FLOAT_EXPR:
3655 {
3656 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3657 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3658 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3659 {
3660 error ("invalid types in conversion to floating-point");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs1_type);
3663 return true;
3664 }
3665
3666 return false;
3667 }
3668
3669 case FIX_TRUNC_EXPR:
3670 {
3671 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3672 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3673 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3674 {
3675 error ("invalid types in conversion to integer");
3676 debug_generic_expr (lhs_type);
3677 debug_generic_expr (rhs1_type);
3678 return true;
3679 }
3680
3681 return false;
3682 }
3683
3684 case VEC_UNPACK_HI_EXPR:
3685 case VEC_UNPACK_LO_EXPR:
3686 case VEC_UNPACK_FLOAT_HI_EXPR:
3687 case VEC_UNPACK_FLOAT_LO_EXPR:
3688 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3689 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3690 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3691 || TREE_CODE (lhs_type) != VECTOR_TYPE
3692 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3693 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3694 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3695 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3696 || ((rhs_code == VEC_UNPACK_HI_EXPR
3697 || rhs_code == VEC_UNPACK_LO_EXPR)
3698 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3699 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3700 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3701 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3702 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3703 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3704 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3705 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3706 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3707 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3708 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3709 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3710 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3711 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3712 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3713 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3714 {
3715 error ("type mismatch in %qs expression", code_name);
3716 debug_generic_expr (lhs_type);
3717 debug_generic_expr (rhs1_type);
3718 return true;
3719 }
3720
3721 return false;
3722
3723 case NEGATE_EXPR:
3724 case ABS_EXPR:
3725 case BIT_NOT_EXPR:
3726 case PAREN_EXPR:
3727 case CONJ_EXPR:
3728 break;
3729
3730 case ABSU_EXPR:
3731 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3732 || !TYPE_UNSIGNED (lhs_type)
3733 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3734 || TYPE_UNSIGNED (rhs1_type)
3735 || element_precision (lhs_type) != element_precision (rhs1_type))
3736 {
3737 error ("invalid types for %qs", code_name);
3738 debug_generic_expr (lhs_type);
3739 debug_generic_expr (rhs1_type);
3740 return true;
3741 }
3742 return false;
3743
3744 case VEC_DUPLICATE_EXPR:
3745 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3746 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3747 {
3748 error ("%qs should be from a scalar to a like vector", code_name);
3749 debug_generic_expr (lhs_type);
3750 debug_generic_expr (rhs1_type);
3751 return true;
3752 }
3753 return false;
3754
3755 default:
3756 gcc_unreachable ();
3757 }
3758
3759 /* For the remaining codes assert there is no conversion involved. */
3760 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3761 {
3762 error ("non-trivial conversion in unary operation");
3763 debug_generic_expr (lhs_type);
3764 debug_generic_expr (rhs1_type);
3765 return true;
3766 }
3767
3768 return false;
3769 }
3770
3771 /* Verify a gimple assignment statement STMT with a binary rhs.
3772 Returns true if anything is wrong. */
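/* As in the unary case, the switch below handles codes whose operand
types may legitimately differ from the result type; codes that merely
break out of it fall through to a generic check that both operand types
convert trivially to the LHS type. */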
3773
3774 static bool
3775 verify_gimple_assign_binary (gassign *stmt)
3776 {
3777 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3778 tree lhs = gimple_assign_lhs (stmt);
3779 tree lhs_type = TREE_TYPE (lhs);
3780 tree rhs1 = gimple_assign_rhs1 (stmt);
3781 tree rhs1_type = TREE_TYPE (rhs1);
3782 tree rhs2 = gimple_assign_rhs2 (stmt);
3783 tree rhs2_type = TREE_TYPE (rhs2);
3784
3785 if (!is_gimple_reg (lhs))
3786 {
3787 error ("non-register as LHS of binary operation");
3788 return true;
3789 }
3790
3791 if (!is_gimple_val (rhs1)
3792 || !is_gimple_val (rhs2))
3793 {
3794 error ("invalid operands in binary operation");
3795 return true;
3796 }
3797
3798 const char* const code_name = get_tree_code_name (rhs_code);
3799
3800 /* First handle operations that involve different types. */
3801 switch (rhs_code)
3802 {
3803 case COMPLEX_EXPR:
3804 {
3805 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3806 || !(INTEGRAL_TYPE_P (rhs1_type)
3807 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3808 || !(INTEGRAL_TYPE_P (rhs2_type)
3809 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3810 {
3811 error ("type mismatch in %qs", code_name);
3812 debug_generic_expr (lhs_type);
3813 debug_generic_expr (rhs1_type);
3814 debug_generic_expr (rhs2_type);
3815 return true;
3816 }
3817
3818 return false;
3819 }
3820
3821 case LSHIFT_EXPR:
3822 case RSHIFT_EXPR:
3823 case LROTATE_EXPR:
3824 case RROTATE_EXPR:
3825 {
3826 /* Shifts and rotates are ok on integral types, fixed-point
3827 types and integer vector types. */
3828 if ((!INTEGRAL_TYPE_P (rhs1_type)
3829 && !FIXED_POINT_TYPE_P (rhs1_type)
3830 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3831 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3832 || (!INTEGRAL_TYPE_P (rhs2_type)
3833 /* Vector shifts of vectors are also ok. */
3834 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3835 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3836 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3837 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3838 || !useless_type_conversion_p (lhs_type, rhs1_type))
3839 {
3840 error ("type mismatch in %qs", code_name);
3841 debug_generic_expr (lhs_type);
3842 debug_generic_expr (rhs1_type);
3843 debug_generic_expr (rhs2_type);
3844 return true;
3845 }
3846
3847 return false;
3848 }
3849
3850 case WIDEN_LSHIFT_EXPR:
3851 {
3852 if (!INTEGRAL_TYPE_P (lhs_type)
3853 || !INTEGRAL_TYPE_P (rhs1_type)
3854 || TREE_CODE (rhs2) != INTEGER_CST
3855 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3856 {
3857 error ("type mismatch in %qs", code_name);
3858 debug_generic_expr (lhs_type);
3859 debug_generic_expr (rhs1_type);
3860 debug_generic_expr (rhs2_type);
3861 return true;
3862 }
3863
3864 return false;
3865 }
3866
3867 case VEC_WIDEN_LSHIFT_HI_EXPR:
3868 case VEC_WIDEN_LSHIFT_LO_EXPR:
3869 {
3870 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3871 || TREE_CODE (lhs_type) != VECTOR_TYPE
3872 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3873 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3874 || TREE_CODE (rhs2) != INTEGER_CST
3875 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3876 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3877 {
3878 error ("type mismatch in %qs", code_name);
3879 debug_generic_expr (lhs_type);
3880 debug_generic_expr (rhs1_type);
3881 debug_generic_expr (rhs2_type);
3882 return true;
3883 }
3884
3885 return false;
3886 }
3887
3888 case WIDEN_PLUS_EXPR:
3889 case WIDEN_MINUS_EXPR:
3890 case PLUS_EXPR:
3891 case MINUS_EXPR:
3892 {
3893 tree lhs_etype = lhs_type;
3894 tree rhs1_etype = rhs1_type;
3895 tree rhs2_etype = rhs2_type;
3896 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3897 {
3898 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3899 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3900 {
3901 error ("invalid non-vector operands to %qs", code_name);
3902 return true;
3903 }
3904 lhs_etype = TREE_TYPE (lhs_type);
3905 rhs1_etype = TREE_TYPE (rhs1_type);
3906 rhs2_etype = TREE_TYPE (rhs2_type);
3907 }
3908 if (POINTER_TYPE_P (lhs_etype)
3909 || POINTER_TYPE_P (rhs1_etype)
3910 || POINTER_TYPE_P (rhs2_etype))
3911 {
3912 error ("invalid (pointer) operands to %qs", code_name);
3913 return true;
3914 }
3915
3916 /* Continue with generic binary expression handling. */
3917 break;
3918 }
3919
3920 case POINTER_PLUS_EXPR:
3921 {
3922 if (!POINTER_TYPE_P (rhs1_type)
3923 || !useless_type_conversion_p (lhs_type, rhs1_type)
3924 || !ptrofftype_p (rhs2_type))
3925 {
3926 error ("type mismatch in %qs", code_name);
3927 debug_generic_stmt (lhs_type);
3928 debug_generic_stmt (rhs1_type);
3929 debug_generic_stmt (rhs2_type);
3930 return true;
3931 }
3932
3933 return false;
3934 }
3935
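/* E.g. for char *p, *q the GIMPLE form of p - q must compute the
difference in a signed integral type as wide as the pointers, i.e. a
ptrdiff_t-like type. */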
3936 case POINTER_DIFF_EXPR:
3937 {
3938 if (!POINTER_TYPE_P (rhs1_type)
3939 || !POINTER_TYPE_P (rhs2_type)
3940 /* Because we special-case pointers to void we allow difference
3941 of arbitrary pointers with the same mode. */
3942 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3943 || !INTEGRAL_TYPE_P (lhs_type)
3944 || TYPE_UNSIGNED (lhs_type)
3945 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3946 {
3947 error ("type mismatch in %qs", code_name);
3948 debug_generic_stmt (lhs_type);
3949 debug_generic_stmt (rhs1_type);
3950 debug_generic_stmt (rhs2_type);
3951 return true;
3952 }
3953
3954 return false;
3955 }
3956
3957 case TRUTH_ANDIF_EXPR:
3958 case TRUTH_ORIF_EXPR:
3959 case TRUTH_AND_EXPR:
3960 case TRUTH_OR_EXPR:
3961 case TRUTH_XOR_EXPR:
3962
3963 gcc_unreachable ();
3964
3965 case LT_EXPR:
3966 case LE_EXPR:
3967 case GT_EXPR:
3968 case GE_EXPR:
3969 case EQ_EXPR:
3970 case NE_EXPR:
3971 case UNORDERED_EXPR:
3972 case ORDERED_EXPR:
3973 case UNLT_EXPR:
3974 case UNLE_EXPR:
3975 case UNGT_EXPR:
3976 case UNGE_EXPR:
3977 case UNEQ_EXPR:
3978 case LTGT_EXPR:
3979 /* Comparisons are also binary, but the result type is not
3980 connected to the operand types. */
3981 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3982
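/* A widening multiply needs an INTEGER_TYPE result at least twice as
precise as its equally-precise operands. Note that this case signals
failure without printing a diagnostic. */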
3983 case WIDEN_MULT_EXPR:
3984 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3985 return true;
3986 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3987 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3988
3989 case WIDEN_SUM_EXPR:
3990 {
3991 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3992 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3993 && ((!INTEGRAL_TYPE_P (rhs1_type)
3994 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3995 || (!INTEGRAL_TYPE_P (lhs_type)
3996 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3997 || !useless_type_conversion_p (lhs_type, rhs2_type)
3998 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3999 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4000 {
4001 error ("type mismatch in %qs", code_name);
4002 debug_generic_expr (lhs_type);
4003 debug_generic_expr (rhs1_type);
4004 debug_generic_expr (rhs2_type);
4005 return true;
4006 }
4007 return false;
4008 }
4009
4010 case VEC_WIDEN_MINUS_HI_EXPR:
4011 case VEC_WIDEN_MINUS_LO_EXPR:
4012 case VEC_WIDEN_PLUS_HI_EXPR:
4013 case VEC_WIDEN_PLUS_LO_EXPR:
4014 case VEC_WIDEN_MULT_HI_EXPR:
4015 case VEC_WIDEN_MULT_LO_EXPR:
4016 case VEC_WIDEN_MULT_EVEN_EXPR:
4017 case VEC_WIDEN_MULT_ODD_EXPR:
4018 {
4019 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4020 || TREE_CODE (lhs_type) != VECTOR_TYPE
4021 || !types_compatible_p (rhs1_type, rhs2_type)
4022 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4023 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4024 {
4025 error ("type mismatch in %qs", code_name);
4026 debug_generic_expr (lhs_type);
4027 debug_generic_expr (rhs1_type);
4028 debug_generic_expr (rhs2_type);
4029 return true;
4030 }
4031 return false;
4032 }
4033
4034 case VEC_PACK_TRUNC_EXPR:
4035 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4036 vector boolean types. */
4037 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4038 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4039 && types_compatible_p (rhs1_type, rhs2_type)
4040 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4041 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4042 return false;
4043
4044 /* Fallthru. */
4045 case VEC_PACK_SAT_EXPR:
4046 case VEC_PACK_FIX_TRUNC_EXPR:
4047 {
4048 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4049 || TREE_CODE (lhs_type) != VECTOR_TYPE
4050 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4051 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4052 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4053 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4054 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4055 || !types_compatible_p (rhs1_type, rhs2_type)
4056 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4057 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4058 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4059 TYPE_VECTOR_SUBPARTS (lhs_type)))
4060 {
4061 error ("type mismatch in %qs", code_name);
4062 debug_generic_expr (lhs_type);
4063 debug_generic_expr (rhs1_type);
4064 debug_generic_expr (rhs2_type);
4065 return true;
4066 }
4067
4068 return false;
4069 }
4070
4071 case VEC_PACK_FLOAT_EXPR:
4072 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4073 || TREE_CODE (lhs_type) != VECTOR_TYPE
4074 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4075 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4076 || !types_compatible_p (rhs1_type, rhs2_type)
4077 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4078 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4079 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4080 TYPE_VECTOR_SUBPARTS (lhs_type)))
4081 {
4082 error ("type mismatch in %qs", code_name);
4083 debug_generic_expr (lhs_type);
4084 debug_generic_expr (rhs1_type);
4085 debug_generic_expr (rhs2_type);
4086 return true;
4087 }
4088
4089 return false;
4090
4091 case MULT_EXPR:
4092 case MULT_HIGHPART_EXPR:
4093 case TRUNC_DIV_EXPR:
4094 case CEIL_DIV_EXPR:
4095 case FLOOR_DIV_EXPR:
4096 case ROUND_DIV_EXPR:
4097 case TRUNC_MOD_EXPR:
4098 case CEIL_MOD_EXPR:
4099 case FLOOR_MOD_EXPR:
4100 case ROUND_MOD_EXPR:
4101 case RDIV_EXPR:
4102 case EXACT_DIV_EXPR:
4103 case MIN_EXPR:
4104 case MAX_EXPR:
4105 case BIT_IOR_EXPR:
4106 case BIT_XOR_EXPR:
4107 case BIT_AND_EXPR:
4108 /* Continue with generic binary expression handling. */
4109 break;
4110
4111 case VEC_SERIES_EXPR:
4112 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4113 {
4114 error ("type mismatch in %qs", code_name);
4115 debug_generic_expr (rhs1_type);
4116 debug_generic_expr (rhs2_type);
4117 return true;
4118 }
4119 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4120 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4121 {
4122 error ("vector type expected in %qs", code_name);
4123 debug_generic_expr (lhs_type);
4124 return true;
4125 }
4126 return false;
4127
4128 default:
4129 gcc_unreachable ();
4130 }
4131
4132 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4133 || !useless_type_conversion_p (lhs_type, rhs2_type))
4134 {
4135 error ("type mismatch in binary expression");
4136 debug_generic_stmt (lhs_type);
4137 debug_generic_stmt (rhs1_type);
4138 debug_generic_stmt (rhs2_type);
4139 return true;
4140 }
4141
4142 return false;
4143 }
4144
4145 /* Verify a gimple assignment statement STMT with a ternary rhs.
4146 Returns true if anything is wrong. */
4147
4148 static bool
4149 verify_gimple_assign_ternary (gassign *stmt)
4150 {
4151 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4152 tree lhs = gimple_assign_lhs (stmt);
4153 tree lhs_type = TREE_TYPE (lhs);
4154 tree rhs1 = gimple_assign_rhs1 (stmt);
4155 tree rhs1_type = TREE_TYPE (rhs1);
4156 tree rhs2 = gimple_assign_rhs2 (stmt);
4157 tree rhs2_type = TREE_TYPE (rhs2);
4158 tree rhs3 = gimple_assign_rhs3 (stmt);
4159 tree rhs3_type = TREE_TYPE (rhs3);
4160
4161 if (!is_gimple_reg (lhs))
4162 {
4163 error ("non-register as LHS of ternary operation");
4164 return true;
4165 }
4166
4167 if ((rhs_code == COND_EXPR
4168 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4169 || !is_gimple_val (rhs2)
4170 || !is_gimple_val (rhs3))
4171 {
4172 error ("invalid operands in ternary operation");
4173 return true;
4174 }
4175
4176 const char* const code_name = get_tree_code_name (rhs_code);
4177
4178 /* First handle operations that involve different types. */
4179 switch (rhs_code)
4180 {
4181 case WIDEN_MULT_PLUS_EXPR:
4182 case WIDEN_MULT_MINUS_EXPR:
4183 if ((!INTEGRAL_TYPE_P (rhs1_type)
4184 && !FIXED_POINT_TYPE_P (rhs1_type))
4185 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4186 || !useless_type_conversion_p (lhs_type, rhs3_type)
4187 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4188 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4189 {
4190 error ("type mismatch in %qs", code_name);
4191 debug_generic_expr (lhs_type);
4192 debug_generic_expr (rhs1_type);
4193 debug_generic_expr (rhs2_type);
4194 debug_generic_expr (rhs3_type);
4195 return true;
4196 }
4197 break;
4198
4199 case VEC_COND_EXPR:
4200 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4201 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4202 TYPE_VECTOR_SUBPARTS (lhs_type)))
4203 {
4204 error ("the first argument of a %qs must be a "
4205 "boolean vector type with the same number of elements "
4206 "as the result", code_name);
4207 debug_generic_expr (lhs_type);
4208 debug_generic_expr (rhs1_type);
4209 return true;
4210 }
4211 /* Fallthrough. */
4212 case COND_EXPR:
4213 if (!is_gimple_val (rhs1)
4214 && verify_gimple_comparison (TREE_TYPE (rhs1),
4215 TREE_OPERAND (rhs1, 0),
4216 TREE_OPERAND (rhs1, 1),
4217 TREE_CODE (rhs1)))
4218 return true;
4219 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4220 || !useless_type_conversion_p (lhs_type, rhs3_type))
4221 {
4222 error ("type mismatch in %qs", code_name);
4223 debug_generic_expr (lhs_type);
4224 debug_generic_expr (rhs2_type);
4225 debug_generic_expr (rhs3_type);
4226 return true;
4227 }
4228 break;
4229
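/* VEC_PERM_EXPR <v0, v1, sel> selects elements from the concatenation
of V0 and V1 as indexed by SEL, so all four vector types involved must
agree in element count; SEL needs integral elements, of the same bit
size as the data elements unless SEL is a VECTOR_CST. */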
4230 case VEC_PERM_EXPR:
4231 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4232 || !useless_type_conversion_p (lhs_type, rhs2_type))
4233 {
4234 error ("type mismatch in %qs", code_name);
4235 debug_generic_expr (lhs_type);
4236 debug_generic_expr (rhs1_type);
4237 debug_generic_expr (rhs2_type);
4238 debug_generic_expr (rhs3_type);
4239 return true;
4240 }
4241
4242 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4243 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4244 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4245 {
4246 error ("vector types expected in %qs", code_name);
4247 debug_generic_expr (lhs_type);
4248 debug_generic_expr (rhs1_type);
4249 debug_generic_expr (rhs2_type);
4250 debug_generic_expr (rhs3_type);
4251 return true;
4252 }
4253
4254 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4255 TYPE_VECTOR_SUBPARTS (rhs2_type))
4256 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4257 TYPE_VECTOR_SUBPARTS (rhs3_type))
4258 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4259 TYPE_VECTOR_SUBPARTS (lhs_type)))
4260 {
4261 error ("vectors with differing element counts found in %qs",
4262 code_name);
4263 debug_generic_expr (lhs_type);
4264 debug_generic_expr (rhs1_type);
4265 debug_generic_expr (rhs2_type);
4266 debug_generic_expr (rhs3_type);
4267 return true;
4268 }
4269
4270 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4271 || (TREE_CODE (rhs3) != VECTOR_CST
4272 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4273 (TREE_TYPE (rhs3_type)))
4274 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4275 (TREE_TYPE (rhs1_type))))))
4276 {
4277 error ("invalid mask type in %qs", code_name);
4278 debug_generic_expr (lhs_type);
4279 debug_generic_expr (rhs1_type);
4280 debug_generic_expr (rhs2_type);
4281 debug_generic_expr (rhs3_type);
4282 return true;
4283 }
4284
4285 return false;
4286
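/* SAD_EXPR <v1, v2, acc> accumulates sums of absolute differences, so
the accumulator (and result) element type must be at least twice as
wide as the element type of the compared vectors. */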
4287 case SAD_EXPR:
4288 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4289 || !useless_type_conversion_p (lhs_type, rhs3_type)
4290 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4291 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4292 {
4293 error ("type mismatch in %qs", code_name);
4294 debug_generic_expr (lhs_type);
4295 debug_generic_expr (rhs1_type);
4296 debug_generic_expr (rhs2_type);
4297 debug_generic_expr (rhs3_type);
4298 return true;
4299 }
4300
4301 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4302 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4303 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4304 {
4305 error ("vector types expected in %qs", code_name);
4306 debug_generic_expr (lhs_type);
4307 debug_generic_expr (rhs1_type);
4308 debug_generic_expr (rhs2_type);
4309 debug_generic_expr (rhs3_type);
4310 return true;
4311 }
4312
4313 return false;
4314
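/* BIT_INSERT_EXPR <x, y, pos> replaces the bits of X starting at POS
with Y. Valid combinations are integer into integer, a vector element
into a vector, and an aligned sub-vector into a vector; POS (RHS3) must
be a bitsizetype constant and, in the vector cases, a multiple of the
inserted size. */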
4315 case BIT_INSERT_EXPR:
4316 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4317 {
4318 error ("type mismatch in %qs", code_name);
4319 debug_generic_expr (lhs_type);
4320 debug_generic_expr (rhs1_type);
4321 return true;
4322 }
4323 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4324 && INTEGRAL_TYPE_P (rhs2_type))
4325 /* Vector element insert. */
4326 || (VECTOR_TYPE_P (rhs1_type)
4327 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4328 /* Aligned sub-vector insert. */
4329 || (VECTOR_TYPE_P (rhs1_type)
4330 && VECTOR_TYPE_P (rhs2_type)
4331 && types_compatible_p (TREE_TYPE (rhs1_type),
4332 TREE_TYPE (rhs2_type))
4333 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4334 TYPE_VECTOR_SUBPARTS (rhs2_type))
4335 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4336 {
4337 error ("invalid type combination in %qs", code_name);
4338 debug_generic_expr (rhs1_type);
4339 debug_generic_expr (rhs2_type);
4340 return true;
4341 }
4342 if (! tree_fits_uhwi_p (rhs3)
4343 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4344 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4345 {
4346 error ("invalid position or size in %qs", code_name);
4347 return true;
4348 }
4349 if (INTEGRAL_TYPE_P (rhs1_type)
4350 && !type_has_mode_precision_p (rhs1_type))
4351 {
4352 error ("%qs into non-mode-precision operand", code_name);
4353 return true;
4354 }
4355 if (INTEGRAL_TYPE_P (rhs1_type))
4356 {
4357 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4358 if (bitpos >= TYPE_PRECISION (rhs1_type)
4359 || (bitpos + TYPE_PRECISION (rhs2_type)
4360 > TYPE_PRECISION (rhs1_type)))
4361 {
4362 error ("insertion out of range in %qs", code_name);
4363 return true;
4364 }
4365 }
4366 else if (VECTOR_TYPE_P (rhs1_type))
4367 {
4368 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4369 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4370 if (bitpos % bitsize != 0)
4371 {
4372 error ("%qs not at element boundary", code_name);
4373 return true;
4374 }
4375 }
4376 return false;
4377
4378 case DOT_PROD_EXPR:
4379 {
4380 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4381 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4382 && ((!INTEGRAL_TYPE_P (rhs1_type)
4383 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4384 || (!INTEGRAL_TYPE_P (lhs_type)
4385 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4386 || !types_compatible_p (rhs1_type, rhs2_type)
4387 || !useless_type_conversion_p (lhs_type, rhs3_type)
4388 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4389 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4390 {
4391 error ("type mismatch in %qs", code_name);
4392 debug_generic_expr (lhs_type);
4393 debug_generic_expr (rhs1_type);
4394 debug_generic_expr (rhs2_type);
4395 return true;
4396 }
4397 return false;
4398 }
4399
4400 case REALIGN_LOAD_EXPR:
4401 /* FIXME. */
4402 return false;
4403
4404 default:
4405 gcc_unreachable ();
4406 }
4407 return false;
4408 }
4409
4410 /* Verify a gimple assignment statement STMT with a single rhs.
4411 Returns true if anything is wrong. */
4412
4413 static bool
4414 verify_gimple_assign_single (gassign *stmt)
4415 {
4416 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4417 tree lhs = gimple_assign_lhs (stmt);
4418 tree lhs_type = TREE_TYPE (lhs);
4419 tree rhs1 = gimple_assign_rhs1 (stmt);
4420 tree rhs1_type = TREE_TYPE (rhs1);
4421 bool res = false;
4422
4423 const char* const code_name = get_tree_code_name (rhs_code);
4424
4425 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4426 {
4427 error ("non-trivial conversion in %qs", code_name);
4428 debug_generic_expr (lhs_type);
4429 debug_generic_expr (rhs1_type);
4430 return true;
4431 }
4432
4433 if (gimple_clobber_p (stmt)
4434 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4435 {
4436 error ("%qs LHS in clobber statement",
4437 get_tree_code_name (TREE_CODE (lhs)));
4438 debug_generic_expr (lhs);
4439 return true;
4440 }
4441
4442 if (handled_component_p (lhs)
4443 || TREE_CODE (lhs) == MEM_REF
4444 || TREE_CODE (lhs) == TARGET_MEM_REF)
4445 res |= verify_types_in_gimple_reference (lhs, true);
4446
4447 /* Special codes we cannot handle via their class. */
4448 switch (rhs_code)
4449 {
4450 case ADDR_EXPR:
4451 {
4452 tree op = TREE_OPERAND (rhs1, 0);
4453 if (!is_gimple_addressable (op))
4454 {
4455 error ("invalid operand in %qs", code_name);
4456 return true;
4457 }
4458
4459 /* Technically there is no longer a need for matching types, but
4460 gimple hygiene asks for this check. In LTO we can end up
4461 combining incompatible units and thus end up with addresses
4462 of globals that change their type to a common one. */
4463 if (!in_lto_p
4464 && !types_compatible_p (TREE_TYPE (op),
4465 TREE_TYPE (TREE_TYPE (rhs1)))
4466 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4467 TREE_TYPE (op)))
4468 {
4469 error ("type mismatch in %qs", code_name);
4470 debug_generic_stmt (TREE_TYPE (rhs1));
4471 debug_generic_stmt (TREE_TYPE (op));
4472 return true;
4473 }
4474
4475 return (verify_address (rhs1, true)
4476 || verify_types_in_gimple_reference (op, true));
4477 }
4478
4479 /* tcc_reference */
4480 case INDIRECT_REF:
4481 error ("%qs in gimple IL", code_name);
4482 return true;
4483
4484 case COMPONENT_REF:
4485 case BIT_FIELD_REF:
4486 case ARRAY_REF:
4487 case ARRAY_RANGE_REF:
4488 case VIEW_CONVERT_EXPR:
4489 case REALPART_EXPR:
4490 case IMAGPART_EXPR:
4491 case TARGET_MEM_REF:
4492 case MEM_REF:
4493 if (!is_gimple_reg (lhs)
4494 && is_gimple_reg_type (TREE_TYPE (lhs)))
4495 {
4496 error ("invalid RHS for gimple memory store: %qs", code_name);
4497 debug_generic_stmt (lhs);
4498 debug_generic_stmt (rhs1);
4499 return true;
4500 }
4501 return res || verify_types_in_gimple_reference (rhs1, false);
4502
4503 /* tcc_constant */
4504 case SSA_NAME:
4505 case INTEGER_CST:
4506 case REAL_CST:
4507 case FIXED_CST:
4508 case COMPLEX_CST:
4509 case VECTOR_CST:
4510 case STRING_CST:
4511 return res;
4512
4513 /* tcc_declaration */
4514 case CONST_DECL:
4515 return res;
4516 case VAR_DECL:
4517 case PARM_DECL:
4518 if (!is_gimple_reg (lhs)
4519 && !is_gimple_reg (rhs1)
4520 && is_gimple_reg_type (TREE_TYPE (lhs)))
4521 {
4522 error ("invalid RHS for gimple memory store: %qs", code_name);
4523 debug_generic_stmt (lhs);
4524 debug_generic_stmt (rhs1);
4525 return true;
4526 }
4527 return res;
4528
4529 case CONSTRUCTOR:
4530 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4531 {
4532 unsigned int i;
4533 tree elt_i, elt_v, elt_t = NULL_TREE;
4534
4535 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4536 return res;
4537 /* For vector CONSTRUCTORs we require that either it is an empty
4538 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4539 (then the element count must be correct to cover the whole
4540 outer vector and the index must be NULL on all elements), or it
4541 is a CONSTRUCTOR of scalar elements, where as an exception we
4542 allow a smaller number of elements (assuming zero filling) and
4543 consecutive indexes as compared to NULL indexes (such
4544 CONSTRUCTORs can appear in the IL from FEs). */
4545 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4546 {
4547 if (elt_t == NULL_TREE)
4548 {
4549 elt_t = TREE_TYPE (elt_v);
4550 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4551 {
4552 tree elt_t = TREE_TYPE (elt_v);
4553 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4554 TREE_TYPE (elt_t)))
4555 {
4556 error ("incorrect type of vector %qs elements",
4557 code_name);
4558 debug_generic_stmt (rhs1);
4559 return true;
4560 }
4561 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4562 * TYPE_VECTOR_SUBPARTS (elt_t),
4563 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4564 {
4565 error ("incorrect number of vector %qs elements",
4566 code_name);
4567 debug_generic_stmt (rhs1);
4568 return true;
4569 }
4570 }
4571 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4572 elt_t))
4573 {
4574 error ("incorrect type of vector %qs elements",
4575 code_name);
4576 debug_generic_stmt (rhs1);
4577 return true;
4578 }
4579 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4580 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4581 {
4582 error ("incorrect number of vector %qs elements",
4583 code_name);
4584 debug_generic_stmt (rhs1);
4585 return true;
4586 }
4587 }
4588 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4589 {
4590 error ("incorrect type of vector CONSTRUCTOR elements");
4591 debug_generic_stmt (rhs1);
4592 return true;
4593 }
4594 if (elt_i != NULL_TREE
4595 && (TREE_CODE (elt_t) == VECTOR_TYPE
4596 || TREE_CODE (elt_i) != INTEGER_CST
4597 || compare_tree_int (elt_i, i) != 0))
4598 {
4599 error ("vector %qs with non-NULL element index",
4600 code_name);
4601 debug_generic_stmt (rhs1);
4602 return true;
4603 }
4604 if (!is_gimple_val (elt_v))
4605 {
4606 error ("vector %qs element is not a GIMPLE value",
4607 code_name);
4608 debug_generic_stmt (rhs1);
4609 return true;
4610 }
4611 }
4612 }
4613 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4614 {
4615 error ("non-vector %qs with elements", code_name);
4616 debug_generic_stmt (rhs1);
4617 return true;
4618 }
4619 return res;
4620
4621 case ASSERT_EXPR:
4622 /* FIXME. */
4623 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4624 if (rhs1 == boolean_false_node)
4625 {
4626 error ("%qs with an always-false condition", code_name);
4627 debug_generic_stmt (rhs1);
4628 return true;
4629 }
4630 break;
4631
4632 case OBJ_TYPE_REF:
4633 case WITH_SIZE_EXPR:
4634 /* FIXME. */
4635 return res;
4636
4637 default:;
4638 }
4639
4640 return res;
4641 }
4642
4643 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4644 is a problem, otherwise false. */
4645
4646 static bool
4647 verify_gimple_assign (gassign *stmt)
4648 {
4649 switch (gimple_assign_rhs_class (stmt))
4650 {
4651 case GIMPLE_SINGLE_RHS:
4652 return verify_gimple_assign_single (stmt);
4653
4654 case GIMPLE_UNARY_RHS:
4655 return verify_gimple_assign_unary (stmt);
4656
4657 case GIMPLE_BINARY_RHS:
4658 return verify_gimple_assign_binary (stmt);
4659
4660 case GIMPLE_TERNARY_RHS:
4661 return verify_gimple_assign_ternary (stmt);
4662
4663 default:
4664 gcc_unreachable ();
4665 }
4666 }
4667
4668 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4669 is a problem, otherwise false. */
4670
4671 static bool
4672 verify_gimple_return (greturn *stmt)
4673 {
4674 tree op = gimple_return_retval (stmt);
4675 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4676
4677 /* We cannot test for present return values as we do not fix up missing
4678 return values from the original source. */
4679 if (op == NULL)
4680 return false;
4681
4682 if (!is_gimple_val (op)
4683 && TREE_CODE (op) != RESULT_DECL)
4684 {
4685 error ("invalid operand in return statement");
4686 debug_generic_stmt (op);
4687 return true;
4688 }
4689
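/* When the result is passed by invisible reference, OP has pointer
type; substitute that type for OP so the conversion check below
compares the pointed-to type against the declared return type. */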
4690 if ((TREE_CODE (op) == RESULT_DECL
4691 && DECL_BY_REFERENCE (op))
4692 || (TREE_CODE (op) == SSA_NAME
4693 && SSA_NAME_VAR (op)
4694 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4695 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4696 op = TREE_TYPE (op);
4697
4698 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4699 {
4700 error ("invalid conversion in return statement");
4701 debug_generic_stmt (restype);
4702 debug_generic_stmt (TREE_TYPE (op));
4703 return true;
4704 }
4705
4706 return false;
4707 }
4708
4709
4710 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4711 is a problem, otherwise false. */
4712
4713 static bool
4714 verify_gimple_goto (ggoto *stmt)
4715 {
4716 tree dest = gimple_goto_dest (stmt);
4717
4718 /* ??? We have two canonical forms of direct goto destinations, a
4719 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4720 if (TREE_CODE (dest) != LABEL_DECL
4721 && (!is_gimple_val (dest)
4722 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4723 {
4724 error ("goto destination is neither a label nor a pointer");
4725 return true;
4726 }
4727
4728 return false;
4729 }
4730
4731 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4732 is a problem, otherwise false. */
4733
4734 static bool
4735 verify_gimple_switch (gswitch *stmt)
4736 {
4737 unsigned int i, n;
4738 tree elt, prev_upper_bound = NULL_TREE;
4739 tree index_type, elt_type = NULL_TREE;
4740
4741 if (!is_gimple_val (gimple_switch_index (stmt)))
4742 {
4743 error ("invalid operand to switch statement");
4744 debug_generic_stmt (gimple_switch_index (stmt));
4745 return true;
4746 }
4747
4748 index_type = TREE_TYPE (gimple_switch_index (stmt));
4749 if (! INTEGRAL_TYPE_P (index_type))
4750 {
4751 error ("non-integral type in switch statement");
4752 debug_generic_expr (index_type);
4753 return true;
4754 }
4755
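/* The first vector entry is reserved for the default case, which must
carry neither bounds nor a chain. */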
4756 elt = gimple_switch_label (stmt, 0);
4757 if (CASE_LOW (elt) != NULL_TREE
4758 || CASE_HIGH (elt) != NULL_TREE
4759 || CASE_CHAIN (elt) != NULL_TREE)
4760 {
4761 error ("invalid default case label in switch statement");
4762 debug_generic_expr (elt);
4763 return true;
4764 }
4765
4766 n = gimple_switch_num_labels (stmt);
4767 for (i = 1; i < n; i++)
4768 {
4769 elt = gimple_switch_label (stmt, i);
4770
4771 if (CASE_CHAIN (elt))
4772 {
4773 error ("invalid %<CASE_CHAIN%>");
4774 debug_generic_expr (elt);
4775 return true;
4776 }
4777 if (! CASE_LOW (elt))
4778 {
4779 error ("invalid case label in switch statement");
4780 debug_generic_expr (elt);
4781 return true;
4782 }
4783 if (CASE_HIGH (elt)
4784 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4785 {
4786 error ("invalid case range in switch statement");
4787 debug_generic_expr (elt);
4788 return true;
4789 }
4790
4791 if (! elt_type)
4792 {
4793 elt_type = TREE_TYPE (CASE_LOW (elt));
4794 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4795 {
4796 error ("type precision mismatch in switch statement");
4797 return true;
4798 }
4799 }
4800 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4801 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4802 {
4803 error ("type mismatch for case label in switch statement");
4804 debug_generic_expr (elt);
4805 return true;
4806 }
4807
4808 if (prev_upper_bound)
4809 {
4810 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4811 {
4812 error ("case labels not sorted in switch statement");
4813 return true;
4814 }
4815 }
4816
4817 prev_upper_bound = CASE_HIGH (elt);
4818 if (! prev_upper_bound)
4819 prev_upper_bound = CASE_LOW (elt);
4820 }
4821
4822 return false;
4823 }
4824
4825 /* Verify a gimple debug statement STMT.
4826 Returns true if anything is wrong. */
4827
4828 static bool
4829 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4830 {
4831 /* There isn't much that could be wrong in a gimple debug stmt. A
4832 gimple debug bind stmt, for example, maps a tree (usually a
4833 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4834 or member of an aggregate type) to another tree that can be an
4835 arbitrary expression. These stmts expand into debug insns, and
4836 are converted to debug notes by var-tracking.c. */
4837 return false;
4838 }
4839
4840 /* Verify a gimple label statement STMT.
4841 Returns true if anything is wrong. */
4842
4843 static bool
4844 verify_gimple_label (glabel *stmt)
4845 {
4846 tree decl = gimple_label_label (stmt);
4847 int uid;
4848 bool err = false;
4849
4850 if (TREE_CODE (decl) != LABEL_DECL)
4851 return true;
4852 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4853 && DECL_CONTEXT (decl) != current_function_decl)
4854 {
4855 error ("label context is not the current function declaration");
4856 err |= true;
4857 }
4858
4859 uid = LABEL_DECL_UID (decl);
4860 if (cfun->cfg
4861 && (uid == -1
4862 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4863 {
4864 error ("incorrect entry in %<label_to_block_map%>");
4865 err |= true;
4866 }
4867
4868 uid = EH_LANDING_PAD_NR (decl);
4869 if (uid)
4870 {
4871 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4872 if (decl != lp->post_landing_pad)
4873 {
4874 error ("incorrect setting of landing pad number");
4875 err |= true;
4876 }
4877 }
4878
4879 return err;
4880 }
4881
4882 /* Verify a gimple cond statement STMT.
4883 Returns true if anything is wrong. */
4884
4885 static bool
4886 verify_gimple_cond (gcond *stmt)
4887 {
4888 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4889 {
4890 error ("invalid comparison code in gimple cond");
4891 return true;
4892 }
4893 if (!(!gimple_cond_true_label (stmt)
4894 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4895 || !(!gimple_cond_false_label (stmt)
4896 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4897 {
4898 error ("invalid labels in gimple cond");
4899 return true;
4900 }
4901
4902 return verify_gimple_comparison (boolean_type_node,
4903 gimple_cond_lhs (stmt),
4904 gimple_cond_rhs (stmt),
4905 gimple_cond_code (stmt));
4906 }
4907
4908 /* Verify the GIMPLE statement STMT. Returns true if there is an
4909 error, otherwise false. */
4910
4911 static bool
4912 verify_gimple_stmt (gimple *stmt)
4913 {
4914 switch (gimple_code (stmt))
4915 {
4916 case GIMPLE_ASSIGN:
4917 return verify_gimple_assign (as_a <gassign *> (stmt));
4918
4919 case GIMPLE_LABEL:
4920 return verify_gimple_label (as_a <glabel *> (stmt));
4921
4922 case GIMPLE_CALL:
4923 return verify_gimple_call (as_a <gcall *> (stmt));
4924
4925 case GIMPLE_COND:
4926 return verify_gimple_cond (as_a <gcond *> (stmt));
4927
4928 case GIMPLE_GOTO:
4929 return verify_gimple_goto (as_a <ggoto *> (stmt));
4930
4931 case GIMPLE_SWITCH:
4932 return verify_gimple_switch (as_a <gswitch *> (stmt));
4933
4934 case GIMPLE_RETURN:
4935 return verify_gimple_return (as_a <greturn *> (stmt));
4936
4937 case GIMPLE_ASM:
4938 return false;
4939
4940 case GIMPLE_TRANSACTION:
4941 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4942
4943 /* Tuples that do not have tree operands. */
4944 case GIMPLE_NOP:
4945 case GIMPLE_PREDICT:
4946 case GIMPLE_RESX:
4947 case GIMPLE_EH_DISPATCH:
4948 case GIMPLE_EH_MUST_NOT_THROW:
4949 return false;
4950
4951 CASE_GIMPLE_OMP:
4952 /* OpenMP directives are validated by the FE and never operated
4953 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4954 non-gimple expressions when the main index variable has had
4955 its address taken. This does not affect the loop itself
4956 because the header of a GIMPLE_OMP_FOR is merely used to determine
4957 how to set up the parallel iteration. */
4958 return false;
4959
4960 case GIMPLE_DEBUG:
4961 return verify_gimple_debug (stmt);
4962
4963 default:
4964 gcc_unreachable ();
4965 }
4966 }
4967
4968 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4969 and false otherwise. */
4970
4971 static bool
4972 verify_gimple_phi (gphi *phi)
4973 {
4974 bool err = false;
4975 unsigned i;
4976 tree phi_result = gimple_phi_result (phi);
4977 bool virtual_p;
4978
4979 if (!phi_result)
4980 {
4981 error ("invalid %<PHI%> result");
4982 return true;
4983 }
4984
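/* A virtual PHI merges the single virtual operand, so both its result
and all of its arguments must be SSA names of the one gimple_vop
variable. */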
4985 virtual_p = virtual_operand_p (phi_result);
4986 if (TREE_CODE (phi_result) != SSA_NAME
4987 || (virtual_p
4988 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4989 {
4990 error ("invalid %<PHI%> result");
4991 err = true;
4992 }
4993
4994 for (i = 0; i < gimple_phi_num_args (phi); i++)
4995 {
4996 tree t = gimple_phi_arg_def (phi, i);
4997
4998 if (!t)
4999 {
5000 error ("missing %<PHI%> def");
5001 err |= true;
5002 continue;
5003 }
5004 /* Addressable variables do have SSA_NAMEs but they
5005 are not considered gimple values. */
5006 else if ((TREE_CODE (t) == SSA_NAME
5007 && virtual_p != virtual_operand_p (t))
5008 || (virtual_p
5009 && (TREE_CODE (t) != SSA_NAME
5010 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5011 || (!virtual_p
5012 && !is_gimple_val (t)))
5013 {
5014 error ("invalid %<PHI%> argument");
5015 debug_generic_expr (t);
5016 err |= true;
5017 }
5018 #ifdef ENABLE_TYPES_CHECKING
5019 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5020 {
5021 error ("incompatible types in %<PHI%> argument %u", i);
5022 debug_generic_stmt (TREE_TYPE (phi_result));
5023 debug_generic_stmt (TREE_TYPE (t));
5024 err |= true;
5025 }
5026 #endif
5027 }
5028
5029 return err;
5030 }
5031
5032 /* Verify the GIMPLE statements inside the sequence STMTS. */
5033
5034 static bool
5035 verify_gimple_in_seq_2 (gimple_seq stmts)
5036 {
5037 gimple_stmt_iterator ittr;
5038 bool err = false;
5039
5040 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5041 {
5042 gimple *stmt = gsi_stmt (ittr);
5043
5044 switch (gimple_code (stmt))
5045 {
5046 case GIMPLE_BIND:
5047 err |= verify_gimple_in_seq_2 (
5048 gimple_bind_body (as_a <gbind *> (stmt)));
5049 break;
5050
5051 case GIMPLE_TRY:
5052 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5053 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5054 break;
5055
5056 case GIMPLE_EH_FILTER:
5057 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5058 break;
5059
5060 case GIMPLE_EH_ELSE:
5061 {
5062 geh_else *eh_else = as_a <geh_else *> (stmt);
5063 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5064 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5065 }
5066 break;
5067
5068 case GIMPLE_CATCH:
5069 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5070 as_a <gcatch *> (stmt)));
5071 break;
5072
5073 case GIMPLE_TRANSACTION:
5074 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5075 break;
5076
5077 default:
5078 {
5079 bool err2 = verify_gimple_stmt (stmt);
5080 if (err2)
5081 debug_gimple_stmt (stmt);
5082 err |= err2;
5083 }
5084 }
5085 }
5086
5087 return err;
5088 }
5089
5090 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5091 is a problem, otherwise false. */
5092
5093 static bool
5094 verify_gimple_transaction (gtransaction *stmt)
5095 {
5096 tree lab;
5097
5098 lab = gimple_transaction_label_norm (stmt);
5099 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5100 return true;
5101 lab = gimple_transaction_label_uninst (stmt);
5102 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5103 return true;
5104 lab = gimple_transaction_label_over (stmt);
5105 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5106 return true;
5107
5108 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5109 }
5110
5111
5112 /* Verify the GIMPLE statements inside the statement list STMTS. */
5113
5114 DEBUG_FUNCTION void
5115 verify_gimple_in_seq (gimple_seq stmts)
5116 {
5117 timevar_push (TV_TREE_STMT_VERIFY);
5118 if (verify_gimple_in_seq_2 (stmts))
5119 internal_error ("%<verify_gimple%> failed");
5120 timevar_pop (TV_TREE_STMT_VERIFY);
5121 }
5122
5123 /* Return true when tree node T can be shared. */
5124
5125 static bool
5126 tree_node_can_be_shared (tree t)
5127 {
5128 if (IS_TYPE_OR_DECL_P (t)
5129 || TREE_CODE (t) == SSA_NAME
5130 || TREE_CODE (t) == IDENTIFIER_NODE
5131 || TREE_CODE (t) == CASE_LABEL_EXPR
5132 || is_gimple_min_invariant (t))
5133 return true;
5134
5135 if (t == error_mark_node)
5136 return true;
5137
5138 return false;
5139 }
5140
5141 /* Called via walk_tree. Verify tree sharing. */
5142
5143 static tree
5144 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5145 {
5146 hash_set<void *> *visited = (hash_set<void *> *) data;
5147
5148 if (tree_node_can_be_shared (*tp))
5149 {
5150 *walk_subtrees = false;
5151 return NULL;
5152 }
5153
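/* hash_set::add returns true when *TP was already in the set, which
means the node is reachable along two paths and thus wrongly shared. */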
5154 if (visited->add (*tp))
5155 return *tp;
5156
5157 return NULL;
5158 }
5159
5160 /* Called via walk_gimple_stmt. Verify tree sharing. */
5161
5162 static tree
5163 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5164 {
5165 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5166 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5167 }
5168
5169 static bool eh_error_found;
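/* Callback for traversing the EH throw statement table. Complain about
entries whose statement was never visited during the IL walk, as those
are dead EH table entries. */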
5170 bool
5171 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5172 hash_set<gimple *> *visited)
5173 {
5174 if (!visited->contains (stmt))
5175 {
5176 error ("dead statement in EH table");
5177 debug_gimple_stmt (stmt);
5178 eh_error_found = true;
5179 }
5180 return true;
5181 }
5182
5183 /* Verify that the block of location LOC is in BLOCKS. */
5184
5185 static bool
5186 verify_location (hash_set<tree> *blocks, location_t loc)
5187 {
5188 tree block = LOCATION_BLOCK (loc);
5189 if (block != NULL_TREE
5190 && !blocks->contains (block))
5191 {
5192 error ("location references block not in block tree");
5193 return true;
5194 }
5195 if (block != NULL_TREE)
5196 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5197 return false;
5198 }
5199
5200 /* Called via walk_tree. Verify that expressions have no blocks. */
5201
5202 static tree
5203 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5204 {
5205 if (!EXPR_P (*tp))
5206 {
5207 *walk_subtrees = false;
5208 return NULL;
5209 }
5210
5211 location_t loc = EXPR_LOCATION (*tp);
5212 if (LOCATION_BLOCK (loc) != NULL)
5213 return *tp;
5214
5215 return NULL;
5216 }
5217
5218 /* Called via walk_tree. Verify locations of expressions. */
5219
5220 static tree
5221 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5222 {
5223 hash_set<tree> *blocks = (hash_set<tree> *) data;
5224 tree t = *tp;
5225
5226 /* ??? This doesn't really belong here but there's no good place to
5227 stick this remainder of old verify_expr. */
5228 /* ??? This barfs on debug stmts which contain binds to vars with
5229 different function context. */
5230 #if 0
5231 if (VAR_P (t)
5232 || TREE_CODE (t) == PARM_DECL
5233 || TREE_CODE (t) == RESULT_DECL)
5234 {
5235 tree context = decl_function_context (t);
5236 if (context != cfun->decl
5237 && !SCOPE_FILE_SCOPE_P (context)
5238 && !TREE_STATIC (t)
5239 && !DECL_EXTERNAL (t))
5240 {
5241 error ("local declaration from a different function");
5242 return t;
5243 }
5244 }
5245 #endif
5246
5247 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5248 {
5249 tree x = DECL_DEBUG_EXPR (t);
5250 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5251 if (addr)
5252 return addr;
5253 }
5254 if ((VAR_P (t)
5255 || TREE_CODE (t) == PARM_DECL
5256 || TREE_CODE (t) == RESULT_DECL)
5257 && DECL_HAS_VALUE_EXPR_P (t))
5258 {
5259 tree x = DECL_VALUE_EXPR (t);
5260 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5261 if (addr)
5262 return addr;
5263 }
5264
5265 if (!EXPR_P (t))
5266 {
5267 *walk_subtrees = false;
5268 return NULL;
5269 }
5270
5271 location_t loc = EXPR_LOCATION (t);
5272 if (verify_location (blocks, loc))
5273 return t;
5274
5275 return NULL;
5276 }
5277
5278 /* Called via walk_gimple_op. Verify locations of expressions. */
5279
5280 static tree
5281 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5282 {
5283 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5284 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5285 }
5286
5287 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5288
5289 static void
5290 collect_subblocks (hash_set<tree> *blocks, tree block)
5291 {
5292 tree t;
5293 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5294 {
5295 blocks->add (t);
5296 collect_subblocks (blocks, t);
5297 }
5298 }
5299
5300 /* Disable warnings about missing quoting in GCC diagnostics for
5301 the verification errors. Their format strings don't follow
5302 GCC diagnostic conventions and trigger an ICE in the end. */
5303 #if __GNUC__ >= 10
5304 # pragma GCC diagnostic push
5305 # pragma GCC diagnostic ignored "-Wformat-diag"
5306 #endif
5307
5308 /* Verify the GIMPLE statements in the CFG of FN. */
5309
5310 DEBUG_FUNCTION void
5311 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5312 {
5313 basic_block bb;
5314 bool err = false;
5315
5316 timevar_push (TV_TREE_STMT_VERIFY);
5317 hash_set<void *> visited;
5318 hash_set<gimple *> visited_throwing_stmts;
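/* VISITED tracks tree nodes for the sharing verification; statements
that carry an EH landing pad number are collected in
VISITED_THROWING_STMTS so the EH table can be cross-checked at the
end. */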
5319
5320 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5321 hash_set<tree> blocks;
5322 if (DECL_INITIAL (fn->decl))
5323 {
5324 blocks.add (DECL_INITIAL (fn->decl));
5325 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5326 }
5327
5328 FOR_EACH_BB_FN (bb, fn)
5329 {
5330 gimple_stmt_iterator gsi;
5331 edge_iterator ei;
5332 edge e;
5333
5334 for (gphi_iterator gpi = gsi_start_phis (bb);
5335 !gsi_end_p (gpi);
5336 gsi_next (&gpi))
5337 {
5338 gphi *phi = gpi.phi ();
5339 bool err2 = false;
5340 unsigned i;
5341
5342 if (gimple_bb (phi) != bb)
5343 {
5344 error ("gimple_bb (phi) is set to a wrong basic block");
5345 err2 = true;
5346 }
5347
5348 err2 |= verify_gimple_phi (phi);
5349
5350 /* Only PHI arguments have locations. */
5351 if (gimple_location (phi) != UNKNOWN_LOCATION)
5352 {
5353 error ("PHI node with location");
5354 err2 = true;
5355 }
5356
5357 for (i = 0; i < gimple_phi_num_args (phi); i++)
5358 {
5359 tree arg = gimple_phi_arg_def (phi, i);
5360 tree addr = walk_tree (&arg, verify_node_sharing_1,
5361 &visited, NULL);
5362 if (addr)
5363 {
5364 error ("incorrect sharing of tree nodes");
5365 debug_generic_expr (addr);
5366 err2 |= true;
5367 }
5368 location_t loc = gimple_phi_arg_location (phi, i);
5369 if (virtual_operand_p (gimple_phi_result (phi))
5370 && loc != UNKNOWN_LOCATION)
5371 {
5372 error ("virtual PHI with argument locations");
5373 err2 = true;
5374 }
5375 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5376 if (addr)
5377 {
5378 debug_generic_expr (addr);
5379 err2 = true;
5380 }
5381 err2 |= verify_location (&blocks, loc);
5382 }
5383
5384 if (err2)
5385 debug_gimple_stmt (phi);
5386 err |= err2;
5387 }
5388
5389 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5390 {
5391 gimple *stmt = gsi_stmt (gsi);
5392 bool err2 = false;
5393 struct walk_stmt_info wi;
5394 tree addr;
5395 int lp_nr;
5396
5397 if (gimple_bb (stmt) != bb)
5398 {
5399 error ("gimple_bb (stmt) is set to a wrong basic block");
5400 err2 = true;
5401 }
5402
5403 err2 |= verify_gimple_stmt (stmt);
5404 err2 |= verify_location (&blocks, gimple_location (stmt));
5405
5406 memset (&wi, 0, sizeof (wi));
5407 wi.info = (void *) &visited;
5408 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5409 if (addr)
5410 {
5411 error ("incorrect sharing of tree nodes");
5412 debug_generic_expr (addr);
5413 err2 |= true;
5414 }
5415
5416 memset (&wi, 0, sizeof (wi));
5417 wi.info = (void *) &blocks;
5418 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5419 if (addr)
5420 {
5421 debug_generic_expr (addr);
5422 err2 |= true;
5423 }
5424
5425 /* If the statement is marked as part of an EH region, then it is
5426 expected that the statement could throw. Verify that when
5427 optimizations simplify a statement to the point where we can
5428 prove that it cannot throw, the other data structures are
5429 updated to match. */
5430 lp_nr = lookup_stmt_eh_lp (stmt);
5431 if (lp_nr != 0)
5432 visited_throwing_stmts.add (stmt);
5433 if (lp_nr > 0)
5434 {
5435 if (!stmt_could_throw_p (cfun, stmt))
5436 {
5437 if (verify_nothrow)
5438 {
5439 error ("statement marked for throw, but doesn%'t");
5440 err2 |= true;
5441 }
5442 }
5443 else if (!gsi_one_before_end_p (gsi))
5444 {
5445 error ("statement marked for throw in middle of block");
5446 err2 |= true;
5447 }
5448 }
5449
5450 if (err2)
5451 debug_gimple_stmt (stmt);
5452 err |= err2;
5453 }
5454
5455 FOR_EACH_EDGE (e, ei, bb->succs)
5456 if (e->goto_locus != UNKNOWN_LOCATION)
5457 err |= verify_location (&blocks, e->goto_locus);
5458 }
5459
5460 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5461 eh_error_found = false;
5462 if (eh_table)
5463 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5464 (&visited_throwing_stmts);
5465
5466 if (err || eh_error_found)
5467 internal_error ("verify_gimple failed");
5468
5469 verify_histograms ();
5470 timevar_pop (TV_TREE_STMT_VERIFY);
5471 }
5472
5473
5474 /* Verifies that the flow information is OK. */
5475
5476 static int
5477 gimple_verify_flow_info (void)
5478 {
5479 int err = 0;
5480 basic_block bb;
5481 gimple_stmt_iterator gsi;
5482 gimple *stmt;
5483 edge e;
5484 edge_iterator ei;
5485
5486 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5487 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5488 {
5489 error ("ENTRY_BLOCK has IL associated with it");
5490 err = 1;
5491 }
5492
5493 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5494 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5495 {
5496 error ("EXIT_BLOCK has IL associated with it");
5497 err = 1;
5498 }
5499
5500 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5501 if (e->flags & EDGE_FALLTHRU)
5502 {
5503 error ("fallthru to exit from bb %d", e->src->index);
5504 err = 1;
5505 }
5506
5507 FOR_EACH_BB_FN (bb, cfun)
5508 {
5509 bool found_ctrl_stmt = false;
5510
5511 stmt = NULL;
5512
5513 /* Skip labels on the start of basic block. */
5514 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5515 {
5516 tree label;
5517 gimple *prev_stmt = stmt;
5518
5519 stmt = gsi_stmt (gsi);
5520
5521 if (gimple_code (stmt) != GIMPLE_LABEL)
5522 break;
5523
5524 label = gimple_label_label (as_a <glabel *> (stmt));
5525 if (prev_stmt && DECL_NONLOCAL (label))
5526 {
5527 error ("nonlocal label ");
5528 print_generic_expr (stderr, label);
5529 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5530 bb->index);
5531 err = 1;
5532 }
5533
5534 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5535 {
5536 error ("EH landing pad label ");
5537 print_generic_expr (stderr, label);
5538 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5539 bb->index);
5540 err = 1;
5541 }
5542
5543 if (label_to_block (cfun, label) != bb)
5544 {
5545 error ("label ");
5546 print_generic_expr (stderr, label);
5547 fprintf (stderr, " to block does not match in bb %d",
5548 bb->index);
5549 err = 1;
5550 }
5551
5552 if (decl_function_context (label) != current_function_decl)
5553 {
5554 error ("label ");
5555 print_generic_expr (stderr, label);
5556 fprintf (stderr, " has incorrect context in bb %d",
5557 bb->index);
5558 err = 1;
5559 }
5560 }
5561
5562 /* Verify that the body of basic block BB is free of control flow. */
5563 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5564 {
5565 gimple *stmt = gsi_stmt (gsi);
5566
5567 if (found_ctrl_stmt)
5568 {
5569 error ("control flow in the middle of basic block %d",
5570 bb->index);
5571 err = 1;
5572 }
5573
5574 if (stmt_ends_bb_p (stmt))
5575 found_ctrl_stmt = true;
5576
5577 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5578 {
5579 error ("label ");
5580 print_generic_expr (stderr, gimple_label_label (label_stmt));
5581 fprintf (stderr, " in the middle of basic block %d", bb->index);
5582 err = 1;
5583 }
5584 }
5585
5586 gsi = gsi_last_nondebug_bb (bb);
5587 if (gsi_end_p (gsi))
5588 continue;
5589
5590 stmt = gsi_stmt (gsi);
5591
5592 if (gimple_code (stmt) == GIMPLE_LABEL)
5593 continue;
5594
5595 err |= verify_eh_edges (stmt);
5596
5597 if (is_ctrl_stmt (stmt))
5598 {
5599 FOR_EACH_EDGE (e, ei, bb->succs)
5600 if (e->flags & EDGE_FALLTHRU)
5601 {
5602 error ("fallthru edge after a control statement in bb %d",
5603 bb->index);
5604 err = 1;
5605 }
5606 }
5607
5608 if (gimple_code (stmt) != GIMPLE_COND)
5609 {
5610 /* Verify that no edges with EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
5611 appear after anything other than a GIMPLE_COND. */
5612 FOR_EACH_EDGE (e, ei, bb->succs)
5613 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5614 {
5615 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5616 bb->index);
5617 err = 1;
5618 }
5619 }
5620
5621 switch (gimple_code (stmt))
5622 {
5623 case GIMPLE_COND:
5624 {
5625 edge true_edge;
5626 edge false_edge;
5627
5628 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5629
5630 if (!true_edge
5631 || !false_edge
5632 || !(true_edge->flags & EDGE_TRUE_VALUE)
5633 || !(false_edge->flags & EDGE_FALSE_VALUE)
5634 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5635 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5636 || EDGE_COUNT (bb->succs) >= 3)
5637 {
5638 error ("wrong outgoing edge flags at end of bb %d",
5639 bb->index);
5640 err = 1;
5641 }
5642 }
5643 break;
5644
5645 case GIMPLE_GOTO:
5646 if (simple_goto_p (stmt))
5647 {
5648 error ("explicit goto at end of bb %d", bb->index);
5649 err = 1;
5650 }
5651 else
5652 {
5653 /* FIXME. We should double check that the labels in the
5654 destination blocks have their address taken. */
5655 FOR_EACH_EDGE (e, ei, bb->succs)
5656 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5657 | EDGE_FALSE_VALUE))
5658 || !(e->flags & EDGE_ABNORMAL))
5659 {
5660 error ("wrong outgoing edge flags at end of bb %d",
5661 bb->index);
5662 err = 1;
5663 }
5664 }
5665 break;
5666
5667 case GIMPLE_CALL:
5668 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5669 break;
5670 /* fallthru */
5671 case GIMPLE_RETURN:
5672 if (!single_succ_p (bb)
5673 || (single_succ_edge (bb)->flags
5674 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5675 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5676 {
5677 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5678 err = 1;
5679 }
5680 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5681 {
5682 error ("return edge does not point to exit in bb %d",
5683 bb->index);
5684 err = 1;
5685 }
5686 break;
5687
5688 case GIMPLE_SWITCH:
5689 {
5690 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5691 tree prev;
5692 edge e;
5693 size_t i, n;
5694
5695 n = gimple_switch_num_labels (switch_stmt);
5696
5697 /* Mark all the destination basic blocks. */
5698 for (i = 0; i < n; ++i)
5699 {
5700 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5701 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5702 label_bb->aux = (void *)1;
5703 }
5704
5705 /* Verify that the case labels are sorted. */
5706 prev = gimple_switch_label (switch_stmt, 0);
5707 for (i = 1; i < n; ++i)
5708 {
5709 tree c = gimple_switch_label (switch_stmt, i);
5710 if (!CASE_LOW (c))
5711 {
5712 error ("found default case not at the start of "
5713 "case vector");
5714 err = 1;
5715 continue;
5716 }
5717 if (CASE_LOW (prev)
5718 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5719 {
5720 error ("case labels not sorted: ");
5721 print_generic_expr (stderr, prev);
5722 fprintf (stderr, " is greater than ");
5723 print_generic_expr (stderr, c);
5724 fprintf (stderr, " but comes before it.\n");
5725 err = 1;
5726 }
5727 prev = c;
5728 }
5729 /* VRP will remove the default case if it can prove it will
5730 never be executed. So do not verify there always exists
5731 a default case here. */
5732
5733 FOR_EACH_EDGE (e, ei, bb->succs)
5734 {
5735 if (!e->dest->aux)
5736 {
5737 error ("extra outgoing edge %d->%d",
5738 bb->index, e->dest->index);
5739 err = 1;
5740 }
5741
5742 e->dest->aux = (void *)2;
5743 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5744 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5745 {
5746 error ("wrong outgoing edge flags at end of bb %d",
5747 bb->index);
5748 err = 1;
5749 }
5750 }
5751
5752 /* Check that we have all of them. */
5753 for (i = 0; i < n; ++i)
5754 {
5755 basic_block label_bb = gimple_switch_label_bb (cfun,
5756 switch_stmt, i);
5757
5758 if (label_bb->aux != (void *)2)
5759 {
5760 error ("missing edge %i->%i", bb->index, label_bb->index);
5761 err = 1;
5762 }
5763 }
5764
5765 FOR_EACH_EDGE (e, ei, bb->succs)
5766 e->dest->aux = (void *)0;
5767 }
5768 break;
5769
5770 case GIMPLE_EH_DISPATCH:
5771 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5772 break;
5773
5774 default:
5775 break;
5776 }
5777 }
5778
5779 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5780 verify_dominators (CDI_DOMINATORS);
5781
5782 return err;
5783 }
5784
5785 #if __GNUC__ >= 10
5786 # pragma GCC diagnostic pop
5787 #endif
5788
5789 /* Updates phi nodes after creating a forwarder block joined
5790 by edge FALLTHRU. */
5791
5792 static void
5793 gimple_make_forwarder_block (edge fallthru)
5794 {
5795 edge e;
5796 edge_iterator ei;
5797 basic_block dummy, bb;
5798 tree var;
5799 gphi_iterator gsi;
5800 bool forward_location_p;
5801
5802 dummy = fallthru->src;
5803 bb = fallthru->dest;
5804
5805 if (single_pred_p (bb))
5806 return;
5807
5808 /* We can forward location info if we have only one predecessor. */
5809 forward_location_p = single_pred_p (dummy);
5810
5811 /* If we redirected a branch we must create new PHI nodes at the
5812 start of BB. */
5813 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5814 {
5815 gphi *phi, *new_phi;
5816
5817 phi = gsi.phi ();
5818 var = gimple_phi_result (phi);
5819 new_phi = create_phi_node (var, bb);
5820 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5821 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5822 forward_location_p
5823 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5824 }
5825
5826 /* Add the arguments we have stored on edges. */
5827 FOR_EACH_EDGE (e, ei, bb->preds)
5828 {
5829 if (e == fallthru)
5830 continue;
5831
5832 flush_pending_stmts (e);
5833 }
5834 }
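/* For illustration: this hook is normally reached through
   make_forwarder_block from cfghooks, which splits the block and then
   invokes us on the new fallthru edge. A hypothetical caller that
   funnels every non-back edge of BB through a fresh forwarder (the
   predicate name is a placeholder) might look like:

     static bool
     divert_p (edge e)
     {
       return (e->flags & EDGE_DFS_BACK) == 0;
     }

     edge fallthru = make_forwarder_block (bb, divert_p, NULL);
     basic_block forwarder = fallthru->src;

   The PHI rewiring above then moves the argument bookkeeping for the
   diverted predecessors from BB into the forwarder. */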
5835
5836
5837 /* Return a non-special label in the head of basic block BB.
5838 Create one if it doesn't exist. */
5839
5840 tree
5841 gimple_block_label (basic_block bb)
5842 {
5843 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5844 bool first = true;
5845 tree label;
5846 glabel *stmt;
5847
5848 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5849 {
5850 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5851 if (!stmt)
5852 break;
5853 label = gimple_label_label (stmt);
5854 if (!DECL_NONLOCAL (label))
5855 {
5856 if (!first)
5857 gsi_move_before (&i, &s);
5858 return label;
5859 }
5860 }
5861
5862 label = create_artificial_label (UNKNOWN_LOCATION);
5863 stmt = gimple_build_label (label);
5864 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5865 return label;
5866 }
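/* For illustration: a typical consumer wants a label it can target
   with an explicit jump. Assuming a destination block DEST and an
   insertion iterator GSI (both placeholders):

     tree lbl = gimple_block_label (dest);
     ggoto *jump = gimple_build_goto (lbl);
     gsi_insert_after (&gsi, jump, GSI_NEW_STMT);

   Because an existing label is moved to (or a new one created at) the
   head of the block, repeated calls return the same label instead of
   piling up new ones. */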
5867
5868
5869 /* Attempt to perform edge redirection by replacing a possibly complex
5870 jump instruction by a goto or by removing the jump completely.
5871 This can apply only if all edges now point to the same block. The
5872 parameters and return values are equivalent to
5873 redirect_edge_and_branch. */
5874
5875 static edge
5876 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5877 {
5878 basic_block src = e->src;
5879 gimple_stmt_iterator i;
5880 gimple *stmt;
5881
5882 /* We can replace or remove a complex jump only when we have exactly
5883 two edges. */
5884 if (EDGE_COUNT (src->succs) != 2
5885 /* Verify that all targets will be TARGET. Specifically, the
5886 edge that is not E must also go to TARGET. */
5887 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5888 return NULL;
5889
5890 i = gsi_last_bb (src);
5891 if (gsi_end_p (i))
5892 return NULL;
5893
5894 stmt = gsi_stmt (i);
5895
5896 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5897 {
5898 gsi_remove (&i, true);
5899 e = ssa_redirect_edge (e, target);
5900 e->flags = EDGE_FALLTHRU;
5901 return e;
5902 }
5903
5904 return NULL;
5905 }
5906
5907
5908 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5909 edge representing the redirected branch. */
5910
5911 static edge
5912 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5913 {
5914 basic_block bb = e->src;
5915 gimple_stmt_iterator gsi;
5916 edge ret;
5917 gimple *stmt;
5918
5919 if (e->flags & EDGE_ABNORMAL)
5920 return NULL;
5921
5922 if (e->dest == dest)
5923 return NULL;
5924
5925 if (e->flags & EDGE_EH)
5926 return redirect_eh_edge (e, dest);
5927
5928 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5929 {
5930 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5931 if (ret)
5932 return ret;
5933 }
5934
5935 gsi = gsi_last_nondebug_bb (bb);
5936 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5937
5938 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5939 {
5940 case GIMPLE_COND:
5941 /* For COND_EXPR, we only need to redirect the edge. */
5942 break;
5943
5944 case GIMPLE_GOTO:
5945 /* No non-abnormal edges should lead from a non-simple goto, and
5946 simple ones should be represented implicitly. */
5947 gcc_unreachable ();
5948
5949 case GIMPLE_SWITCH:
5950 {
5951 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5952 tree label = gimple_block_label (dest);
5953 tree cases = get_cases_for_edge (e, switch_stmt);
5954
5955 /* If we have a list of cases associated with E, then use it
5956 as it's a lot faster than walking the entire case vector. */
5957 if (cases)
5958 {
5959 edge e2 = find_edge (e->src, dest);
5960 tree last, first;
5961
5962 first = cases;
5963 while (cases)
5964 {
5965 last = cases;
5966 CASE_LABEL (cases) = label;
5967 cases = CASE_CHAIN (cases);
5968 }
5969
5970 /* If there was already an edge in the CFG, then we need
5971 to move all the cases associated with E to E2. */
5972 if (e2)
5973 {
5974 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5975
5976 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5977 CASE_CHAIN (cases2) = first;
5978 }
5979 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5980 }
5981 else
5982 {
5983 size_t i, n = gimple_switch_num_labels (switch_stmt);
5984
5985 for (i = 0; i < n; i++)
5986 {
5987 tree elt = gimple_switch_label (switch_stmt, i);
5988 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5989 CASE_LABEL (elt) = label;
5990 }
5991 }
5992 }
5993 break;
5994
5995 case GIMPLE_ASM:
5996 {
5997 gasm *asm_stmt = as_a <gasm *> (stmt);
5998 int i, n = gimple_asm_nlabels (asm_stmt);
5999 tree label = NULL;
6000
6001 for (i = 0; i < n; ++i)
6002 {
6003 tree cons = gimple_asm_label_op (asm_stmt, i);
6004 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6005 {
6006 if (!label)
6007 label = gimple_block_label (dest);
6008 TREE_VALUE (cons) = label;
6009 }
6010 }
6011
6012 /* If we didn't find any label matching the former edge in the
6013 asm labels, we must be redirecting the fallthrough
6014 edge. */
6015 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6016 }
6017 break;
6018
6019 case GIMPLE_RETURN:
6020 gsi_remove (&gsi, true);
6021 e->flags |= EDGE_FALLTHRU;
6022 break;
6023
6024 case GIMPLE_OMP_RETURN:
6025 case GIMPLE_OMP_CONTINUE:
6026 case GIMPLE_OMP_SECTIONS_SWITCH:
6027 case GIMPLE_OMP_FOR:
6028 /* The edges from OMP constructs can be simply redirected. */
6029 break;
6030
6031 case GIMPLE_EH_DISPATCH:
6032 if (!(e->flags & EDGE_FALLTHRU))
6033 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6034 break;
6035
6036 case GIMPLE_TRANSACTION:
6037 if (e->flags & EDGE_TM_ABORT)
6038 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6039 gimple_block_label (dest));
6040 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6041 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6042 gimple_block_label (dest));
6043 else
6044 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6045 gimple_block_label (dest));
6046 break;
6047
6048 default:
6049 /* Otherwise it must be a fallthru edge, and we don't need to
6050 do anything besides redirecting it. */
6051 gcc_assert (e->flags & EDGE_FALLTHRU);
6052 break;
6053 }
6054
6055 /* Update/insert PHI nodes as necessary. */
6056
6057 /* Now update the edges in the CFG. */
6058 e = ssa_redirect_edge (e, dest);
6059
6060 return e;
6061 }
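/* For illustration: callers normally reach the function above through
   the generic redirect_edge_and_branch wrapper in cfghooks.
   Redirecting edge E to NEW_DEST (both placeholders) and committing
   the PHI arguments queued on the edge might look like:

     edge e2 = redirect_edge_and_branch (e, new_dest);
     if (e2)
       flush_pending_stmts (e2);

   A NULL result means the redirection was refused (for instance for
   abnormal edges), and the caller must fall back to
   redirect_edge_and_branch_force or give up. */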
6062
6063 /* Returns true if it is possible to remove edge E by redirecting
6064 it to the destination of the other edge from E->src. */
6065
6066 static bool
6067 gimple_can_remove_branch_p (const_edge e)
6068 {
6069 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6070 return false;
6071
6072 return true;
6073 }
6074
6075 /* Simple wrapper, as we can always redirect fallthru edges. */
6076
6077 static basic_block
6078 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6079 {
6080 e = gimple_redirect_edge_and_branch (e, dest);
6081 gcc_assert (e);
6082
6083 return NULL;
6084 }
6085
6086
6087 /* Splits basic block BB after statement STMT (but at least after the
6088 labels). If STMT is NULL, BB is split just after the labels. */
6089
6090 static basic_block
6091 gimple_split_block (basic_block bb, void *stmt)
6092 {
6093 gimple_stmt_iterator gsi;
6094 gimple_stmt_iterator gsi_tgt;
6095 gimple_seq list;
6096 basic_block new_bb;
6097 edge e;
6098 edge_iterator ei;
6099
6100 new_bb = create_empty_bb (bb);
6101
6102 /* Redirect the outgoing edges. */
6103 new_bb->succs = bb->succs;
6104 bb->succs = NULL;
6105 FOR_EACH_EDGE (e, ei, new_bb->succs)
6106 e->src = new_bb;
6107
6108 /* Get a stmt iterator pointing to the first stmt to move. */
6109 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6110 gsi = gsi_after_labels (bb);
6111 else
6112 {
6113 gsi = gsi_for_stmt ((gimple *) stmt);
6114 gsi_next (&gsi);
6115 }
6116
6117 /* Move everything from GSI to the new basic block. */
6118 if (gsi_end_p (gsi))
6119 return new_bb;
6120
6121 /* Split the statement list - avoid re-creating new containers as this
6122 brings ugly quadratic memory consumption in the inliner.
6123 (We are still quadratic since we need to update stmt BB pointers,
6124 sadly.) */
6125 gsi_split_seq_before (&gsi, &list);
6126 set_bb_seq (new_bb, list);
6127 for (gsi_tgt = gsi_start (list);
6128 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6129 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6130
6131 return new_bb;
6132 }
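/* For illustration: the usual entry point is the split_block wrapper
   from cfghooks, which additionally maintains dominator and loop
   information. Splitting BB right after statement STMT (both
   placeholders) and picking up the tail half:

     edge e = split_block (bb, stmt);
     basic_block tail = e->dest;

   Passing a NULL statement splits just after the leading labels, as
   described above. */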
6133
6134
6135 /* Moves basic block BB after block AFTER. */
6136
6137 static bool
6138 gimple_move_block_after (basic_block bb, basic_block after)
6139 {
6140 if (bb->prev_bb == after)
6141 return true;
6142
6143 unlink_block (bb);
6144 link_block (bb, after);
6145
6146 return true;
6147 }
6148
6149
6150 /* Return TRUE if block BB has no executable statements, otherwise return
6151 FALSE. */
6152
6153 static bool
6154 gimple_empty_block_p (basic_block bb)
6155 {
6156 /* BB must have no executable statements. */
6157 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6158 if (phi_nodes (bb))
6159 return false;
6160 while (!gsi_end_p (gsi))
6161 {
6162 gimple *stmt = gsi_stmt (gsi);
6163 if (is_gimple_debug (stmt))
6164 ;
6165 else if (gimple_code (stmt) == GIMPLE_NOP
6166 || gimple_code (stmt) == GIMPLE_PREDICT)
6167 ;
6168 else
6169 return false;
6170 gsi_next (&gsi);
6171 }
6172 return true;
6173 }
6174
6175
6176 /* Split a basic block if it ends with a conditional branch and if the
6177 other part of the block is not empty. */
6178
6179 static basic_block
6180 gimple_split_block_before_cond_jump (basic_block bb)
6181 {
6182 gimple *last, *split_point;
6183 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6184 if (gsi_end_p (gsi))
6185 return NULL;
6186 last = gsi_stmt (gsi);
6187 if (gimple_code (last) != GIMPLE_COND
6188 && gimple_code (last) != GIMPLE_SWITCH)
6189 return NULL;
6190 gsi_prev (&gsi);
6191 split_point = gsi_stmt (gsi);
6192 return split_block (bb, split_point)->dest;
6193 }
6194
6195
6196 /* Return true if basic_block can be duplicated. */
6197
6198 static bool
6199 gimple_can_duplicate_bb_p (const_basic_block bb)
6200 {
6201 gimple *last = last_stmt (CONST_CAST_BB (bb));
6202
6203 /* Do checks that can only fail for the last stmt, to minimize the work in the
6204 stmt loop. */
6205 if (last)
    {
6206 /* A transaction is a single entry multiple exit region. It
6207 must be duplicated in its entirety or not at all. */
6208 if (gimple_code (last) == GIMPLE_TRANSACTION)
6209 return false;
6210
6211 /* An IFN_UNIQUE call must be duplicated as part of its group,
6212 or not at all. */
6213 if (is_gimple_call (last)
6214 && gimple_call_internal_p (last)
6215 && gimple_call_internal_unique_p (last))
6216 return false;
6217 }
6218
6219 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6220 !gsi_end_p (gsi); gsi_next (&gsi))
6221 {
6222 gimple *g = gsi_stmt (gsi);
6223
6224 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6225 duplicated as part of its group, or not at all.
6226 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6227 group, so the same holds there. */
6228 if (is_gimple_call (g)
6229 && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6230 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6231 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6232 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6233 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6234 return false;
6235 }
6236
6237 return true;
6238 }
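/* For illustration: passes should ask through the cfghooks wrapper
   before copying, e.g. for a placeholder block BB:

     if (can_duplicate_block_p (bb))
       duplicate_block (bb, NULL, NULL);

   so that the transaction and internal-function restrictions checked
   above are honored uniformly. */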
6239
6240 /* Create a duplicate of the basic block BB. NOTE: This does not
6241 preserve SSA form. */
6242
6243 static basic_block
6244 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6245 {
6246 basic_block new_bb;
6247 gimple_stmt_iterator gsi_tgt;
6248
6249 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6250
6251 /* Copy the PHI nodes. We ignore PHI node arguments here because
6252 the incoming edges have not been set up yet. */
6253 for (gphi_iterator gpi = gsi_start_phis (bb);
6254 !gsi_end_p (gpi);
6255 gsi_next (&gpi))
6256 {
6257 gphi *phi, *copy;
6258 phi = gpi.phi ();
6259 copy = create_phi_node (NULL_TREE, new_bb);
6260 create_new_def_for (gimple_phi_result (phi), copy,
6261 gimple_phi_result_ptr (copy));
6262 gimple_set_uid (copy, gimple_uid (phi));
6263 }
6264
6265 gsi_tgt = gsi_start_bb (new_bb);
6266 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6267 !gsi_end_p (gsi);
6268 gsi_next (&gsi))
6269 {
6270 def_operand_p def_p;
6271 ssa_op_iter op_iter;
6272 tree lhs;
6273 gimple *stmt, *copy;
6274
6275 stmt = gsi_stmt (gsi);
6276 if (gimple_code (stmt) == GIMPLE_LABEL)
6277 continue;
6278
6279 /* Don't duplicate label debug stmts. */
6280 if (gimple_debug_bind_p (stmt)
6281 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6282 == LABEL_DECL)
6283 continue;
6284
6285 /* Create a new copy of STMT and duplicate STMT's virtual
6286 operands. */
6287 copy = gimple_copy (stmt);
6288 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6289
6290 maybe_duplicate_eh_stmt (copy, stmt);
6291 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6292
6293 /* When copying around a stmt writing into a local non-user
6294 aggregate, make sure it won't share stack slot with other
6295 vars. */
6296 lhs = gimple_get_lhs (stmt);
6297 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6298 {
6299 tree base = get_base_address (lhs);
6300 if (base
6301 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6302 && DECL_IGNORED_P (base)
6303 && !TREE_STATIC (base)
6304 && !DECL_EXTERNAL (base)
6305 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6306 DECL_NONSHAREABLE (base) = 1;
6307 }
6308
6309 /* If requested, remap dependence info of cliques brought in
6310 via inlining. */
6311 if (id)
6312 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6313 {
6314 tree op = gimple_op (copy, i);
6315 if (!op)
6316 continue;
6317 if (TREE_CODE (op) == ADDR_EXPR
6318 || TREE_CODE (op) == WITH_SIZE_EXPR)
6319 op = TREE_OPERAND (op, 0);
6320 while (handled_component_p (op))
6321 op = TREE_OPERAND (op, 0);
6322 if ((TREE_CODE (op) == MEM_REF
6323 || TREE_CODE (op) == TARGET_MEM_REF)
6324 && MR_DEPENDENCE_CLIQUE (op) > 1
6325 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6326 {
6327 if (!id->dependence_map)
6328 id->dependence_map = new hash_map<dependence_hash,
6329 unsigned short>;
6330 bool existed;
6331 unsigned short &newc = id->dependence_map->get_or_insert
6332 (MR_DEPENDENCE_CLIQUE (op), &existed);
6333 if (!existed)
6334 {
6335 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6336 newc = ++cfun->last_clique;
6337 }
6338 MR_DEPENDENCE_CLIQUE (op) = newc;
6339 }
6340 }
6341
6342 /* Create new names for all the definitions created by COPY and
6343 add replacement mappings for each new name. */
6344 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6345 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6346 }
6347
6348 return new_bb;
6349 }
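/* For illustration: the usual path to the function above is the
   duplicate_block hook, which also copies the outgoing edges; the PHI
   arguments for those edges must then be filled in separately. A
   hypothetical single-block copy, assuming the original/copy tables
   are required by duplicate_block:

     initialize_original_copy_tables ();
     basic_block copy = duplicate_block (bb, NULL, NULL);
     add_phi_args_after_copy (&copy, 1, NULL);
     free_original_copy_tables ();

   As noted above, SSA form still needs a later update_ssa pass. */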
6350
6351 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6352
6353 static void
6354 add_phi_args_after_copy_edge (edge e_copy)
6355 {
6356 basic_block bb, bb_copy = e_copy->src, dest;
6357 edge e;
6358 edge_iterator ei;
6359 gphi *phi, *phi_copy;
6360 tree def;
6361 gphi_iterator psi, psi_copy;
6362
6363 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6364 return;
6365
6366 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6367
6368 if (e_copy->dest->flags & BB_DUPLICATED)
6369 dest = get_bb_original (e_copy->dest);
6370 else
6371 dest = e_copy->dest;
6372
6373 e = find_edge (bb, dest);
6374 if (!e)
6375 {
6376 /* During loop unrolling the target of the latch edge is copied.
6377 In this case we are not looking for the edge to DEST, but for
6378 the edge to the duplicated block whose original was DEST. */
6379 FOR_EACH_EDGE (e, ei, bb->succs)
6380 {
6381 if ((e->dest->flags & BB_DUPLICATED)
6382 && get_bb_original (e->dest) == dest)
6383 break;
6384 }
6385
6386 gcc_assert (e != NULL);
6387 }
6388
6389 for (psi = gsi_start_phis (e->dest),
6390 psi_copy = gsi_start_phis (e_copy->dest);
6391 !gsi_end_p (psi);
6392 gsi_next (&psi), gsi_next (&psi_copy))
6393 {
6394 phi = psi.phi ();
6395 phi_copy = psi_copy.phi ();
6396 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6397 add_phi_arg (phi_copy, def, e_copy,
6398 gimple_phi_arg_location_from_edge (phi, e));
6399 }
6400 }
6401
6402
6403 /* Basic block BB_COPY was created by code duplication. Add phi node
6404 arguments for edges going out of BB_COPY. The blocks that were
6405 duplicated have BB_DUPLICATED set. */
6406
6407 void
6408 add_phi_args_after_copy_bb (basic_block bb_copy)
6409 {
6410 edge e_copy;
6411 edge_iterator ei;
6412
6413 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6414 {
6415 add_phi_args_after_copy_edge (e_copy);
6416 }
6417 }
6418
6419 /* Blocks in REGION_COPY array of length N_REGION were created by
6420 duplication of basic blocks. Add phi node arguments for edges
6421 going from these blocks. If E_COPY is not NULL, also add
6422 phi node arguments for its destination. */
6423
6424 void
6425 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6426 edge e_copy)
6427 {
6428 unsigned i;
6429
6430 for (i = 0; i < n_region; i++)
6431 region_copy[i]->flags |= BB_DUPLICATED;
6432
6433 for (i = 0; i < n_region; i++)
6434 add_phi_args_after_copy_bb (region_copy[i]);
6435 if (e_copy)
6436 add_phi_args_after_copy_edge (e_copy);
6437
6438 for (i = 0; i < n_region; i++)
6439 region_copy[i]->flags &= ~BB_DUPLICATED;
6440 }
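/* For illustration: the typical region-copy pattern pairs copy_bbs
   with this function (all variables are placeholders):

     initialize_original_copy_tables ();
     copy_bbs (region, n, region_copy, &exit, 1, &exit_copy,
               loop, split_edge_bb_loc (entry), true);
     add_phi_args_after_copy (region_copy, n, NULL);
     free_original_copy_tables ();

   gimple_duplicate_sese_region below follows exactly this shape. */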
6441
6442 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6443 important exit edge EXIT. By important we mean that no SSA name defined
6444 inside the region is live over the other exit edges of the region. All entry
6445 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6446 to the duplicate of the region. Dominance and loop information is
6447 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6448 UPDATE_DOMINANCE is false then we assume that the caller will update the
6449 dominance information after calling this function. The new basic
6450 blocks are stored to REGION_COPY in the same order as they had in REGION,
6451 provided that REGION_COPY is not NULL.
6452 The function returns false if it is unable to copy the region,
6453 true otherwise. */
6454
6455 bool
6456 gimple_duplicate_sese_region (edge entry, edge exit,
6457 basic_block *region, unsigned n_region,
6458 basic_block *region_copy,
6459 bool update_dominance)
6460 {
6461 unsigned i;
6462 bool free_region_copy = false, copying_header = false;
6463 class loop *loop = entry->dest->loop_father;
6464 edge exit_copy;
6465 vec<basic_block> doms = vNULL;
6466 edge redirected;
6467 profile_count total_count = profile_count::uninitialized ();
6468 profile_count entry_count = profile_count::uninitialized ();
6469
6470 if (!can_copy_bbs_p (region, n_region))
6471 return false;
6472
6473 /* Some sanity checking. Note that we do not check for all possible
6474 misuses of the functions, i.e. if you ask to copy something weird,
6475 it will work, but the state of structures probably will not be
6476 correct. */
6477 for (i = 0; i < n_region; i++)
6478 {
6479 /* We do not handle subloops, i.e. all the blocks must belong to the
6480 same loop. */
6481 if (region[i]->loop_father != loop)
6482 return false;
6483
6484 if (region[i] != entry->dest
6485 && region[i] == loop->header)
6486 return false;
6487 }
6488
6489 /* In case the function is used for loop header copying (which is the primary
6490 use), ensure that EXIT and its copy will be the new latch and entry edges.
6491 if (loop->header == entry->dest)
6492 {
6493 copying_header = true;
6494
6495 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6496 return false;
6497
6498 for (i = 0; i < n_region; i++)
6499 if (region[i] != exit->src
6500 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6501 return false;
6502 }
6503
6504 initialize_original_copy_tables ();
6505
6506 if (copying_header)
6507 set_loop_copy (loop, loop_outer (loop));
6508 else
6509 set_loop_copy (loop, loop);
6510
6511 if (!region_copy)
6512 {
6513 region_copy = XNEWVEC (basic_block, n_region);
6514 free_region_copy = true;
6515 }
6516
6517 /* Record blocks outside the region that are dominated by something
6518 inside. */
6519 if (update_dominance)
6520 {
6521 doms.create (0);
6522 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6523 }
6524
6525 if (entry->dest->count.initialized_p ())
6526 {
6527 total_count = entry->dest->count;
6528 entry_count = entry->count ();
6529 /* Fix up corner cases, to avoid division by zero or creation of negative
6530 frequencies. */
6531 if (entry_count > total_count)
6532 entry_count = total_count;
6533 }
6534
6535 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6536 split_edge_bb_loc (entry), update_dominance);
6537 if (total_count.initialized_p () && entry_count.initialized_p ())
6538 {
6539 scale_bbs_frequencies_profile_count (region, n_region,
6540 total_count - entry_count,
6541 total_count);
6542 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6543 total_count);
6544 }
6545
6546 if (copying_header)
6547 {
6548 loop->header = exit->dest;
6549 loop->latch = exit->src;
6550 }
6551
6552 /* Redirect the entry and add the phi node arguments. */
6553 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6554 gcc_assert (redirected != NULL);
6555 flush_pending_stmts (entry);
6556
6557 /* Concerning updating of dominators: We must recount dominators
6558 for entry block and its copy. Anything that is outside of the
6559 region, but was dominated by something inside needs recounting as
6560 well. */
6561 if (update_dominance)
6562 {
6563 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6564 doms.safe_push (get_bb_original (entry->dest));
6565 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6566 doms.release ();
6567 }
6568
6569 /* Add the other PHI node arguments. */
6570 add_phi_args_after_copy (region_copy, n_region, NULL);
6571
6572 if (free_region_copy)
6573 free (region_copy);
6574
6575 free_original_copy_tables ();
6576 return true;
6577 }
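/* For illustration: loop-header copying (the primary user, see
   tree-ssa-loop-ch.c) drives the function above roughly as follows,
   with ENTRY the preheader edge, EXIT the edge leaving the copied
   header region, and BBS/N_BBS the header blocks (all placeholders):

     edge entry = loop_preheader_edge (loop);
     if (gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
                                       copied_bbs, true))
       changed = true;

     if (changed)
       update_ssa (TODO_update_ssa);

   The SSA web is deliberately left to the caller, as documented
   above. */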
6578
6579 /* Checks if BB is part of the region defined by N_REGION BBS. */
6580 static bool
6581 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6582 {
6583 unsigned int n;
6584
6585 for (n = 0; n < n_region; n++)
6586 {
6587 if (bb == bbs[n])
6588 return true;
6589 }
6590 return false;
6591 }
6592
6593 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6594 are stored to REGION_COPY in the same order in which they appear
6595 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6596 the region, EXIT an exit from it. The condition guarding EXIT
6597 is moved to ENTRY. Returns true if duplication succeeds, false
6598 otherwise.
6599
6600 For example,
6601
6602 some_code;
6603 if (cond)
6604 A;
6605 else
6606 B;
6607
6608 is transformed to
6609
6610 if (cond)
6611 {
6612 some_code;
6613 A;
6614 }
6615 else
6616 {
6617 some_code;
6618 B;
6619 }
6620 */
6621
6622 bool
6623 gimple_duplicate_sese_tail (edge entry, edge exit,
6624 basic_block *region, unsigned n_region,
6625 basic_block *region_copy)
6626 {
6627 unsigned i;
6628 bool free_region_copy = false;
6629 class loop *loop = exit->dest->loop_father;
6630 class loop *orig_loop = entry->dest->loop_father;
6631 basic_block switch_bb, entry_bb, nentry_bb;
6632 vec<basic_block> doms;
6633 profile_count total_count = profile_count::uninitialized (),
6634 exit_count = profile_count::uninitialized ();
6635 edge exits[2], nexits[2], e;
6636 gimple_stmt_iterator gsi;
6637 gimple *cond_stmt;
6638 edge sorig, snew;
6639 basic_block exit_bb;
6640 gphi_iterator psi;
6641 gphi *phi;
6642 tree def;
6643 class loop *target, *aloop, *cloop;
6644
6645 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6646 exits[0] = exit;
6647 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6648
6649 if (!can_copy_bbs_p (region, n_region))
6650 return false;
6651
6652 initialize_original_copy_tables ();
6653 set_loop_copy (orig_loop, loop);
6654
6655 target = loop;
6656 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6657 {
6658 if (bb_part_of_region_p (aloop->header, region, n_region))
6659 {
6660 cloop = duplicate_loop (aloop, target);
6661 duplicate_subloops (aloop, cloop);
6662 }
6663 }
6664
6665 if (!region_copy)
6666 {
6667 region_copy = XNEWVEC (basic_block, n_region);
6668 free_region_copy = true;
6669 }
6670
6671 gcc_assert (!need_ssa_update_p (cfun));
6672
6673 /* Record blocks outside the region that are dominated by something
6674 inside. */
6675 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6676
6677 total_count = exit->src->count;
6678 exit_count = exit->count ();
6679 /* Fix up corner cases, to avoid division by zero or creation of negative
6680 frequencies. */
6681 if (exit_count > total_count)
6682 exit_count = total_count;
6683
6684 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6685 split_edge_bb_loc (exit), true);
6686 if (total_count.initialized_p () && exit_count.initialized_p ())
6687 {
6688 scale_bbs_frequencies_profile_count (region, n_region,
6689 total_count - exit_count,
6690 total_count);
6691 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6692 total_count);
6693 }
6694
6695 /* Create the switch block, and put the exit condition to it. */
6696 entry_bb = entry->dest;
6697 nentry_bb = get_bb_copy (entry_bb);
6698 if (!last_stmt (entry->src)
6699 || !stmt_ends_bb_p (last_stmt (entry->src)))
6700 switch_bb = entry->src;
6701 else
6702 switch_bb = split_edge (entry);
6703 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6704
6705 gsi = gsi_last_bb (switch_bb);
6706 cond_stmt = last_stmt (exit->src);
6707 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6708 cond_stmt = gimple_copy (cond_stmt);
6709
6710 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6711
6712 sorig = single_succ_edge (switch_bb);
6713 sorig->flags = exits[1]->flags;
6714 sorig->probability = exits[1]->probability;
6715 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6716 snew->probability = exits[0]->probability;
6717
6719 /* Register the new edge from SWITCH_BB in loop exit lists. */
6720 rescan_loop_exit (snew, true, false);
6721
6722 /* Add the PHI node arguments. */
6723 add_phi_args_after_copy (region_copy, n_region, snew);
6724
6725 /* Get rid of now superfluous conditions and associated edges (and phi node
6726 arguments). */
6727 exit_bb = exit->dest;
6728
6729 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6730 PENDING_STMT (e) = NULL;
6731
6732 /* The latch of ORIG_LOOP was copied, and so was the backedge
6733 to the original header. We redirect this backedge to EXIT_BB. */
6734 for (i = 0; i < n_region; i++)
6735 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6736 {
6737 gcc_assert (single_succ_edge (region_copy[i]));
6738 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6739 PENDING_STMT (e) = NULL;
6740 for (psi = gsi_start_phis (exit_bb);
6741 !gsi_end_p (psi);
6742 gsi_next (&psi))
6743 {
6744 phi = psi.phi ();
6745 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6746 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6747 }
6748 }
6749 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6750 PENDING_STMT (e) = NULL;
6751
6752 /* Anything that is outside of the region, but was dominated by something
6753 inside needs to update dominance info. */
6754 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6755 doms.release ();
6756 /* Update the SSA web. */
6757 update_ssa (TODO_update_ssa);
6758
6759 if (free_region_copy)
6760 free (region_copy);
6761
6762 free_original_copy_tables ();
6763 return true;
6764 }
6765
6766 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6767 adding blocks when the dominator traversal reaches EXIT. This
6768 function silently assumes that ENTRY strictly dominates EXIT. */
6769
6770 void
6771 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6772 vec<basic_block> *bbs_p)
6773 {
6774 basic_block son;
6775
6776 for (son = first_dom_son (CDI_DOMINATORS, entry);
6777 son;
6778 son = next_dom_son (CDI_DOMINATORS, son))
6779 {
6780 bbs_p->safe_push (son);
6781 if (son != exit)
6782 gather_blocks_in_sese_region (son, exit, bbs_p);
6783 }
6784 }
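/* For illustration: collecting an SESE region rooted at ENTRY_BB up
   to EXIT_BB. The entry block must be pushed by hand because the
   walk above only visits dominated sons (this mirrors
   move_sese_region_to_fn below):

     auto_vec<basic_block> bbs;
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     if (flag_checking)
       verify_sese (entry_bb, exit_bb, &bbs);
*/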
6785
6786 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6787 The duplicates are recorded in VARS_MAP. */
6788
6789 static void
6790 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6791 tree to_context)
6792 {
6793 tree t = *tp, new_t;
6794 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6795
6796 if (DECL_CONTEXT (t) == to_context)
6797 return;
6798
6799 bool existed;
6800 tree &loc = vars_map->get_or_insert (t, &existed);
6801
6802 if (!existed)
6803 {
6804 if (SSA_VAR_P (t))
6805 {
6806 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6807 add_local_decl (f, new_t);
6808 }
6809 else
6810 {
6811 gcc_assert (TREE_CODE (t) == CONST_DECL);
6812 new_t = copy_node (t);
6813 }
6814 DECL_CONTEXT (new_t) = to_context;
6815
6816 loc = new_t;
6817 }
6818 else
6819 new_t = loc;
6820
6821 *tp = new_t;
6822 }
6823
6824
6825 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6826 VARS_MAP maps old ssa names and var_decls to the new ones. */
6827
6828 static tree
6829 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6830 tree to_context)
6831 {
6832 tree new_name;
6833
6834 gcc_assert (!virtual_operand_p (name));
6835
6836 tree *loc = vars_map->get (name);
6837
6838 if (!loc)
6839 {
6840 tree decl = SSA_NAME_VAR (name);
6841 if (decl)
6842 {
6843 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6844 replace_by_duplicate_decl (&decl, vars_map, to_context);
6845 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6846 decl, SSA_NAME_DEF_STMT (name));
6847 }
6848 else
6849 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6850 name, SSA_NAME_DEF_STMT (name));
6851
6852 /* Now that we've used the def stmt to define new_name, make sure it
6853 doesn't define name anymore. */
6854 SSA_NAME_DEF_STMT (name) = NULL;
6855
6856 vars_map->put (name, new_name);
6857 }
6858 else
6859 new_name = *loc;
6860
6861 return new_name;
6862 }
6863
6864 struct move_stmt_d
6865 {
6866 tree orig_block;
6867 tree new_block;
6868 tree from_context;
6869 tree to_context;
6870 hash_map<tree, tree> *vars_map;
6871 htab_t new_label_map;
6872 hash_map<void *, void *> *eh_map;
6873 bool remap_decls_p;
6874 };
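/* For illustration: the two walkers below are meant to be driven
   together through walk_gimple_stmt, with a move_stmt_d hanging off
   the walk_stmt_info; move_block_to_fn later in this file does
   exactly this. A sketch, where the move_stmt_d fields come from the
   caller's context (all placeholders):

     struct move_stmt_d d;
     memset (&d, 0, sizeof (d));
     d.orig_block = orig_block;
     d.new_block = new_block;
     d.from_context = cfun->decl;
     d.to_context = dest_cfun->decl;
     d.vars_map = &vars_map;
     d.remap_decls_p = true;

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &d;
     walk_gimple_stmt (&gsi, move_stmt_r, move_stmt_op, &wi);
*/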
6875
6876 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6877 contained in *TP if it was ORIG_BLOCK previously, and change the
6878 DECL_CONTEXT of every local variable referenced in *TP. */
6879
6880 static tree
6881 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6882 {
6883 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6884 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6885 tree t = *tp;
6886
6887 if (EXPR_P (t))
6888 {
6889 tree block = TREE_BLOCK (t);
6890 if (block == NULL_TREE)
6891 ;
6892 else if (block == p->orig_block
6893 || p->orig_block == NULL_TREE)
6894 {
6895 /* tree_node_can_be_shared says we can share invariant
6896 addresses but unshare_expr copies them anyway. Make sure
6897 to unshare before adjusting the block in place - we do not
6898 always see a copy here. */
6899 if (TREE_CODE (t) == ADDR_EXPR
6900 && is_gimple_min_invariant (t))
6901 *tp = t = unshare_expr (t);
6902 TREE_SET_BLOCK (t, p->new_block);
6903 }
6904 else if (flag_checking)
6905 {
6906 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6907 block = BLOCK_SUPERCONTEXT (block);
6908 gcc_assert (block == p->orig_block);
6909 }
6910 }
6911 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6912 {
6913 if (TREE_CODE (t) == SSA_NAME)
6914 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6915 else if (TREE_CODE (t) == PARM_DECL
6916 && gimple_in_ssa_p (cfun))
6917 *tp = *(p->vars_map->get (t));
6918 else if (TREE_CODE (t) == LABEL_DECL)
6919 {
6920 if (p->new_label_map)
6921 {
6922 struct tree_map in, *out;
6923 in.base.from = t;
6924 out = (struct tree_map *)
6925 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6926 if (out)
6927 *tp = t = out->to;
6928 }
6929
6930 /* For FORCED_LABELs we can end up with references from other
6931 functions if some SESE regions are outlined. It is UB to
6932 jump in between them, but they could be used just for printing
6933 addresses etc. In that case, DECL_CONTEXT on the label should
6934 be the function containing the glabel stmt with that LABEL_DECL,
6935 rather than whatever function a reference to the label was last
6936 seen in. */
6937 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6938 DECL_CONTEXT (t) = p->to_context;
6939 }
6940 else if (p->remap_decls_p)
6941 {
6942 /* Replace T with its duplicate. T should no longer appear in the
6943 parent function, so this looks wasteful; however, it may appear
6944 in referenced_vars, and more importantly, as virtual operands of
6945 statements, and in alias lists of other variables. It would be
6946 quite difficult to expunge it from all those places. ??? It might
6947 suffice to do this for addressable variables. */
6948 if ((VAR_P (t) && !is_global_var (t))
6949 || TREE_CODE (t) == CONST_DECL)
6950 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6951 }
6952 *walk_subtrees = 0;
6953 }
6954 else if (TYPE_P (t))
6955 *walk_subtrees = 0;
6956
6957 return NULL_TREE;
6958 }
6959
6960 /* Helper for move_stmt_r. Given an EH region number for the source
6961 function, map that to the duplicate EH region number in the dest. */
6962
6963 static int
6964 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6965 {
6966 eh_region old_r, new_r;
6967
6968 old_r = get_eh_region_from_number (old_nr);
6969 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6970
6971 return new_r->index;
6972 }
6973
6974 /* Similar, but operate on INTEGER_CSTs. */
6975
6976 static tree
6977 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6978 {
6979 int old_nr, new_nr;
6980
6981 old_nr = tree_to_shwi (old_t_nr);
6982 new_nr = move_stmt_eh_region_nr (old_nr, p);
6983
6984 return build_int_cst (integer_type_node, new_nr);
6985 }
6986
6987 /* Like move_stmt_op, but for gimple statements.
6988
6989 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6990 contained in the current statement in *GSI_P and change the
6991 DECL_CONTEXT of every local variable referenced in the current
6992 statement. */
6993
6994 static tree
6995 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6996 struct walk_stmt_info *wi)
6997 {
6998 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6999 gimple *stmt = gsi_stmt (*gsi_p);
7000 tree block = gimple_block (stmt);
7001
7002 if (block == p->orig_block
7003 || (p->orig_block == NULL_TREE
7004 && block != NULL_TREE))
7005 gimple_set_block (stmt, p->new_block);
7006
7007 switch (gimple_code (stmt))
7008 {
7009 case GIMPLE_CALL:
7010 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7011 {
7012 tree r, fndecl = gimple_call_fndecl (stmt);
7013 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7014 switch (DECL_FUNCTION_CODE (fndecl))
7015 {
7016 case BUILT_IN_EH_COPY_VALUES:
7017 r = gimple_call_arg (stmt, 1);
7018 r = move_stmt_eh_region_tree_nr (r, p);
7019 gimple_call_set_arg (stmt, 1, r);
7020 /* FALLTHRU */
7021
7022 case BUILT_IN_EH_POINTER:
7023 case BUILT_IN_EH_FILTER:
7024 r = gimple_call_arg (stmt, 0);
7025 r = move_stmt_eh_region_tree_nr (r, p);
7026 gimple_call_set_arg (stmt, 0, r);
7027 break;
7028
7029 default:
7030 break;
7031 }
7032 }
7033 break;
7034
7035 case GIMPLE_RESX:
7036 {
7037 gresx *resx_stmt = as_a <gresx *> (stmt);
7038 int r = gimple_resx_region (resx_stmt);
7039 r = move_stmt_eh_region_nr (r, p);
7040 gimple_resx_set_region (resx_stmt, r);
7041 }
7042 break;
7043
7044 case GIMPLE_EH_DISPATCH:
7045 {
7046 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7047 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7048 r = move_stmt_eh_region_nr (r, p);
7049 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7050 }
7051 break;
7052
7053 case GIMPLE_OMP_RETURN:
7054 case GIMPLE_OMP_CONTINUE:
7055 break;
7056
7057 case GIMPLE_LABEL:
7058 {
7059 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7060 so that such labels can be referenced from other regions.
7061 Make sure to update it when seeing a GIMPLE_LABEL though,
7062 that is the owner of the label. */
7063 walk_gimple_op (stmt, move_stmt_op, wi);
7064 *handled_ops_p = true;
7065 tree label = gimple_label_label (as_a <glabel *> (stmt));
7066 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7067 DECL_CONTEXT (label) = p->to_context;
7068 }
7069 break;
7070
7071 default:
7072 if (is_gimple_omp (stmt))
7073 {
7074 /* Do not remap variables inside OMP directives. Variables
7075 referenced in clauses and directive header belong to the
7076 parent function and should not be moved into the child
7077 function. */
7078 bool save_remap_decls_p = p->remap_decls_p;
7079 p->remap_decls_p = false;
7080 *handled_ops_p = true;
7081
7082 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7083 move_stmt_op, wi);
7084
7085 p->remap_decls_p = save_remap_decls_p;
7086 }
7087 break;
7088 }
7089
7090 return NULL_TREE;
7091 }
7092
7093 /* Move basic block BB from function CFUN to function DEST_FN. The
7094 block is moved out of the original linked list and placed after
7095 block AFTER in the new list. Also, the block is removed from the
7096 original array of blocks and placed in DEST_FN's array of blocks.
7097 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7098 updated to reflect the moved edges.
7099
7100 The local variables are remapped to new instances; VARS_MAP is used
7101 to record the mapping. */
7102
7103 static void
7104 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7105 basic_block after, bool update_edge_count_p,
7106 struct move_stmt_d *d)
7107 {
7108 struct control_flow_graph *cfg;
7109 edge_iterator ei;
7110 edge e;
7111 gimple_stmt_iterator si;
7112 unsigned old_len;
7113
7114 /* Remove BB from dominance structures. */
7115 delete_from_dominance_info (CDI_DOMINATORS, bb);
7116
7117 /* Move BB from its current loop to the copy in the new function. */
7118 if (current_loops)
7119 {
7120 class loop *new_loop = (class loop *)bb->loop_father->aux;
7121 if (new_loop)
7122 bb->loop_father = new_loop;
7123 }
7124
7125 /* Link BB to the new linked list. */
7126 move_block_after (bb, after);
7127
7128 /* Update the edge count in the corresponding flowgraphs. */
7129 if (update_edge_count_p)
7130 FOR_EACH_EDGE (e, ei, bb->succs)
7131 {
7132 cfun->cfg->x_n_edges--;
7133 dest_cfun->cfg->x_n_edges++;
7134 }
7135
7136 /* Remove BB from the original basic block array. */
7137 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7138 cfun->cfg->x_n_basic_blocks--;
7139
7140 /* Grow DEST_CFUN's basic block array if needed. */
7141 cfg = dest_cfun->cfg;
7142 cfg->x_n_basic_blocks++;
7143 if (bb->index >= cfg->x_last_basic_block)
7144 cfg->x_last_basic_block = bb->index + 1;
7145
7146 old_len = vec_safe_length (cfg->x_basic_block_info);
7147 if ((unsigned) cfg->x_last_basic_block >= old_len)
7148 vec_safe_grow_cleared (cfg->x_basic_block_info,
7149 cfg->x_last_basic_block + 1);
7150
7151 (*cfg->x_basic_block_info)[bb->index] = bb;
7152
7153 /* Remap the variables in phi nodes. */
7154 for (gphi_iterator psi = gsi_start_phis (bb);
7155 !gsi_end_p (psi); )
7156 {
7157 gphi *phi = psi.phi ();
7158 use_operand_p use;
7159 tree op = PHI_RESULT (phi);
7160 ssa_op_iter oi;
7161 unsigned i;
7162
7163 if (virtual_operand_p (op))
7164 {
7165 /* Remove the phi nodes for virtual operands (alias analysis will be
7166 run for the new function, anyway). But replace all uses that
7167 might be outside of the region we move. */
7168 use_operand_p use_p;
7169 imm_use_iterator iter;
7170 gimple *use_stmt;
7171 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7172 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7173 SET_USE (use_p, SSA_NAME_VAR (op));
7174 remove_phi_node (&psi, true);
7175 continue;
7176 }
7177
7178 SET_PHI_RESULT (phi,
7179 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7180 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7181 {
7182 op = USE_FROM_PTR (use);
7183 if (TREE_CODE (op) == SSA_NAME)
7184 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7185 }
7186
7187 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7188 {
7189 location_t locus = gimple_phi_arg_location (phi, i);
7190 tree block = LOCATION_BLOCK (locus);
7191
7192 if (locus == UNKNOWN_LOCATION)
7193 continue;
7194 if (d->orig_block == NULL_TREE || block == d->orig_block)
7195 {
7196 locus = set_block (locus, d->new_block);
7197 gimple_phi_arg_set_location (phi, i, locus);
7198 }
7199 }
7200
7201 gsi_next (&psi);
7202 }
7203
7204 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7205 {
7206 gimple *stmt = gsi_stmt (si);
7207 struct walk_stmt_info wi;
7208
7209 memset (&wi, 0, sizeof (wi));
7210 wi.info = d;
7211 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7212
7213 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7214 {
7215 tree label = gimple_label_label (label_stmt);
7216 int uid = LABEL_DECL_UID (label);
7217
7218 gcc_assert (uid > -1);
7219
7220 old_len = vec_safe_length (cfg->x_label_to_block_map);
7221 if (old_len <= (unsigned) uid)
7222 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7223
7224 (*cfg->x_label_to_block_map)[uid] = bb;
7225 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7226
7227 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7228
7229 if (uid >= dest_cfun->cfg->last_label_uid)
7230 dest_cfun->cfg->last_label_uid = uid + 1;
7231 }
7232
7233 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7234 remove_stmt_from_eh_lp_fn (cfun, stmt);
7235
7236 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7237 gimple_remove_stmt_histograms (cfun, stmt);
7238
7239 /* We cannot leave any operands allocated from the operand caches of
7240 the current function. */
7241 free_stmt_operands (cfun, stmt);
7242 push_cfun (dest_cfun);
7243 update_stmt (stmt);
7244 if (is_gimple_call (stmt))
7245 notice_special_calls (as_a <gcall *> (stmt));
7246 pop_cfun ();
7247 }
7248
7249 FOR_EACH_EDGE (e, ei, bb->succs)
7250 if (e->goto_locus != UNKNOWN_LOCATION)
7251 {
7252 tree block = LOCATION_BLOCK (e->goto_locus);
7253 if (d->orig_block == NULL_TREE
7254 || block == d->orig_block)
7255 e->goto_locus = set_block (e->goto_locus, d->new_block);
7256 }
7257 }
7258
7259 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7260 the outermost EH region. Use REGION as the incoming base EH region.
7261 If there is no single outermost region, return NULL and set *ALL to
7262 true. */
7263
7264 static eh_region
7265 find_outermost_region_in_block (struct function *src_cfun,
7266 basic_block bb, eh_region region,
7267 bool *all)
7268 {
7269 gimple_stmt_iterator si;
7270
7271 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7272 {
7273 gimple *stmt = gsi_stmt (si);
7274 eh_region stmt_region;
7275 int lp_nr;
7276
7277 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7278 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7279 if (stmt_region)
7280 {
7281 if (region == NULL)
7282 region = stmt_region;
7283 else if (stmt_region != region)
7284 {
7285 region = eh_region_outermost (src_cfun, stmt_region, region);
7286 if (region == NULL)
7287 {
7288 *all = true;
7289 return NULL;
7290 }
7291 }
7292 }
7293 }
7294
7295 return region;
7296 }
7297
7298 static tree
7299 new_label_mapper (tree decl, void *data)
7300 {
7301 htab_t hash = (htab_t) data;
7302 struct tree_map *m;
7303 void **slot;
7304
7305 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7306
7307 m = XNEW (struct tree_map);
7308 m->hash = DECL_UID (decl);
7309 m->base.from = decl;
7310 m->to = create_artificial_label (UNKNOWN_LOCATION);
7311 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7312 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7313 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7314
7315 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7316 gcc_assert (*slot == NULL);
7317
7318 *slot = m;
7319
7320 return m->to;
7321 }
7322
7323 /* Tree walker to replace the decls used inside value expressions by
7324 duplicates. */
7325
7326 static tree
7327 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7328 {
7329 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7330
7331 switch (TREE_CODE (*tp))
7332 {
7333 case VAR_DECL:
7334 case PARM_DECL:
7335 case RESULT_DECL:
7336 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7337 break;
7338 default:
7339 break;
7340 }
7341
7342 if (IS_TYPE_OR_DECL_P (*tp))
7343 *walk_subtrees = false;
7344
7345 return NULL;
7346 }
7347
7348 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7349 subblocks. */
7350
7351 static void
7352 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7353 tree to_context)
7354 {
7355 tree *tp, t;
7356
7357 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7358 {
7359 t = *tp;
7360 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7361 continue;
7362 replace_by_duplicate_decl (&t, vars_map, to_context);
7363 if (t != *tp)
7364 {
7365 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7366 {
7367 tree x = DECL_VALUE_EXPR (*tp);
7368 struct replace_decls_d rd = { vars_map, to_context };
7369 x = unshare_expr (x);
7370 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7371 SET_DECL_VALUE_EXPR (t, x);
7372 DECL_HAS_VALUE_EXPR_P (t) = 1;
7373 }
7374 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7375 *tp = t;
7376 }
7377 }
7378
7379 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7380 replace_block_vars_by_duplicates (block, vars_map, to_context);
7381 }
7382
7383 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7384 from FN1 to FN2. */
7385
7386 static void
7387 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7388 class loop *loop)
7389 {
7390 /* Discard it from the old loop array. */
7391 (*get_loops (fn1))[loop->num] = NULL;
7392
7393 /* Place it in the new loop array, assigning it a new number. */
7394 loop->num = number_of_loops (fn2);
7395 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7396
7397 /* Recurse to children. */
7398 for (loop = loop->inner; loop; loop = loop->next)
7399 fixup_loop_arrays_after_move (fn1, fn2, loop);
7400 }
7401
7402 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7403 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7404
7405 DEBUG_FUNCTION void
7406 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7407 {
7408 basic_block bb;
7409 edge_iterator ei;
7410 edge e;
7411 bitmap bbs = BITMAP_ALLOC (NULL);
7412 int i;
7413
7414 gcc_assert (entry != NULL);
7415 gcc_assert (entry != exit);
7416 gcc_assert (bbs_p != NULL);
7417
7418 gcc_assert (bbs_p->length () > 0);
7419
7420 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7421 bitmap_set_bit (bbs, bb->index);
7422
7423 gcc_assert (bitmap_bit_p (bbs, entry->index));
7424 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7425
7426 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7427 {
7428 if (bb == entry)
7429 {
7430 gcc_assert (single_pred_p (entry));
7431 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7432 }
7433 else
7434 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7435 {
7436 e = ei_edge (ei);
7437 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7438 }
7439
7440 if (bb == exit)
7441 {
7442 gcc_assert (single_succ_p (exit));
7443 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7444 }
7445 else
7446 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7447 {
7448 e = ei_edge (ei);
7449 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7450 }
7451 }
7452
7453 BITMAP_FREE (bbs);
7454 }
7455
7456 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7457
7458 bool
7459 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7460 {
7461 bitmap release_names = (bitmap)data;
7462
7463 if (TREE_CODE (from) != SSA_NAME)
7464 return true;
7465
7466 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7467 return true;
7468 }
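/* For illustration: the function above is a hash_map traversal
   callback. Collecting the SSA versions recorded in a VARS_MAP (a
   placeholder map from old to new names) so they can later be
   released in ascending version order:

     bitmap release_names = BITMAP_ALLOC (NULL);
     vars_map.traverse <void *, gather_ssa_name_hash_map_from>
       (release_names);

     BITMAP_FREE (release_names);
*/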
7469
7470 /* Return LOOP_DIST_ALIAS call if present in BB. */
7471
7472 static gimple *
7473 find_loop_dist_alias (basic_block bb)
7474 {
7475 gimple *g = last_stmt (bb);
7476 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7477 return NULL;
7478
7479 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7480 gsi_prev (&gsi);
7481 if (gsi_end_p (gsi))
7482 return NULL;
7483
7484 g = gsi_stmt (gsi);
7485 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7486 return g;
7487 return NULL;
7488 }
7489
7490 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7491 to VALUE and update any immediate uses of its LHS. */
7492
7493 void
7494 fold_loop_internal_call (gimple *g, tree value)
7495 {
7496 tree lhs = gimple_call_lhs (g);
7497 use_operand_p use_p;
7498 imm_use_iterator iter;
7499 gimple *use_stmt;
7500 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7501
7502 update_call_from_tree (&gsi, value);
7503 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7504 {
7505 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7506 SET_USE (use_p, value);
7507 update_stmt (use_stmt);
7508 }
7509 }
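/* For illustration: the loop vectorizer folds its versioning guard
   this way once it knows which copy of a loop survives; e.g. keeping
   only the scalar version of a guard G (a placeholder for the
   IFN_LOOP_VECTORIZED call):

     fold_loop_internal_call (g, boolean_false_node);

   Conditions testing the call's LHS then collapse during subsequent
   CFG cleanup. */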
7510
7511 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7512 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7513 single basic block in the original CFG and the new basic block is
7514 returned. DEST_CFUN must not have a CFG yet.
7515
7516 Note that the region need not be a pure SESE region. Blocks inside
7517 the region may contain calls to abort/exit. The only restriction
7518 is that ENTRY_BB should be the only entry point and it must
7519 dominate EXIT_BB.
7520
7521 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7522 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7523 to the new function.
7524
7525 All local variables referenced in the region are assumed to be in
7526 the corresponding BLOCK_VARS and unexpanded variable lists
7527 associated with DEST_CFUN.
7528
7529 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7530 reimplement move_sese_region_to_fn by duplicating the region rather than
7531 moving it. */
7532
7533 basic_block
7534 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7535 basic_block exit_bb, tree orig_block)
7536 {
7537 vec<basic_block> bbs, dom_bbs;
7538 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7539 basic_block after, bb, *entry_pred, *exit_succ, abb;
7540 struct function *saved_cfun = cfun;
7541 int *entry_flag, *exit_flag;
7542 profile_probability *entry_prob, *exit_prob;
7543 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7544 edge e;
7545 edge_iterator ei;
7546 htab_t new_label_map;
7547 hash_map<void *, void *> *eh_map;
7548 class loop *loop = entry_bb->loop_father;
7549 class loop *loop0 = get_loop (saved_cfun, 0);
7550 struct move_stmt_d d;
7551
7552 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7553 region. */
7554 gcc_assert (entry_bb != exit_bb
7555 && (!exit_bb
7556 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7557
7558 /* Collect all the blocks in the region. Manually add ENTRY_BB
7559 because gather_blocks_in_sese_region does not add its root. */
7560 bbs.create (0);
7561 bbs.safe_push (entry_bb);
7562 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7563
7564 if (flag_checking)
7565 verify_sese (entry_bb, exit_bb, &bbs);
7566
7567 /* The blocks that used to be dominated by something in BBS will now be
7568 dominated by the new block. */
7569 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7570 bbs.address (),
7571 bbs.length ());
7572
7573 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7574 the predecessor edges to ENTRY_BB and the successor edges to
7575 EXIT_BB so that we can re-attach them to the new basic block that
7576 will replace the region. */
7577 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7578 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7579 entry_flag = XNEWVEC (int, num_entry_edges);
7580 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7581 i = 0;
7582 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7583 {
7584 entry_prob[i] = e->probability;
7585 entry_flag[i] = e->flags;
7586 entry_pred[i++] = e->src;
7587 remove_edge (e);
7588 }
7589
7590 if (exit_bb)
7591 {
7592 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7593 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7594 exit_flag = XNEWVEC (int, num_exit_edges);
7595 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7596 i = 0;
7597 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7598 {
7599 exit_prob[i] = e->probability;
7600 exit_flag[i] = e->flags;
7601 exit_succ[i++] = e->dest;
7602 remove_edge (e);
7603 }
7604 }
7605 else
7606 {
7607 num_exit_edges = 0;
7608 exit_succ = NULL;
7609 exit_flag = NULL;
7610 exit_prob = NULL;
7611 }
7612
7613 /* Switch context to the child function to initialize DEST_FN's CFG. */
7614 gcc_assert (dest_cfun->cfg == NULL);
7615 push_cfun (dest_cfun);
7616
7617 init_empty_tree_cfg ();
7618
7619 /* Initialize EH information for the new function. */
7620 eh_map = NULL;
7621 new_label_map = NULL;
7622 if (saved_cfun->eh)
7623 {
7624 eh_region region = NULL;
7625 bool all = false;
7626
7627 FOR_EACH_VEC_ELT (bbs, i, bb)
7628 {
7629 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7630 if (all)
7631 break;
7632 }
7633
7634 init_eh_for_function ();
7635 if (region != NULL || all)
7636 {
7637 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7638 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7639 new_label_mapper, new_label_map);
7640 }
7641 }
7642
7643 /* Initialize an empty loop tree. */
7644 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7645 init_loops_structure (dest_cfun, loops, 1);
7646 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7647 set_loops_for_fn (dest_cfun, loops);
7648
7649 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7650
7651 /* Move the outlined loop tree part. */
7652 num_nodes = bbs.length ();
7653 FOR_EACH_VEC_ELT (bbs, i, bb)
7654 {
7655 if (bb->loop_father->header == bb)
7656 {
7657 class loop *this_loop = bb->loop_father;
7658 class loop *outer = loop_outer (this_loop);
7659 if (outer == loop
7660 /* If the SESE region contains some bbs ending with
7661 a noreturn call, those are considered to belong
7662 to the outermost loop in saved_cfun, rather than
7663 the entry_bb's loop_father. */
7664 || outer == loop0)
7665 {
7666 if (outer != loop)
7667 num_nodes -= this_loop->num_nodes;
7668 flow_loop_tree_node_remove (bb->loop_father);
7669 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7670 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7671 }
7672 }
7673 else if (bb->loop_father == loop0 && loop0 != loop)
7674 num_nodes--;
7675
7676 /* Remove loop exits from the outlined region. */
7677 if (loops_for_fn (saved_cfun)->exits)
7678 FOR_EACH_EDGE (e, ei, bb->succs)
7679 {
7680 struct loops *l = loops_for_fn (saved_cfun);
7681 loop_exit **slot
7682 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7683 NO_INSERT);
7684 if (slot)
7685 l->exits->clear_slot (slot);
7686 }
7687 }
7688
7689 /* Adjust the number of blocks in the tree root of the outlined part. */
7690 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7691
7692 /* Setup a mapping to be used by move_block_to_fn. */
7693 loop->aux = current_loops->tree_root;
7694 loop0->aux = current_loops->tree_root;
7695
7696 /* Fix up orig_loop_num. If the block referenced in it has been moved
7697 to dest_cfun, update the orig_loop_num field; otherwise clear it. */
7698 class loop *dloop;
7699 signed char *moved_orig_loop_num = NULL;
7700 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7701 if (dloop->orig_loop_num)
7702 {
7703 if (moved_orig_loop_num == NULL)
7704 moved_orig_loop_num
7705 = XCNEWVEC (signed char, vec_safe_length (larray));
7706 if ((*larray)[dloop->orig_loop_num] != NULL
7707 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7708 {
7709 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7710 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7711 moved_orig_loop_num[dloop->orig_loop_num]++;
7712 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7713 }
7714 else
7715 {
7716 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7717 dloop->orig_loop_num = 0;
7718 }
7719 }
7720 pop_cfun ();
7721
7722 if (moved_orig_loop_num)
7723 {
7724 FOR_EACH_VEC_ELT (bbs, i, bb)
7725 {
7726 gimple *g = find_loop_dist_alias (bb);
7727 if (g == NULL)
7728 continue;
7729
7730 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7731 gcc_assert (orig_loop_num
7732 && (unsigned) orig_loop_num < vec_safe_length (larray));
7733 if (moved_orig_loop_num[orig_loop_num] == 2)
7734 {
7735 /* If we have moved both loops with this orig_loop_num into
7736 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7737 too, update the first argument. */
7738 gcc_assert ((*larray)[orig_loop_num] != NULL
7739 && (get_loop (saved_cfun, orig_loop_num)
7740 == NULL));
7741 tree t = build_int_cst (integer_type_node,
7742 (*larray)[orig_loop_num]->num);
7743 gimple_call_set_arg (g, 0, t);
7744 update_stmt (g);
7745 /* Make sure the following loop will not update it. */
7746 moved_orig_loop_num[orig_loop_num] = 0;
7747 }
7748 else
7749 /* Otherwise at least one of the loops stayed in saved_cfun.
7750 Remove the LOOP_DIST_ALIAS call. */
7751 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7752 }
7753 FOR_EACH_BB_FN (bb, saved_cfun)
7754 {
7755 gimple *g = find_loop_dist_alias (bb);
7756 if (g == NULL)
7757 continue;
7758 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7759 gcc_assert (orig_loop_num
7760 && (unsigned) orig_loop_num < vec_safe_length (larray));
7761 if (moved_orig_loop_num[orig_loop_num])
7762 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7763 of the corresponding loops was moved, remove it.  */
7764 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7765 }
7766 XDELETEVEC (moved_orig_loop_num);
7767 }
7768 ggc_free (larray);
7769
7770 /* Move blocks from BBS into DEST_CFUN. */
7771 gcc_assert (bbs.length () >= 2);
7772 after = dest_cfun->cfg->x_entry_block_ptr;
7773 hash_map<tree, tree> vars_map;
7774
7775 memset (&d, 0, sizeof (d));
7776 d.orig_block = orig_block;
7777 d.new_block = DECL_INITIAL (dest_cfun->decl);
7778 d.from_context = cfun->decl;
7779 d.to_context = dest_cfun->decl;
7780 d.vars_map = &vars_map;
7781 d.new_label_map = new_label_map;
7782 d.eh_map = eh_map;
7783 d.remap_decls_p = true;
7784
7785 if (gimple_in_ssa_p (cfun))
7786 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7787 {
7788 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7789 set_ssa_default_def (dest_cfun, arg, narg);
7790 vars_map.put (arg, narg);
7791 }
7792
7793 FOR_EACH_VEC_ELT (bbs, i, bb)
7794 {
7795 /* No need to update edge counts on the last block. It has
7796 already been updated earlier when we detached the region from
7797 the original CFG. */
7798 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7799 after = bb;
7800 }
7801
7802 /* Adjust the maximum clique used. */
7803 dest_cfun->last_clique = saved_cfun->last_clique;
7804
7805 loop->aux = NULL;
7806 loop0->aux = NULL;
7807 /* Loop sizes are no longer correct, fix them up. */
7808 loop->num_nodes -= num_nodes;
7809 for (class loop *outer = loop_outer (loop);
7810 outer; outer = loop_outer (outer))
7811 outer->num_nodes -= num_nodes;
7812 loop0->num_nodes -= bbs.length () - num_nodes;
7813
7814 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7815 {
7816 class loop *aloop;
7817 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7818 if (aloop != NULL)
7819 {
7820 if (aloop->simduid)
7821 {
7822 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7823 d.to_context);
7824 dest_cfun->has_simduid_loops = true;
7825 }
7826 if (aloop->force_vectorize)
7827 dest_cfun->has_force_vectorize_loops = true;
7828 }
7829 }
7830
7831 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7832 if (orig_block)
7833 {
7834 tree block;
7835 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7836 == NULL_TREE);
7837 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7838 = BLOCK_SUBBLOCKS (orig_block);
7839 for (block = BLOCK_SUBBLOCKS (orig_block);
7840 block; block = BLOCK_CHAIN (block))
7841 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7842 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7843 }
7844
7845 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7846 &vars_map, dest_cfun->decl);
7847
7848 if (new_label_map)
7849 htab_delete (new_label_map);
7850 if (eh_map)
7851 delete eh_map;
7852
7853 if (gimple_in_ssa_p (cfun))
7854 {
7855 /* We need to release ssa-names in a defined order, so first find them,
7856 and then iterate in ascending version order. */
7857 bitmap release_names = BITMAP_ALLOC (NULL);
7858 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7859 bitmap_iterator bi;
7860 unsigned i;
7861 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7862 release_ssa_name (ssa_name (i));
7863 BITMAP_FREE (release_names);
7864 }
7865
7866 /* Rewire the entry and exit blocks. The successor to the entry
7867 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7868 the child function. Similarly, the predecessor of DEST_FN's
7869 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7870 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7871 various CFG manipulation functions get to the right CFG.
7872
7873 FIXME, this is silly. The CFG ought to become a parameter to
7874 these helpers. */
7875 push_cfun (dest_cfun);
7876 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7877 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7878 if (exit_bb)
7879 {
7880 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7881 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7882 }
7883 else
7884 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7885 pop_cfun ();
7886
7887 /* Back in the original function, the SESE region has disappeared;
7888 create a new basic block in its place.  */
7889 bb = create_empty_bb (entry_pred[0]);
7890 if (current_loops)
7891 add_bb_to_loop (bb, loop);
7892 for (i = 0; i < num_entry_edges; i++)
7893 {
7894 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7895 e->probability = entry_prob[i];
7896 }
7897
7898 for (i = 0; i < num_exit_edges; i++)
7899 {
7900 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7901 e->probability = exit_prob[i];
7902 }
7903
7904 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7905 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7906 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7907 dom_bbs.release ();
7908
7909 if (exit_bb)
7910 {
7911 free (exit_prob);
7912 free (exit_flag);
7913 free (exit_succ);
7914 }
7915 free (entry_prob);
7916 free (entry_flag);
7917 free (entry_pred);
7918 bbs.release ();
7919
7920 return bb;
7921 }
7922
7923 /* Dump default def DEF to file FILE using FLAGS and indentation
7924 SPC. */
7925
7926 static void
7927 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7928 {
7929 for (int i = 0; i < spc; ++i)
7930 fprintf (file, " ");
7931 dump_ssaname_info_to_file (file, def, spc);
7932
7933 print_generic_expr (file, TREE_TYPE (def), flags);
7934 fprintf (file, " ");
7935 print_generic_expr (file, def, flags);
7936 fprintf (file, " = ");
7937 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7938 fprintf (file, ";\n");
7939 }
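/* For example, for an int parameter i whose default definition is the
   SSA name i_1(D), the above emits (modulo FLAGS and any ssa-name info
   printed by dump_ssaname_info_to_file) a line like

     int i_1(D) = i;  */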
7940
7941 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7942
7943 static void
7944 print_no_sanitize_attr_value (FILE *file, tree value)
7945 {
7946 unsigned int flags = tree_to_uhwi (value);
7947 bool first = true;
7948 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7949 {
7950 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7951 {
7952 if (!first)
7953 fprintf (file, " | ");
7954 fprintf (file, "%s", sanitizer_opts[i].name);
7955 first = false;
7956 }
7957 }
7958 }
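/* For instance, for a function declared with
   __attribute__((no_sanitize ("alignment", "object-size"))) the stored
   VALUE has the SANITIZE_ALIGNMENT and SANITIZE_OBJECT_SIZE bits set
   and the above prints something like "alignment | object-size".  */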
7959
7960 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7961 dumpfile.h).  */
7962
7963 void
7964 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7965 {
7966 tree arg, var, old_current_fndecl = current_function_decl;
7967 struct function *dsf;
7968 bool ignore_topmost_bind = false, any_var = false;
7969 basic_block bb;
7970 tree chain;
7971 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7972 && decl_is_tm_clone (fndecl));
7973 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7974
7975 tree fntype = TREE_TYPE (fndecl);
7976 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
7977
7978 for (int i = 0; i != 2; ++i)
7979 {
7980 if (!attrs[i])
7981 continue;
7982
7983 fprintf (file, "__attribute__((");
7984
7985 bool first = true;
7986 tree chain;
7987 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
7988 {
7989 if (!first)
7990 fprintf (file, ", ");
7991
7992 tree name = get_attribute_name (chain);
7993 print_generic_expr (file, name, dump_flags);
7994 if (TREE_VALUE (chain) != NULL_TREE)
7995 {
7996 fprintf (file, " (");
7997
7998 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7999 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8000 else
8001 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8002 fprintf (file, ")");
8003 }
8004 }
8005
8006 fprintf (file, "))\n");
8007 }
8008
8009 current_function_decl = fndecl;
8010 if (flags & TDF_GIMPLE)
8011 {
8012 static bool hotness_bb_param_printed = false;
8013 if (profile_info != NULL
8014 && !hotness_bb_param_printed)
8015 {
8016 hotness_bb_param_printed = true;
8017 fprintf (file,
8018 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8019 " */\n", get_hot_bb_threshold ());
8020 }
8021
8022 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8023 dump_flags | TDF_SLIM);
8024 fprintf (file, " __GIMPLE (%s",
8025 (fun->curr_properties & PROP_ssa) ? "ssa"
8026 : (fun->curr_properties & PROP_cfg) ? "cfg"
8027 : "");
8028
8029 if (cfun->cfg)
8030 {
8031 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8032 if (bb->count.initialized_p ())
8033 fprintf (file, ",%s(%" PRIu64 ")",
8034 profile_quality_as_string (bb->count.quality ()),
8035 bb->count.value ());
8036 fprintf (file, ")\n%s (", function_name (fun));
8037 }
8038 }
8039 else
8040 {
8041 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8042 fprintf (file, " %s %s(", function_name (fun),
8043 tmclone ? "[tm-clone] " : "");
8044 }
8045
8046 arg = DECL_ARGUMENTS (fndecl);
8047 while (arg)
8048 {
8049 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8050 fprintf (file, " ");
8051 print_generic_expr (file, arg, dump_flags);
8052 if (DECL_CHAIN (arg))
8053 fprintf (file, ", ");
8054 arg = DECL_CHAIN (arg);
8055 }
8056 fprintf (file, ")\n");
8057
8058 dsf = DECL_STRUCT_FUNCTION (fndecl);
8059 if (dsf && (flags & TDF_EH))
8060 dump_eh_tree (file, dsf);
8061
8062 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8063 {
8064 dump_node (fndecl, TDF_SLIM | flags, file);
8065 current_function_decl = old_current_fndecl;
8066 return;
8067 }
8068
8069 /* When GIMPLE is lowered, the variables are no longer available in
8070 BIND_EXPRs, so display them separately. */
8071 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8072 {
8073 unsigned ix;
8074 ignore_topmost_bind = true;
8075
8076 fprintf (file, "{\n");
8077 if (gimple_in_ssa_p (fun)
8078 && (flags & TDF_ALIAS))
8079 {
8080 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8081 arg = DECL_CHAIN (arg))
8082 {
8083 tree def = ssa_default_def (fun, arg);
8084 if (def)
8085 dump_default_def (file, def, 2, flags);
8086 }
8087
8088 tree res = DECL_RESULT (fun->decl);
8089 if (res != NULL_TREE
8090 && DECL_BY_REFERENCE (res))
8091 {
8092 tree def = ssa_default_def (fun, res);
8093 if (def)
8094 dump_default_def (file, def, 2, flags);
8095 }
8096
8097 tree static_chain = fun->static_chain_decl;
8098 if (static_chain != NULL_TREE)
8099 {
8100 tree def = ssa_default_def (fun, static_chain);
8101 if (def)
8102 dump_default_def (file, def, 2, flags);
8103 }
8104 }
8105
8106 if (!vec_safe_is_empty (fun->local_decls))
8107 FOR_EACH_LOCAL_DECL (fun, ix, var)
8108 {
8109 print_generic_decl (file, var, flags);
8110 fprintf (file, "\n");
8111
8112 any_var = true;
8113 }
8114
8115 tree name;
8116
8117 if (gimple_in_ssa_p (cfun))
8118 FOR_EACH_SSA_NAME (ix, name, cfun)
8119 {
8120 if (!SSA_NAME_VAR (name))
8121 {
8122 fprintf (file, " ");
8123 print_generic_expr (file, TREE_TYPE (name), flags);
8124 fprintf (file, " ");
8125 print_generic_expr (file, name, flags);
8126 fprintf (file, ";\n");
8127
8128 any_var = true;
8129 }
8130 }
8131 }
8132
8133 if (fun && fun->decl == fndecl
8134 && fun->cfg
8135 && basic_block_info_for_fn (fun))
8136 {
8137 /* If the CFG has been built, emit a CFG-based dump. */
8138 if (!ignore_topmost_bind)
8139 fprintf (file, "{\n");
8140
8141 if (any_var && n_basic_blocks_for_fn (fun))
8142 fprintf (file, "\n");
8143
8144 FOR_EACH_BB_FN (bb, fun)
8145 dump_bb (file, bb, 2, flags);
8146
8147 fprintf (file, "}\n");
8148 }
8149 else if (fun->curr_properties & PROP_gimple_any)
8150 {
8151 /* The function is now in GIMPLE form but the CFG has not been
8152 built yet. Emit the single sequence of GIMPLE statements
8153 that make up its body. */
8154 gimple_seq body = gimple_body (fndecl);
8155
8156 if (gimple_seq_first_stmt (body)
8157 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8158 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8159 print_gimple_seq (file, body, 0, flags);
8160 else
8161 {
8162 if (!ignore_topmost_bind)
8163 fprintf (file, "{\n");
8164
8165 if (any_var)
8166 fprintf (file, "\n");
8167
8168 print_gimple_seq (file, body, 2, flags);
8169 fprintf (file, "}\n");
8170 }
8171 }
8172 else
8173 {
8174 int indent;
8175
8176 /* Make a tree based dump. */
8177 chain = DECL_SAVED_TREE (fndecl);
8178 if (chain && TREE_CODE (chain) == BIND_EXPR)
8179 {
8180 if (ignore_topmost_bind)
8181 {
8182 chain = BIND_EXPR_BODY (chain);
8183 indent = 2;
8184 }
8185 else
8186 indent = 0;
8187 }
8188 else
8189 {
8190 if (!ignore_topmost_bind)
8191 {
8192 fprintf (file, "{\n");
8193 /* No topmost bind, pretend it's ignored for later. */
8194 ignore_topmost_bind = true;
8195 }
8196 indent = 2;
8197 }
8198
8199 if (any_var)
8200 fprintf (file, "\n");
8201
8202 print_generic_stmt_indented (file, chain, flags, indent);
8203 if (ignore_topmost_bind)
8204 fprintf (file, "}\n");
8205 }
8206
8207 if (flags & TDF_ENUMERATE_LOCALS)
8208 dump_enumerated_decls (file, flags);
8209 fprintf (file, "\n\n");
8210
8211 current_function_decl = old_current_fndecl;
8212 }
8213
8214 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
8215
8216 DEBUG_FUNCTION void
8217 debug_function (tree fn, dump_flags_t flags)
8218 {
8219 dump_function_to_file (fn, stderr, flags);
8220 }
8221
8222
8223 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8224
8225 static void
8226 print_pred_bbs (FILE *file, basic_block bb)
8227 {
8228 edge e;
8229 edge_iterator ei;
8230
8231 FOR_EACH_EDGE (e, ei, bb->preds)
8232 fprintf (file, "bb_%d ", e->src->index);
8233 }
8234
8235
8236 /* Print on FILE the indexes for the successors of basic_block BB. */
8237
8238 static void
8239 print_succ_bbs (FILE *file, basic_block bb)
8240 {
8241 edge e;
8242 edge_iterator ei;
8243
8244 FOR_EACH_EDGE (e, ei, bb->succs)
8245 fprintf (file, "bb_%d ", e->dest->index);
8246 }
8247
8248 /* Print to FILE the basic block BB according to the VERBOSITY level.  */
8249
8250 void
8251 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8252 {
8253 char *s_indent = (char *) alloca ((size_t) indent + 1);
8254 memset ((void *) s_indent, ' ', (size_t) indent);
8255 s_indent[indent] = '\0';
8256
8257 /* Print basic_block's header. */
8258 if (verbosity >= 2)
8259 {
8260 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8261 print_pred_bbs (file, bb);
8262 fprintf (file, "}, succs = {");
8263 print_succ_bbs (file, bb);
8264 fprintf (file, "})\n");
8265 }
8266
8267 /* Print basic_block's body. */
8268 if (verbosity >= 3)
8269 {
8270 fprintf (file, "%s {\n", s_indent);
8271 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8272 fprintf (file, "%s }\n", s_indent);
8273 }
8274 }
8275
8276 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8277
8278 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
8279 VERBOSITY level this outputs the contents of the loop, or just its
8280 structure.  */
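/* As used here and below, VERBOSITY 0 prints just the loop header
   line, 1 additionally shows the nesting of inner loops, 2 also lists
   each block with its pred/succ sets, and 3 dumps the block bodies as
   well.  */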
8281
8282 static void
8283 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8284 {
8285 char *s_indent;
8286 basic_block bb;
8287
8288 if (loop == NULL)
8289 return;
8290
8291 s_indent = (char *) alloca ((size_t) indent + 1);
8292 memset ((void *) s_indent, ' ', (size_t) indent);
8293 s_indent[indent] = '\0';
8294
8295 /* Print loop's header. */
8296 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8297 if (loop->header)
8298 fprintf (file, "header = %d", loop->header->index);
8299 else
8300 {
8301 fprintf (file, "deleted)\n");
8302 return;
8303 }
8304 if (loop->latch)
8305 fprintf (file, ", latch = %d", loop->latch->index);
8306 else
8307 fprintf (file, ", multiple latches");
8308 fprintf (file, ", niter = ");
8309 print_generic_expr (file, loop->nb_iterations);
8310
8311 if (loop->any_upper_bound)
8312 {
8313 fprintf (file, ", upper_bound = ");
8314 print_decu (loop->nb_iterations_upper_bound, file);
8315 }
8316 if (loop->any_likely_upper_bound)
8317 {
8318 fprintf (file, ", likely_upper_bound = ");
8319 print_decu (loop->nb_iterations_likely_upper_bound, file);
8320 }
8321
8322 if (loop->any_estimate)
8323 {
8324 fprintf (file, ", estimate = ");
8325 print_decu (loop->nb_iterations_estimate, file);
8326 }
8327 if (loop->unroll)
8328 fprintf (file, ", unroll = %d", loop->unroll);
8329 fprintf (file, ")\n");
8330
8331 /* Print loop's body. */
8332 if (verbosity >= 1)
8333 {
8334 fprintf (file, "%s{\n", s_indent);
8335 FOR_EACH_BB_FN (bb, cfun)
8336 if (bb->loop_father == loop)
8337 print_loops_bb (file, bb, indent, verbosity);
8338
8339 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8340 fprintf (file, "%s}\n", s_indent);
8341 }
8342 }
8343
8344 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8345 spaces.  Depending on the VERBOSITY level this outputs the contents
8346 of the loop, or just its structure.  */
8347
8348 static void
8349 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8350 int verbosity)
8351 {
8352 if (loop == NULL)
8353 return;
8354
8355 print_loop (file, loop, indent, verbosity);
8356 print_loop_and_siblings (file, loop->next, indent, verbosity);
8357 }
8358
8359 /* Follow a CFG edge from the entry point of the program, and on entry
8360 of a loop, pretty print the loop structure on FILE. */
8361
8362 void
8363 print_loops (FILE *file, int verbosity)
8364 {
8365 basic_block bb;
8366
8367 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8368 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8369 if (bb && bb->loop_father)
8370 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8371 }
8372
8373 /* Dump a loop. */
8374
8375 DEBUG_FUNCTION void
8376 debug (class loop &ref)
8377 {
8378 print_loop (stderr, &ref, 0, /*verbosity*/0);
8379 }
8380
8381 DEBUG_FUNCTION void
8382 debug (class loop *ptr)
8383 {
8384 if (ptr)
8385 debug (*ptr);
8386 else
8387 fprintf (stderr, "<nil>\n");
8388 }
8389
8390 /* Dump a loop verbosely. */
8391
8392 DEBUG_FUNCTION void
8393 debug_verbose (class loop &ref)
8394 {
8395 print_loop (stderr, &ref, 0, /*verbosity*/3);
8396 }
8397
8398 DEBUG_FUNCTION void
8399 debug_verbose (class loop *ptr)
8400 {
8401 if (ptr)
8402 debug_verbose (*ptr);
8403 else
8404 fprintf (stderr, "<nil>\n");
8405 }
8406
8407
8408 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8409
8410 DEBUG_FUNCTION void
8411 debug_loops (int verbosity)
8412 {
8413 print_loops (stderr, verbosity);
8414 }
8415
8416 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8417
8418 DEBUG_FUNCTION void
8419 debug_loop (class loop *loop, int verbosity)
8420 {
8421 print_loop (stderr, loop, 0, verbosity);
8422 }
8423
8424 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8425 level. */
8426
8427 DEBUG_FUNCTION void
8428 debug_loop_num (unsigned num, int verbosity)
8429 {
8430 debug_loop (get_loop (cfun, num), verbosity);
8431 }
8432
8433 /* Return true if BB ends with a call, possibly followed by some
8434 instructions that must stay with the call.  Return false
8435 otherwise.  */
8436
8437 static bool
8438 gimple_block_ends_with_call_p (basic_block bb)
8439 {
8440 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8441 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8442 }
8443
8444
8445 /* Return true if BB ends with a conditional branch.  Return false
8446 otherwise.  */
8447
8448 static bool
8449 gimple_block_ends_with_condjump_p (const_basic_block bb)
8450 {
8451 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8452 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8453 }
8454
8455
8456 /* Return true if statement T may terminate execution of BB in ways not
8457 explicitly represented in the CFG.  */
8458
8459 bool
8460 stmt_can_terminate_bb_p (gimple *t)
8461 {
8462 tree fndecl = NULL_TREE;
8463 int call_flags = 0;
8464
8465 /* An EH exception not handled internally terminates execution of the
8466 whole function.  */
8467 if (stmt_can_throw_external (cfun, t))
8468 return true;
8469
8470 /* NORETURN and LONGJMP calls already have an edge to exit.
8471 CONST and PURE calls do not need one.
8472 We don't currently check for CONST and PURE here, although
8473 it would be a good idea, because those attributes are
8474 figured out from the RTL in mark_constant_function, and
8475 the counter incrementation code from -fprofile-arcs
8476 leads to different results from -fbranch-probabilities. */
8477 if (is_gimple_call (t))
8478 {
8479 fndecl = gimple_call_fndecl (t);
8480 call_flags = gimple_call_flags (t);
8481 }
8482
8483 if (is_gimple_call (t)
8484 && fndecl
8485 && fndecl_built_in_p (fndecl)
8486 && (call_flags & ECF_NOTHROW)
8487 && !(call_flags & ECF_RETURNS_TWICE)
8488 /* fork() doesn't really return twice, but the effect of
8489 wrapping it in __gcov_fork() which calls __gcov_dump() and
8490 __gcov_reset() and clears the counters before forking has the same
8491 effect as returning twice. Force a fake edge. */
8492 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8493 return false;
8494
8495 if (is_gimple_call (t))
8496 {
8497 edge_iterator ei;
8498 edge e;
8499 basic_block bb;
8500
8501 if (call_flags & (ECF_PURE | ECF_CONST)
8502 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8503 return false;
8504
8505 /* A function call may do a longjmp, terminate the program or do other
8506 things.  Special-case noreturn calls whose block has no non-fake edges
8507 out: there the fact is already represented by the lack of edges out of T.  */
8508 if (!(call_flags & ECF_NORETURN))
8509 return true;
8510
8511 bb = gimple_bb (t);
8512 FOR_EACH_EDGE (e, ei, bb->succs)
8513 if ((e->flags & EDGE_FAKE) == 0)
8514 return true;
8515 }
8516
8517 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8518 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8519 return true;
8520
8521 return false;
8522 }
8523
8524
8525 /* Add fake edges to the function exit for any non-constant and
8526 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8527 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
8528 or in the whole CFG if BLOCKS is zero.  Return the number of blocks
8529 that were split.
8530
8531 The goal is to expose cases in which entering a basic block does
8532 not imply that all subsequent instructions must be executed. */
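/* A sketch of the effect (FOO and X are hypothetical): for a block

     foo ();
     x = 1;

   where foo () may call exit () or longjmp (), entering the block does
   not imply that x = 1 executes, so the block is split after the call
   and a fake edge to EXIT is added.  */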
8533
8534 static int
8535 gimple_flow_call_edges_add (sbitmap blocks)
8536 {
8537 int i;
8538 int blocks_split = 0;
8539 int last_bb = last_basic_block_for_fn (cfun);
8540 bool check_last_block = false;
8541
8542 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8543 return 0;
8544
8545 if (! blocks)
8546 check_last_block = true;
8547 else
8548 check_last_block = bitmap_bit_p (blocks,
8549 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8550
8551 /* In the last basic block, before epilogue generation, there will be
8552 a fallthru edge to EXIT. Special care is required if the last insn
8553 of the last basic block is a call because make_edge folds duplicate
8554 edges, which would result in the fallthru edge also being marked
8555 fake, which would result in the fallthru edge being removed by
8556 remove_fake_edges, which would result in an invalid CFG.
8557
8558 Moreover, we can't elide the outgoing fake edge, since the block
8559 profiler needs to take this into account in order to solve the minimal
8560 spanning tree in the case that the call doesn't return.
8561
8562 Handle this by adding a dummy instruction in a new last basic block. */
8563 if (check_last_block)
8564 {
8565 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8566 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8567 gimple *t = NULL;
8568
8569 if (!gsi_end_p (gsi))
8570 t = gsi_stmt (gsi);
8571
8572 if (t && stmt_can_terminate_bb_p (t))
8573 {
8574 edge e;
8575
8576 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8577 if (e)
8578 {
8579 gsi_insert_on_edge (e, gimple_build_nop ());
8580 gsi_commit_edge_inserts ();
8581 }
8582 }
8583 }
8584
8585 /* Now add fake edges to the function exit for any non-constant
8586 calls since there is no way that we can determine if they will
8587 return or not...  */
8588 for (i = 0; i < last_bb; i++)
8589 {
8590 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8591 gimple_stmt_iterator gsi;
8592 gimple *stmt, *last_stmt;
8593
8594 if (!bb)
8595 continue;
8596
8597 if (blocks && !bitmap_bit_p (blocks, i))
8598 continue;
8599
8600 gsi = gsi_last_nondebug_bb (bb);
8601 if (!gsi_end_p (gsi))
8602 {
8603 last_stmt = gsi_stmt (gsi);
8604 do
8605 {
8606 stmt = gsi_stmt (gsi);
8607 if (stmt_can_terminate_bb_p (stmt))
8608 {
8609 edge e;
8610
8611 /* The handling above of the final block before the
8612 epilogue should be enough to verify that there is
8613 no edge to the exit block in CFG already.
8614 Calling make_edge in such case would cause us to
8615 mark that edge as fake and remove it later. */
8616 if (flag_checking && stmt == last_stmt)
8617 {
8618 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8619 gcc_assert (e == NULL);
8620 }
8621
8622 /* Note that the following may create a new basic block
8623 and renumber the existing basic blocks. */
8624 if (stmt != last_stmt)
8625 {
8626 e = split_block (bb, stmt);
8627 if (e)
8628 blocks_split++;
8629 }
8630 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8631 e->probability = profile_probability::guessed_never ();
8632 }
8633 gsi_prev (&gsi);
8634 }
8635 while (!gsi_end_p (gsi));
8636 }
8637 }
8638
8639 if (blocks_split)
8640 checking_verify_flow_info ();
8641
8642 return blocks_split;
8643 }
8644
8645 /* Removes edge E and all the blocks dominated by it, and updates dominance
8646 information. The IL in E->src needs to be updated separately.
8647 If dominance info is not available, only the edge E is removed.  */
8648
8649 void
8650 remove_edge_and_dominated_blocks (edge e)
8651 {
8652 vec<basic_block> bbs_to_remove = vNULL;
8653 vec<basic_block> bbs_to_fix_dom = vNULL;
8654 edge f;
8655 edge_iterator ei;
8656 bool none_removed = false;
8657 unsigned i;
8658 basic_block bb, dbb;
8659 bitmap_iterator bi;
8660
8661 /* Removing a path inside a non-root loop may change the loop ownership
8662 of blocks or remove loops entirely; mark loops for fixup.  */
8663 if (current_loops
8664 && loop_outer (e->src->loop_father) != NULL
8665 && e->src->loop_father == e->dest->loop_father)
8666 loops_state_set (LOOPS_NEED_FIXUP);
8667
8668 if (!dom_info_available_p (CDI_DOMINATORS))
8669 {
8670 remove_edge (e);
8671 return;
8672 }
8673
8674 /* No updating is needed for edges to exit. */
8675 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8676 {
8677 if (cfgcleanup_altered_bbs)
8678 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8679 remove_edge (e);
8680 return;
8681 }
8682
8683 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8684 that is not dominated by E->dest, then this set is empty. Otherwise,
8685 all the basic blocks dominated by E->dest are removed.
8686
8687 Also, to DF_IDOM we store the immediate dominators of the blocks in
8688 the dominance frontier of E (i.e., of the successors of the
8689 removed blocks, if there are any, and of E->dest otherwise). */
8690 FOR_EACH_EDGE (f, ei, e->dest->preds)
8691 {
8692 if (f == e)
8693 continue;
8694
8695 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8696 {
8697 none_removed = true;
8698 break;
8699 }
8700 }
8701
8702 auto_bitmap df, df_idom;
8703 if (none_removed)
8704 bitmap_set_bit (df_idom,
8705 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8706 else
8707 {
8708 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8709 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8710 {
8711 FOR_EACH_EDGE (f, ei, bb->succs)
8712 {
8713 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8714 bitmap_set_bit (df, f->dest->index);
8715 }
8716 }
8717 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8718 bitmap_clear_bit (df, bb->index);
8719
8720 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8721 {
8722 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8723 bitmap_set_bit (df_idom,
8724 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8725 }
8726 }
8727
8728 if (cfgcleanup_altered_bbs)
8729 {
8730 /* Record the set of the altered basic blocks. */
8731 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8732 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8733 }
8734
8735 /* Remove E and the cancelled blocks. */
8736 if (none_removed)
8737 remove_edge (e);
8738 else
8739 {
8740 /* Walk backwards so as to get a chance to substitute all
8741 released DEFs into debug stmts. See
8742 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8743 details. */
8744 for (i = bbs_to_remove.length (); i-- > 0; )
8745 delete_basic_block (bbs_to_remove[i]);
8746 }
8747
8748 /* Update the dominance information. The immediate dominator may change only
8749 for blocks whose immediate dominator belongs to DF_IDOM:
8750
8751 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8752 removal.  Let Z be an arbitrary block such that idom(Z) = Y and
8753 Z dominates X after the removal. Before removal, there exists a path P
8754 from Y to X that avoids Z. Let F be the last edge on P that is
8755 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8756 dominates W, and because of P, Z does not dominate W), and W belongs to
8757 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8758 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8759 {
8760 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8761 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8762 dbb;
8763 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8764 bbs_to_fix_dom.safe_push (dbb);
8765 }
8766
8767 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8768
8769 bbs_to_remove.release ();
8770 bbs_to_fix_dom.release ();
8771 }
8772
8773 /* Purge dead EH edges from basic block BB. */
8774
8775 bool
8776 gimple_purge_dead_eh_edges (basic_block bb)
8777 {
8778 bool changed = false;
8779 edge e;
8780 edge_iterator ei;
8781 gimple *stmt = last_stmt (bb);
8782
8783 if (stmt && stmt_can_throw_internal (cfun, stmt))
8784 return false;
8785
8786 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8787 {
8788 if (e->flags & EDGE_EH)
8789 {
8790 remove_edge_and_dominated_blocks (e);
8791 changed = true;
8792 }
8793 else
8794 ei_next (&ei);
8795 }
8796
8797 return changed;
8798 }
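/* A typical caller pattern (a sketch): after folding has turned a
   potentially-throwing statement in BB into one that cannot throw,

     if (gimple_purge_dead_eh_edges (bb))
       cfg_changed = true;

   removes the EH successor edges, and any blocks only they reached,
   that are now dead.  */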
8799
8800 /* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
8801
8802 bool
8803 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8804 {
8805 bool changed = false;
8806 unsigned i;
8807 bitmap_iterator bi;
8808
8809 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8810 {
8811 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8812
8813 /* Earlier gimple_purge_dead_eh_edges could have removed
8814 this basic block already. */
8815 gcc_assert (bb || changed);
8816 if (bb != NULL)
8817 changed |= gimple_purge_dead_eh_edges (bb);
8818 }
8819
8820 return changed;
8821 }
8822
8823 /* Purge dead abnormal call edges from basic block BB. */
8824
8825 bool
8826 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8827 {
8828 bool changed = false;
8829 edge e;
8830 edge_iterator ei;
8831 gimple *stmt = last_stmt (bb);
8832
8833 if (!cfun->has_nonlocal_label
8834 && !cfun->calls_setjmp)
8835 return false;
8836
8837 if (stmt && stmt_can_make_abnormal_goto (stmt))
8838 return false;
8839
8840 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8841 {
8842 if (e->flags & EDGE_ABNORMAL)
8843 {
8844 if (e->flags & EDGE_FALLTHRU)
8845 e->flags &= ~EDGE_ABNORMAL;
8846 else
8847 remove_edge_and_dominated_blocks (e);
8848 changed = true;
8849 }
8850 else
8851 ei_next (&ei);
8852 }
8853
8854 return changed;
8855 }
8856
8857 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8858
8859 bool
8860 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8861 {
8862 bool changed = false;
8863 unsigned i;
8864 bitmap_iterator bi;
8865
8866 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8867 {
8868 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8869
8870 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8871 this basic block already. */
8872 gcc_assert (bb || changed);
8873 if (bb != NULL)
8874 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8875 }
8876
8877 return changed;
8878 }
8879
8880 /* This function is called whenever a new edge is created or
8881 redirected. */
8882
8883 static void
8884 gimple_execute_on_growing_pred (edge e)
8885 {
8886 basic_block bb = e->dest;
8887
8888 if (!gimple_seq_empty_p (phi_nodes (bb)))
8889 reserve_phi_args_for_new_edge (bb);
8890 }
8891
8892 /* This function is called immediately before edge E is removed from
8893 the edge vector E->dest->preds. */
8894
8895 static void
8896 gimple_execute_on_shrinking_pred (edge e)
8897 {
8898 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8899 remove_phi_args (e);
8900 }
8901
8902 /*---------------------------------------------------------------------------
8903 Helper functions for Loop versioning
8904 ---------------------------------------------------------------------------*/
8905
8906 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
8907 of 'first'.  Both of them are dominated by 'new_head' basic block.  When
8908 'new_head' was created by splitting 'second's incoming edge, the edge from
8909 'new_head' to 'second' received phi arguments from split_edge().  Later, an
8910 additional edge 'e' was created to connect 'new_head' and 'first'.  This
8911 routine now adds on edge 'e' the same phi args that the new_head-to-second
8912 edge received as part of the edge splitting.  */
8913
8914 static void
8915 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8916 basic_block new_head, edge e)
8917 {
8918 gphi *phi1, *phi2;
8919 gphi_iterator psi1, psi2;
8920 tree def;
8921 edge e2 = find_edge (new_head, second);
8922
8923 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8924 edge, we should always have an edge from NEW_HEAD to SECOND. */
8925 gcc_assert (e2 != NULL);
8926
8927 /* Browse all 'second' basic block phi nodes and add phi args to
8928 edge 'e' for 'first' head. PHI args are always in correct order. */
8929
8930 for (psi2 = gsi_start_phis (second),
8931 psi1 = gsi_start_phis (first);
8932 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8933 gsi_next (&psi2), gsi_next (&psi1))
8934 {
8935 phi1 = psi1.phi ();
8936 phi2 = psi2.phi ();
8937 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8938 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8939 }
8940 }
8941
8942
8943 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8944 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8945 the destination of the ELSE part.  */
8946
8947 static void
8948 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8949 basic_block second_head ATTRIBUTE_UNUSED,
8950 basic_block cond_bb, void *cond_e)
8951 {
8952 gimple_stmt_iterator gsi;
8953 gimple *new_cond_expr;
8954 tree cond_expr = (tree) cond_e;
8955 edge e0;
8956
8957 /* Build the new conditional expr.  */
8958 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8959 NULL_TREE, NULL_TREE);
8960
8961 /* Add new cond in cond_bb. */
8962 gsi = gsi_last_bb (cond_bb);
8963 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8964
8965 /* Adjust edges appropriately to connect new head with first head
8966 as well as second head. */
8967 e0 = single_succ_edge (cond_bb);
8968 e0->flags &= ~EDGE_FALLTHRU;
8969 e0->flags |= EDGE_FALSE_VALUE;
8970 }
8971
8972
8973 /* Do book-keeping of basic block BB for the profile consistency checker.
8974 Store the counts in RECORD.  */
8975 static void
8976 gimple_account_profile_record (basic_block bb,
8977 struct profile_record *record)
8978 {
8979 gimple_stmt_iterator i;
8980 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8981 {
8982 record->size
8983 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8984 if (bb->count.initialized_p ())
8985 record->time
8986 += estimate_num_insns (gsi_stmt (i),
8987 &eni_time_weights) * bb->count.to_gcov_type ();
8988 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8989 record->time
8990 += estimate_num_insns (gsi_stmt (i),
8991 &eni_time_weights) * bb->count.to_frequency (cfun);
8992 }
8993 }
8994
8995 struct cfg_hooks gimple_cfg_hooks = {
8996 "gimple",
8997 gimple_verify_flow_info,
8998 gimple_dump_bb, /* dump_bb */
8999 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9000 create_bb, /* create_basic_block */
9001 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9002 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9003 gimple_can_remove_branch_p, /* can_remove_branch_p */
9004 remove_bb, /* delete_basic_block */
9005 gimple_split_block, /* split_block */
9006 gimple_move_block_after, /* move_block_after */
9007 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9008 gimple_merge_blocks, /* merge_blocks */
9009 gimple_predict_edge, /* predict_edge */
9010 gimple_predicted_by_p, /* predicted_by_p */
9011 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9012 gimple_duplicate_bb, /* duplicate_block */
9013 gimple_split_edge, /* split_edge */
9014 gimple_make_forwarder_block, /* make_forward_block */
9015 NULL, /* tidy_fallthru_edge */
9016 NULL, /* force_nonfallthru */
9017 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9018 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9019 gimple_flow_call_edges_add, /* flow_call_edges_add */
9020 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9021 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9022 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
9023 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9024 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9025 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9026 flush_pending_stmts, /* flush_pending_stmts */
9027 gimple_empty_block_p, /* block_empty_p */
9028 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9029 gimple_account_profile_record,
9030 };
9031
9032
9033 /* Split all critical edges. Split some extra (not necessarily critical) edges
9034 if FOR_EDGE_INSERTION_P is true. */
9035
9036 unsigned int
9037 split_critical_edges (bool for_edge_insertion_p /* = false */)
9038 {
9039 basic_block bb;
9040 edge e;
9041 edge_iterator ei;
9042
9043 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9044 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9045 mappings around the calls to split_edge. */
9046 start_recording_case_labels ();
9047 FOR_ALL_BB_FN (bb, cfun)
9048 {
9049 FOR_EACH_EDGE (e, ei, bb->succs)
9050 {
9051 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9052 split_edge (e);
9053 /* PRE inserts statements on edges and expects that,
9054 since split_critical_edges was done beforehand, committing edge
9055 insertions will not split more edges.  In addition to critical
9056 edges we must split edges whose destination has multiple
9057 predecessors and whose source ends with a control flow statement,
9058 such as RESX.  Go ahead and split them too.  This matches the logic
9059 in gimple_find_edge_insert_loc.  */
9060 else if (for_edge_insertion_p
9061 && (!single_pred_p (e->dest)
9062 || !gimple_seq_empty_p (phi_nodes (e->dest))
9063 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9064 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9065 && !(e->flags & EDGE_ABNORMAL))
9066 {
9067 gimple_stmt_iterator gsi;
9068
9069 gsi = gsi_last_bb (e->src);
9070 if (!gsi_end_p (gsi)
9071 && stmt_ends_bb_p (gsi_stmt (gsi))
9072 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9073 && !gimple_call_builtin_p (gsi_stmt (gsi),
9074 BUILT_IN_RETURN)))
9075 split_edge (e);
9076 }
9077 }
9078 }
9079 end_recording_case_labels ();
9080 return 0;
9081 }
9082
9083 namespace {
9084
9085 const pass_data pass_data_split_crit_edges =
9086 {
9087 GIMPLE_PASS, /* type */
9088 "crited", /* name */
9089 OPTGROUP_NONE, /* optinfo_flags */
9090 TV_TREE_SPLIT_EDGES, /* tv_id */
9091 PROP_cfg, /* properties_required */
9092 PROP_no_crit_edges, /* properties_provided */
9093 0, /* properties_destroyed */
9094 0, /* todo_flags_start */
9095 0, /* todo_flags_finish */
9096 };
9097
9098 class pass_split_crit_edges : public gimple_opt_pass
9099 {
9100 public:
9101 pass_split_crit_edges (gcc::context *ctxt)
9102 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9103 {}
9104
9105 /* opt_pass methods: */
9106 virtual unsigned int execute (function *) { return split_critical_edges (); }
9107
9108 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9109 }; // class pass_split_crit_edges
9110
9111 } // anon namespace
9112
9113 gimple_opt_pass *
9114 make_pass_split_crit_edges (gcc::context *ctxt)
9115 {
9116 return new pass_split_crit_edges (ctxt);
9117 }
9118
9119
9120 /* Insert COND expression which is GIMPLE_COND after STMT
9121 in basic block BB with appropriate basic block split
9122 and creation of a new conditionally executed basic block.
9123 Update profile so the new bb is visited with probability PROB.
9124 Return created basic block. */
9125 basic_block
9126 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9127 profile_probability prob)
9128 {
9129 edge fall = split_block (bb, stmt);
9130 gimple_stmt_iterator iter = gsi_last_bb (bb);
9131 basic_block new_bb;
9132
9133 /* Insert cond statement. */
9134 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9135 if (gsi_end_p (iter))
9136 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9137 else
9138 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9139
9140 /* Create conditionally executed block. */
9141 new_bb = create_empty_bb (bb);
9142 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9143 e->probability = prob;
9144 new_bb->count = e->count ();
9145 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9146
9147 /* Fix edge for split bb. */
9148 fall->flags = EDGE_FALSE_VALUE;
9149 fall->probability -= e->probability;
9150
9151 /* Update dominance info. */
9152 if (dom_info_available_p (CDI_DOMINATORS))
9153 {
9154 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9155 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9156 }
9157
9158 /* Update loop info. */
9159 if (current_loops)
9160 add_bb_to_loop (new_bb, bb->loop_father);
9161
9162 return new_bb;
9163 }
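/* A minimal usage sketch (BB, STMT and VAL are hypothetical; COND must
   not have been inserted anywhere yet):

     gcond *cond = gimple_build_cond (NE_EXPR, val, null_pointer_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond, profile_probability::unlikely ());

   Statements added to THEN_BB afterwards execute only when VAL is
   non-NULL and fall through to the block that originally followed
   STMT.  */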
9164
9165 /* Build a ternary operation and gimplify it. Emit code before GSI.
9166 Return the gimple_val holding the result. */
9167
9168 tree
9169 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9170 tree type, tree a, tree b, tree c)
9171 {
9172 tree ret;
9173 location_t loc = gimple_location (gsi_stmt (*gsi));
9174
9175 ret = fold_build3_loc (loc, code, type, a, b, c);
9176 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9177 GSI_SAME_STMT);
9178 }
9179
9180 /* Build a binary operation and gimplify it. Emit code before GSI.
9181 Return the gimple_val holding the result. */
9182
9183 tree
9184 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9185 tree type, tree a, tree b)
9186 {
9187 tree ret;
9188
9189 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9190 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9191 GSI_SAME_STMT);
9192 }
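/* E.g. a pass that needs OFF + LEN as a gimple value before *GSI (OFF
   and LEN being hypothetical sizetype operands) can write

     tree sum = gimplify_build2 (gsi, PLUS_EXPR, sizetype, off, len);

   Any statements required to compute the result are inserted before
   the statement at *GSI, and SUM is valid as a gimple operand.  */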
9193
9194 /* Build a unary operation and gimplify it. Emit code before GSI.
9195 Return the gimple_val holding the result. */
9196
9197 tree
9198 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9199 tree a)
9200 {
9201 tree ret;
9202
9203 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9204 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9205 GSI_SAME_STMT);
9206 }
9207
9208
9209 \f
9210 /* Given a basic block B which ends with a conditional and has
9211 precisely two successors, determine which of the edges is taken if
9212 the conditional is true and which is taken if the conditional is
9213 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9214
9215 void
9216 extract_true_false_edges_from_block (basic_block b,
9217 edge *true_edge,
9218 edge *false_edge)
9219 {
9220 edge e = EDGE_SUCC (b, 0);
9221
9222 if (e->flags & EDGE_TRUE_VALUE)
9223 {
9224 *true_edge = e;
9225 *false_edge = EDGE_SUCC (b, 1);
9226 }
9227 else
9228 {
9229 *false_edge = e;
9230 *true_edge = EDGE_SUCC (b, 1);
9231 }
9232 }
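/* A typical use (a sketch; COND_BB is assumed to end in a GIMPLE_COND):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   after which true_edge->dest is the block reached when the condition
   holds and false_edge->dest the block reached otherwise.  */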
9233
9234
9235 /* From a controlling predicate in the immediate dominator DOM of
9236 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9237 predicate evaluates to true and false and store them to
9238 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9239 they are non-NULL.  Return true if the edges can be determined,
9240 false otherwise.  */
9241
9242 bool
9243 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9244 edge *true_controlled_edge,
9245 edge *false_controlled_edge)
9246 {
9247 basic_block bb = phiblock;
9248 edge true_edge, false_edge, tem;
9249 edge e0 = NULL, e1 = NULL;
9250
9251 /* We have to verify that one edge into the PHI node is dominated
9252 by the true edge of the predicate block and the other edge
9253 dominated by the false edge. This ensures that the PHI argument
9254 we are going to take is completely determined by the path we
9255 take from the predicate block.
9256 We can only use BB dominance checks below if the destination of
9257 the true/false edges are dominated by their edge, thus only
9258 have a single predecessor. */
9259 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9260 tem = EDGE_PRED (bb, 0);
9261 if (tem == true_edge
9262 || (single_pred_p (true_edge->dest)
9263 && (tem->src == true_edge->dest
9264 || dominated_by_p (CDI_DOMINATORS,
9265 tem->src, true_edge->dest))))
9266 e0 = tem;
9267 else if (tem == false_edge
9268 || (single_pred_p (false_edge->dest)
9269 && (tem->src == false_edge->dest
9270 || dominated_by_p (CDI_DOMINATORS,
9271 tem->src, false_edge->dest))))
9272 e1 = tem;
9273 else
9274 return false;
9275 tem = EDGE_PRED (bb, 1);
9276 if (tem == true_edge
9277 || (single_pred_p (true_edge->dest)
9278 && (tem->src == true_edge->dest
9279 || dominated_by_p (CDI_DOMINATORS,
9280 tem->src, true_edge->dest))))
9281 e0 = tem;
9282 else if (tem == false_edge
9283 || (single_pred_p (false_edge->dest)
9284 && (tem->src == false_edge->dest
9285 || dominated_by_p (CDI_DOMINATORS,
9286 tem->src, false_edge->dest))))
9287 e1 = tem;
9288 else
9289 return false;
9290 if (!e0 || !e1)
9291 return false;
9292
9293 if (true_controlled_edge)
9294 *true_controlled_edge = e0;
9295 if (false_controlled_edge)
9296 *false_controlled_edge = e1;
9297
9298 return true;
9299 }
9300
9301 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9302 range [LOW, HIGH].  Place the associated stmts before the last stmt of BB.  */
9303
9304 void
9305 generate_range_test (basic_block bb, tree index, tree low, tree high,
9306 tree *lhs, tree *rhs)
9307 {
9308 tree type = TREE_TYPE (index);
9309 tree utype = range_check_type (type);
9310
9311 low = fold_convert (utype, low);
9312 high = fold_convert (utype, high);
9313
9314 gimple_seq seq = NULL;
9315 index = gimple_convert (&seq, utype, index);
9316 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9317 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9318
9319 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9320 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9321 }
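/* The emitted test is the classical unsigned range-check trick: in the
   unsigned type UTYPE,

     LOW <= INDEX && INDEX <= HIGH

   holds iff (UTYPE) INDEX - (UTYPE) LOW <= (UTYPE) HIGH - (UTYPE) LOW,
   so a caller only needs the single comparison *LHS <= *RHS.  */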
9322
9323 /* Return the basic block that belongs to label numbered INDEX
9324 of a switch statement. */
9325
9326 basic_block
9327 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9328 {
9329 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9330 }
9331
9332 /* Return the default basic block of a switch statement. */
9333
9334 basic_block
9335 gimple_switch_default_bb (function *ifun, gswitch *gs)
9336 {
9337 return gimple_switch_label_bb (ifun, gs, 0);
9338 }
9339
9340 /* Return the edge that belongs to label numbered INDEX
9341 of a switch statement. */
9342
9343 edge
9344 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9345 {
9346 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9347 }
9348
9349 /* Return the default edge of a switch statement. */
9350
9351 edge
9352 gimple_switch_default_edge (function *ifun, gswitch *gs)
9353 {
9354 return gimple_switch_edge (ifun, gs, 0);
9355 }
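/* A sketch of how these helpers combine (GS being a gswitch in CFUN):
   index 0 always denotes the default label, so the case edges proper
   can be walked as

     for (unsigned i = 1; i < gimple_switch_num_labels (gs); ++i)
       {
         edge e = gimple_switch_edge (cfun, gs, i);
         ...
       }  */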
9356
9357
9358 /* Emit return warnings. */
9359
9360 namespace {
9361
9362 const pass_data pass_data_warn_function_return =
9363 {
9364 GIMPLE_PASS, /* type */
9365 "*warn_function_return", /* name */
9366 OPTGROUP_NONE, /* optinfo_flags */
9367 TV_NONE, /* tv_id */
9368 PROP_cfg, /* properties_required */
9369 0, /* properties_provided */
9370 0, /* properties_destroyed */
9371 0, /* todo_flags_start */
9372 0, /* todo_flags_finish */
9373 };
9374
9375 class pass_warn_function_return : public gimple_opt_pass
9376 {
9377 public:
9378 pass_warn_function_return (gcc::context *ctxt)
9379 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9380 {}
9381
9382 /* opt_pass methods: */
9383 virtual unsigned int execute (function *);
9384
9385 }; // class pass_warn_function_return
9386
9387 unsigned int
9388 pass_warn_function_return::execute (function *fun)
9389 {
9390 location_t location;
9391 gimple *last;
9392 edge e;
9393 edge_iterator ei;
9394
9395 if (!targetm.warn_func_return (fun->decl))
9396 return 0;
9397
9398 /* If we have a path to EXIT, then we do return. */
9399 if (TREE_THIS_VOLATILE (fun->decl)
9400 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9401 {
9402 location = UNKNOWN_LOCATION;
9403 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9404 (e = ei_safe_edge (ei)); )
9405 {
9406 last = last_stmt (e->src);
9407 if ((gimple_code (last) == GIMPLE_RETURN
9408 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9409 && location == UNKNOWN_LOCATION
9410 && ((location = LOCATION_LOCUS (gimple_location (last)))
9411 != UNKNOWN_LOCATION)
9412 && !optimize)
9413 break;
9414 /* When optimizing, replace return stmts in noreturn functions
9415 with a __builtin_unreachable () call.  */
9416 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9417 {
9418 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9419 gimple *new_stmt = gimple_build_call (fndecl, 0);
9420 gimple_set_location (new_stmt, gimple_location (last));
9421 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9422 gsi_replace (&gsi, new_stmt, true);
9423 remove_edge (e);
9424 }
9425 else
9426 ei_next (&ei);
9427 }
9428 if (location == UNKNOWN_LOCATION)
9429 location = cfun->function_end_locus;
9430 warning_at (location, 0, "%<noreturn%> function does return");
9431 }
9432
9433 /* If we see "return;" in some basic block, then we do reach the end
9434 without returning a value. */
9435 else if (warn_return_type > 0
9436 && !TREE_NO_WARNING (fun->decl)
9437 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9438 {
9439 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9440 {
9441 gimple *last = last_stmt (e->src);
9442 greturn *return_stmt = dyn_cast <greturn *> (last);
9443 if (return_stmt
9444 && gimple_return_retval (return_stmt) == NULL
9445 && !gimple_no_warning_p (last))
9446 {
9447 location = gimple_location (last);
9448 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9449 location = fun->function_end_locus;
9450 if (warning_at (location, OPT_Wreturn_type,
9451 "control reaches end of non-void function"))
9452 TREE_NO_WARNING (fun->decl) = 1;
9453 break;
9454 }
9455 }
9456 /* The C++ FE turns fallthrough from the end of a non-void function
9457 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9458 Recognize those too.  */
9459 basic_block bb;
9460 if (!TREE_NO_WARNING (fun->decl))
9461 FOR_EACH_BB_FN (bb, fun)
9462 if (EDGE_COUNT (bb->succs) == 0)
9463 {
9464 gimple *last = last_stmt (bb);
9465 const enum built_in_function ubsan_missing_ret
9466 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9467 if (last
9468 && ((LOCATION_LOCUS (gimple_location (last))
9469 == BUILTINS_LOCATION
9470 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9471 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9472 {
9473 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9474 gsi_prev_nondebug (&gsi);
9475 gimple *prev = gsi_stmt (gsi);
9476 if (prev == NULL)
9477 location = UNKNOWN_LOCATION;
9478 else
9479 location = gimple_location (prev);
9480 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9481 location = fun->function_end_locus;
9482 if (warning_at (location, OPT_Wreturn_type,
9483 "control reaches end of non-void function"))
9484 TREE_NO_WARNING (fun->decl) = 1;
9485 break;
9486 }
9487 }
9488 }
9489 return 0;
9490 }
9491
9492 } // anon namespace
9493
9494 gimple_opt_pass *
9495 make_pass_warn_function_return (gcc::context *ctxt)
9496 {
9497 return new pass_warn_function_return (ctxt);
9498 }
9499
9500 /* Walk a gimplified function and warn about calls whose return value is
9501 ignored when the called function has attribute((warn_unused_result))
9502 set.  This is done before inlining, so we don't have to worry about that.  */
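/* For example (F and G are hypothetical):

     __attribute__((warn_unused_result)) int f (void);
     void g (void) { f (); }

   leaves the call to f () as a GIMPLE_CALL without an LHS, which the
   walk below reports with -Wunused-result.  */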

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (
                                   as_a <gcatch *> (g)));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD "
                            "declared with attribute %<warn_unused_result%>",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute %<warn_unused_result%>");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}

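/* An illustrative sketch, with hypothetical user code, of what the walk
   above catches:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void g (void)
     {
       must_check ();
     }

   The call to must_check is gimplified as a GIMPLE_CALL without an LHS
   and is not an internal call, and the attribute is found on the
   function type, so -Wunused-result fires at the call's location.  */
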
namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions, or inlining might have
   changed some properties, such as marking functions nothrow, pure,
   const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.

   This pass can't be executed as a standalone pass from the pass
   manager, because in between inlining and this fixup verify_flow_info
   would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* Same scaling is also done by ipa_merge_profiles.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
        = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }
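
  /* For example (hypothetical numbers): if IPA profile merging updated
     NODE->count to 50 while the local entry count is still 100, NUM/DEN
     becomes 50/100 and every block count below is halved to match the
     new entry count.  */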

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
        bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
          gimple *stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          /* Remove stores to variables we marked write-only.
             Keep the access when the store has side effects, i.e. when
             the source is volatile.  */
          if (gimple_store_p (stmt)
              && !gimple_has_side_effects (stmt)
              && !optimize_debug)
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  unlink_stmt_vdef (stmt);
                  gsi_remove (&gsi, true);
                  release_defs (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                  continue;
                }
            }
          /* For calls we can simply remove the LHS when it is known
             to be write-only.  */
          if (is_gimple_call (stmt)
              && gimple_get_lhs (stmt))
            {
              tree lhs = get_base_address (gimple_get_lhs (stmt));

              if (VAR_P (lhs)
                  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
                  && varpool_node::get (lhs)->writeonly)
                {
                  gimple_call_set_lhs (stmt, NULL);
                  update_stmt (stmt);
                  todo |= TODO_update_ssa | TODO_cleanup_cfg;
                }
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
          gsi_next (&gsi);
        }

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count);
                }
            }
        }
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

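/* A sketch of the situation fixed up above; the code is hypothetical
   user input, not part of GCC:

     __attribute__ ((noreturn)) void noret (void);
     void noret (void) { }

   NORET erroneously returns.  After it is inlined, the caller is left
   with a block that has no successors but whose last statement is
   neither a control statement nor a noreturn call, so the loop above
   terminates that block with a call to __builtin_unreachable.  */
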
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.  Each BB in our simple chain
     should be dominated by the one before it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
     EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A, B, C and D below all have edges
   pointing to every other node, and also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself
   - etc  */

#endif /* CHECKING_P */