/* Full and partial redundancy elimination and code hoisting on SSA GIMPLE.
   Copyright (C) 2001-2021 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-dce.h"
#include "tree-cfgcleanup.h"
#include "alias.h"

/* Even though this file is called tree-ssa-pre.c, we actually
   implement a bit more than just PRE here.  All of them piggy-back
   on GVN which is implemented in tree-ssa-sccvn.c.

     1. Full Redundancy Elimination (FRE)
        This is the elimination phase of GVN.

     2. Partial Redundancy Elimination (PRE)
        This adds computation of AVAIL_OUT and ANTIC_IN and
        does expression insertion to form GVN-PRE.

     3. Code hoisting
        This optimization uses the ANTIC_IN sets computed for PRE
        to move expressions further up than PRE would do, to make
        multiple computations of the same value fully redundant.
        This pass is explained below (after the explanation of the
        basic algorithm for PRE).
*/

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
      Currently the AVAIL_OUT sets are the remaining quadraticness in
      memory of GVN-PRE.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm for Partial Redundancy Elimination:

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to insert a
   computation of the value of that expression in that block, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux perform this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

     1. It is AVAIL in some, but not all, of the predecessors of a
        given block.
     2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   When optimizing for size, we only eliminate the partial redundancy
   if we need to insert in only one predecessor.  This avoids almost
   completely the code size increase that PRE usually causes.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   do_pre_regular_insertion/do_pre_partial_partial_insertion
   perform these steps, driven by insert/insert_aux.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */

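/* As a small made-up illustration (not taken from the testsuite),
   consider the diamond

       if (...)
         bb2: x_1 = a_2 + b_3;
       else
         bb3: ;
       bb4: y_4 = a_2 + b_3;

   The value of a_2 + b_3 is in AVAIL_OUT (bb2) but not in
   AVAIL_OUT (bb3), and it is ANTIC in both predecessors, so the
   computation of y_4 is partially redundant.  Insertion adds
   pretmp_5 = a_2 + b_3 to bb3 and merges the two availabilities in
   bb4 with prephitmp_6 = PHI <x_1(bb2), pretmp_5(bb3)>, after which
   elimination replaces the redundant computation of y_4 by a copy
   from prephitmp_6.  */
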
/* Basic algorithm for Code Hoisting:

   Code hoisting is: Moving value computations up in the control flow
   graph to make multiple copies redundant.  Typically this is a size
   optimization, but there are cases where it is also helpful for speed.

   A simple code hoisting algorithm is implemented that piggy-backs on
   the PRE infrastructure.  For code hoisting, we have to know ANTIC_OUT
   which is effectively ANTIC_IN - AVAIL_OUT.  The latter two have to be
   computed for PRE, and we can use them to perform a limited version of
   code hoisting, too.

   For the purpose of this implementation, a value is hoistable to a basic
   block B if the following properties are met:

     1. The value is in ANTIC_IN(B) -- the value will be computed on all
        paths from B to function exit and it can be computed in B;

     2. The value is not in AVAIL_OUT(B) -- there would be no need to
        compute the value again and make it available twice;

     3. All successors of B are dominated by B -- makes sure that inserting
        a computation of the value in B will make the remaining
        computations fully redundant;

     4. At least one successor has the value in AVAIL_OUT -- to avoid
        hoisting values up too far;

     5. There are at least two successors of B -- hoisting in straight
        line code is pointless.

   The third condition is not strictly necessary, but it would complicate
   the hoisting pass a lot.  In fact, I don't know of any code hoisting
   algorithm that does not have this requirement.  Fortunately, experiments
   have shown that most candidate hoistable values are in regions that meet
   this condition (e.g. diamond-shape regions).

   The fourth condition is necessary to avoid hoisting things up too far
   away from the uses of the value.  Nothing else limits the algorithm
   from hoisting everything up as far as ANTIC_IN allows.  Experiments
   with SPEC and CSiBE have shown that hoisting up too far results in more
   spilling, less benefit for code size, and worse benchmark scores.
   Fortunately, in practice most of the interesting hoisting opportunities
   are caught despite this limitation.

   For hoistable values that meet all conditions, expressions are inserted
   to make the calculation of the hoistable value fully redundant.  We
   perform code hoisting insertions after each round of PRE insertions,
   because code hoisting never exposes new PRE opportunities, but PRE can
   create new code hoisting opportunities.

   The code hoisting algorithm is implemented in do_hoist_insert, driven
   by insert/insert_aux.  */

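/* As a made-up illustration of the hoisting conditions, consider

       bb1: if (...)
       bb2:   x_1 = a_2 + b_3;
       bb3:   y_4 = a_2 + b_3;
       bb4: ...

   a_2 + b_3 is in ANTIC_IN (bb1) but not in AVAIL_OUT (bb1), bb1
   dominates both of its (two) successors, and each successor computes
   the value itself and thus has it in its AVAIL_OUT, so all five
   conditions hold.  Inserting pretmp_5 = a_2 + b_3 at the end of bb1
   makes the computations in bb2 and bb3 fully redundant, and the
   elimination phase replaces them with copies from pretmp_5.  */
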
/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

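/* A made-up example: given x_1 = a_2 + b_3 and y_4 = a_2 + b_3, the
   names x_1 and y_4 and the expression a_2 + b_3 all get the same
   value number, represented by (say) the SSA_NAME x_1, and share one
   small value_id, say 7, so the dataflow sets below only need to
   track the single bit 7 for this value.  */
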
/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */

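/* Continuing the made-up example above, a set holding x_1 (value 7)
   and a_2 + b_3 (value 9) is stored as the values bitmap { 7, 9 }
   plus an expressions bitmap with the two expression ids that
   alloc_expression_id below assigned to x_1 and a_2 + b_3.  */
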
/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

union pre_expr_union
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
};

typedef struct pre_expr_d : nofree_ptr_hash <pre_expr_d>
{
  enum pre_expr_kind kind;
  unsigned int id;
  unsigned value_id;
  location_t loc;
  pre_expr_union u;

  /* hash_table support.  */
  static inline hashval_t hash (const pre_expr_d *);
  static inline int equal (const pre_expr_d *, const pre_expr_d *);
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

/* Compare E1 and E2 for equality.  */

inline int
pre_expr_d::equal (const pre_expr_d *e1, const pre_expr_d *e2)
{
  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

/* Hash E.  */

inline hashval_t
pre_expr_d::hash (const pre_expr_d *e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}

/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
static vec<pre_expr> expressions;
static hash_table<pre_expr_d> *expression_to_id;
static vec<unsigned> name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  struct pre_expr_d **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  expressions.safe_push (expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* vec::safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using vec::reserve upfront.  */
      unsigned old_len = name_to_id.length ();
      name_to_id.reserve (num_ssa_names - old_len);
      name_to_id.quick_grow_cleared (num_ssa_names);
      gcc_assert (name_to_id[version] == 0);
      name_to_id[version] = expr->id;
    }
  else
    {
      slot = expression_to_id->find_slot (expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  struct pre_expr_d **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (name_to_id.length () <= version)
        return 0;
      return name_to_id[version];
    }
  else
    {
      slot = expression_to_id->find_slot (expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return expressions[id];
}

static object_allocator<pre_expr_d> pre_expr_pool ("pre_expr nodes");

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NAME;
  result->loc = UNKNOWN_LOCATION;
  result->value_id = VN_INFO (name)->value_id;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

/* Given a NARY, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_nary (vn_nary_op_t nary,
                            location_t loc = UNKNOWN_LOCATION)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NARY;
  expr.id = 0;
  PRE_EXPR_NARY (&expr) = nary;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = NARY;
  result->loc = loc;
  result->value_id = nary->value_id;
  PRE_EXPR_NARY (result) = nary;
  alloc_expression_id (result);
  return result;
}

/* Given a REFERENCE, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_reference (vn_reference_t reference,
                                 location_t loc = UNKNOWN_LOCATION)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = REFERENCE;
  expr.id = 0;
  PRE_EXPR_REFERENCE (&expr) = reference;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = pre_expr_pool.allocate ();
  result->kind = REFERENCE;
  result->loc = loc;
  result->value_id = reference->value_id;
  PRE_EXPR_REFERENCE (result) = reference;
  alloc_expression_id (result);
  return result;
}


/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef class bitmap_set
{
public:
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP (&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
static vec<bitmap> value_expressions;
/* We just record a single expression for each constant value,
   one of kind CONSTANT.  */
static vec<pre_expr> constant_value_expressions;


/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of inserts made for code hoisting.  */
  int hoist_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;
} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static bool bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         tree);
static tree find_or_generate_expression (basic_block, tree, gimple_seq *);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static object_allocator<bitmap_set> bitmap_set_pool ("Bitmap sets");
static bitmap_obstack grand_bitmap_obstack;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d : public typed_noop_remove <expr_pred_trans_d>
{
  typedef expr_pred_trans_d value_type;
  typedef expr_pred_trans_d compare_type;

  /* The expression ID.  */
  unsigned e;

  /* The value expression ID that resulted from the translation.  */
  unsigned v;

  /* hash_table support.  */
  static inline void mark_empty (expr_pred_trans_d &);
  static inline bool is_empty (const expr_pred_trans_d &);
  static inline void mark_deleted (expr_pred_trans_d &);
  static inline bool is_deleted (const expr_pred_trans_d &);
  static const bool empty_zero_p = true;
  static inline hashval_t hash (const expr_pred_trans_d &);
  static inline int equal (const expr_pred_trans_d &, const expr_pred_trans_d &);
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

inline bool
expr_pred_trans_d::is_empty (const expr_pred_trans_d &e)
{
  return e.e == 0;
}

inline bool
expr_pred_trans_d::is_deleted (const expr_pred_trans_d &e)
{
  return e.e == -1u;
}

inline void
expr_pred_trans_d::mark_empty (expr_pred_trans_d &e)
{
  e.e = 0;
}

inline void
expr_pred_trans_d::mark_deleted (expr_pred_trans_d &e)
{
  e.e = -1u;
}

inline hashval_t
expr_pred_trans_d::hash (const expr_pred_trans_d &e)
{
  return e.e;
}

inline int
expr_pred_trans_d::equal (const expr_pred_trans_d &ve1,
                          const expr_pred_trans_d &ve2)
{
  return ve1.e == ve2.e;
}

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* The live virtual operand on successor edges.  */
  tree vop_on_exit;

  /* PHI translate cache for the single successor edge.  */
  hash_table<expr_pred_trans_d> *phi_translate_table;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define PHI_TRANS_TABLE(BB) ((bb_value_sets_t) ((BB)->aux))->phi_translate_table
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
#define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit


/* Add the tuple mapping from {expression E, basic block PRED} to
   the phi translation table and return whether it pre-existed.  */

static inline bool
phi_trans_add (expr_pred_trans_t *entry, pre_expr e, basic_block pred)
{
  if (!PHI_TRANS_TABLE (pred))
    PHI_TRANS_TABLE (pred) = new hash_table<expr_pred_trans_d> (11);

  expr_pred_trans_t slot;
  expr_pred_trans_d tem;
  unsigned id = get_expression_id (e);
  tem.e = id;
  slot = PHI_TRANS_TABLE (pred)->find_slot_with_hash (tem, id, INSERT);
  if (slot->e)
    {
      *entry = slot;
      return true;
    }

  *entry = slot;
  slot->e = id;
  return false;
}


/* Add expression E to the expression set of value id V.  */

static void
add_to_value (unsigned int v, pre_expr e)
{
  gcc_checking_assert (get_expr_value_id (e) == v);

  if (value_id_constant_p (v))
    {
      if (e->kind != CONSTANT)
        return;

      if (-v >= constant_value_expressions.length ())
        constant_value_expressions.safe_grow_cleared (-v + 1);

      pre_expr leader = constant_value_expressions[-v];
      if (!leader)
        constant_value_expressions[-v] = e;
    }
  else
    {
      if (v >= value_expressions.length ())
        value_expressions.safe_grow_cleared (v + 1);

      bitmap set = value_expressions[v];
      if (!set)
        {
          set = BITMAP_ALLOC (&grand_bitmap_obstack);
          value_expressions[v] = set;
        }
      bitmap_set_bit (set, get_or_alloc_expression_id (e));
    }
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = bitmap_set_pool.allocate ();
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  /* ??? We cannot assert that expr has a value-id (it can be 0), because
     we assign value-ids only to expressions that have a result
     in set_hashtable_value_ids.  */
  return expr->value_id;
}

/* Return a VN valnum (SSA name or constant) for the PRE value-id VAL.  */

static tree
vn_valnum_from_value_id (unsigned int val)
{
  if (value_id_constant_p (val))
    {
      pre_expr vexpr = constant_value_expressions[-val];
      if (vexpr)
        return PRE_EXPR_CONSTANT (vexpr);
      return NULL_TREE;
    }

  bitmap exprset = value_expressions[val];
  bitmap_iterator bi;
  unsigned int i;
  EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
    {
      pre_expr vexpr = expression_for_id (i);
      if (vexpr->kind == NAME)
        return VN_INFO (PRE_EXPR_NAME (vexpr))->valnum;
    }
  return NULL_TREE;
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (! value_id_constant_p (val))
    {
      /* Note this is the only function causing multiple expressions
         for the same value to appear in a set.  This is needed for
         TMP_GEN, PHI_GEN and NEW_SETs.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}

static void
pre_expr_DFS (pre_expr expr, bitmap_set_t set, bitmap val_visited,
              vec<pre_expr> &post);

/* DFS walk leaders of VAL to their operands with leaders in SET, collecting
   expressions in SET in postorder into POST.  */

static void
pre_expr_DFS (unsigned val, bitmap_set_t set, bitmap val_visited,
              vec<pre_expr> &post)
{
  unsigned int i;
  bitmap_iterator bi;

  /* Iterate over all leaders and DFS recurse.  Borrowed from
     bitmap_find_leader.  */
  bitmap exprset = value_expressions[val];
  if (!exprset->first->next)
    {
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        if (bitmap_bit_p (&set->expressions, i))
          pre_expr_DFS (expression_for_id (i), set, val_visited, post);
      return;
    }

  EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
    pre_expr_DFS (expression_for_id (i), set, val_visited, post);
}

/* DFS walk EXPR to its operands with leaders in SET, collecting
   expressions in SET in postorder into POST.  */

static void
pre_expr_DFS (pre_expr expr, bitmap_set_t set, bitmap val_visited,
              vec<pre_expr> &post)
{
  switch (expr->kind)
    {
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (unsigned i = 0; i < nary->length; i++)
          {
            if (TREE_CODE (nary->op[i]) != SSA_NAME)
              continue;
            unsigned int op_val_id = VN_INFO (nary->op[i])->value_id;
            /* Recurse if the value has a leader in SET and was not
               visited yet.  Checking the values bitmap first avoids
               the costly bitmap_find_leader.  */
            if (bitmap_bit_p (&set->values, op_val_id)
                && bitmap_set_bit (val_visited, op_val_id))
              pre_expr_DFS (op_val_id, set, val_visited, post);
          }
        break;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vec<vn_reference_op_s> operands = ref->operands;
        vn_reference_op_t operand;
        for (unsigned i = 0; operands.iterate (i, &operand); i++)
          {
            tree op[3];
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (unsigned n = 0; n < 3; ++n)
              {
                if (!op[n] || TREE_CODE (op[n]) != SSA_NAME)
                  continue;
                unsigned op_val_id = VN_INFO (op[n])->value_id;
                if (bitmap_bit_p (&set->values, op_val_id)
                    && bitmap_set_bit (val_visited, op_val_id))
                  pre_expr_DFS (op_val_id, set, val_visited, post);
              }
          }
        break;
      }
    default:;
    }
  post.quick_push (expr);
}

/* Generate a topologically-ordered array of bitmap set SET.  */

static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i;
  bitmap_iterator bi;
  vec<pre_expr> result;

  /* Pre-allocate enough space for the array.  */
  result.create (bitmap_count_bits (&set->expressions));

  auto_bitmap val_visited (&grand_bitmap_obstack);
  bitmap_tree_view (val_visited);
  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    if (bitmap_set_bit (val_visited, i))
      pre_expr_DFS (i, set, val_visited, result);

  return result;
}

/* Subtract all expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract_expressions (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned to_remove = -1U;
  bitmap_and_compl_into (&a->values, &b->values);
  FOR_EACH_EXPR_ID_IN_SET (a, i, bi)
    {
      if (to_remove != -1U)
        {
          bitmap_clear_bit (&a->expressions, to_remove);
          to_remove = -1U;
        }
      pre_expr expr = expression_for_id (i);
      if (! bitmap_bit_p (&a->values, get_expr_value_id (expr)))
        to_remove = i;
    }
  if (to_remove != -1U)
    bitmap_clear_bit (&a->expressions, to_remove);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  return bitmap_bit_p (&set->values, value_id);
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  Return true if any changes were made.  */

static bool
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (value_id_constant_p (val))
    return false;

  if (bitmap_set_contains_value (set, val))
    {
      /* The number of expressions having a given value is usually
         significantly less than the total number of expressions in SET.
         Thus, rather than check, for each expression in SET, whether it
         has the value LOOKFOR, we walk the reverse mapping that tells us
         what expressions have a given value, and see if any of those
         expressions are in our set.  For large testcases, this is about
         5-10x faster than walking the bitmap.  If this is somehow a
         significant loss for some cases, we can choose which set to walk
         based on the set size.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap exprset = value_expressions[val];
      EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
        {
          if (bitmap_clear_bit (&set->expressions, i))
            {
              bitmap_set_bit (&set->expressions, get_expression_id (expr));
              return i != get_expression_id (expr);
            }
        }
      gcc_unreachable ();
    }

  bitmap_insert_into_set (set, expr);
  return true;
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  if (! expr)
    {
      fprintf (outfile, "NULL");
      return;
    }
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr));
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr));
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", get_tree_code_name (nary->opcode));
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i]);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             ref->operands.iterate (i, &vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != ref->operands.length () - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

void debug_bitmap_sets_for (basic_block);

DEBUG_FUNCTION void
debug_bitmap_sets_for (basic_block bb)
{
  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
  print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
  print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
  print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
  print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
  if (do_partial_partial)
    print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
  print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
}

/* Print out the expressions that have VAL to OUTFILE.  */

static void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap set = value_expressions[val];
  if (set)
    {
      bitmap_set x;
      char s[10];
      sprintf (s, "%04d", val);
      x.expressions = *set;
      print_bitmap_set (outfile, &x, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = pre_expr_pool.allocate ();
  newexpr->kind = CONSTANT;
  newexpr->loc = UNKNOWN_LOCATION;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  newexpr->value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (newexpr->value_id, newexpr);
  return newexpr;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  gcc_unreachable ();
}

/* Return the folded version of E if E, when folded, is a gimple
   min_invariant or an SSA name.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        tree res = vn_nary_simplify (nary);
        if (!res)
          return e;
        if (is_gimple_min_invariant (res))
          return get_or_alloc_expr_for_constant (res);
        if (TREE_CODE (res) == SSA_NAME)
          return get_or_alloc_expr_for_name (res);
        return e;
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (vec<vn_reference_op_s> operands,
                              alias_set_type set, alias_set_type base_set,
                              tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple *phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  if (same_valid)
    *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  unsigned int cnt = param_sccvn_max_alias_queries_per_access;
  use_oracle = ao_ref_init_from_vn_reference (&ref, set, base_set,
                                              type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (cnt > 0
           && !stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        --cnt;
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle && same_valid)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, true,
                                           cnt, &visited, false, NULL, NULL);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      /* If we didn't find any, the value ID can't stay the same.  */
      if (!vuse && same_valid)
        *same_valid = false;
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2 *or* SET3.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert and phi-translation.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2,
                     bitmap_set_t set3 = NULL)
{
  pre_expr result = NULL;

  if (set1)
    result = bitmap_find_leader (set1, val);
  if (!result && set2)
    result = bitmap_find_leader (set2, val);
  if (!result && set3)
    result = bitmap_find_leader (set3, val);
  return result;
}

/* Get the tree type for our PRE expression E.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression that is available in B.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e, basic_block b = NULL)
{
  tree name, valnum = NULL_TREE;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap exprs = value_expressions[value_id];
        EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              {
                tree name = PRE_EXPR_NAME (rep);
                valnum = VN_INFO (name)->valnum;
                gimple *def = SSA_NAME_DEF_STMT (name);
                /* We have to return either a new representative or one
                   that can be used for expression simplification and thus
                   is available in B.  */
                if (! b
                    || gimple_nop_p (def)
                    || dominated_by_p (CDI_DOMINATORS, b, gimple_bb (def)))
                  return name;
              }
            else if (rep->kind == CONSTANT)
              return PRE_EXPR_CONSTANT (rep);
          }
      }
      break;
    }

  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, as the result of phi translation.
     Create one here.
     ??? We should be able to re-use this when we insert the statement
     to compute it.  */
  name = make_temp_ssa_name (get_expr_type (e), gimple_build_nop (), "pretmp");
  vn_ssa_aux_t vn_info = VN_INFO (name);
  vn_info->value_id = value_id;
  vn_info->valnum = valnum ? valnum : name;
  /* ??? For now mark this SSA name for release by VN.  */
  vn_info->needs_insertion = true;
  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, " (%04d)\n", value_id);
    }

  return name;
}


static pre_expr
phi_translate (bitmap_set_t, pre_expr, bitmap_set_t, bitmap_set_t, edge);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (bitmap_set_t dest,
                 pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, edge e)
{
  basic_block pred = e->src;
  basic_block phiblock = e->dest;
  location_t expr_loc = expr->loc;
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (dest, leader, set1, set2, e);
                if (result && result != leader)
                  /* If op has a leader in the sets we translate make
                     sure to use the value of the translated expression.
                     We might need a new representative for that.  */
                  newnary->op[i] = get_representative_for (result, pred);
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            PRE_EXPR_NARY (expr) = newnary;
            constant = fully_constant_expression (expr);
            PRE_EXPR_NARY (expr) = nary;
            if (constant != expr)
              {
                /* For non-CONSTANTs we have to make sure we can eventually
                   insert the expression.  Which means we need to have a
                   leader for it.  */
                if (constant->kind != CONSTANT)
                  {
                    /* Do not allow simplifications to non-constants over
                       backedges as this will likely result in a loop PHI node
                       to be inserted and increased register pressure.
                       See PR77498 - this avoids doing predcoms work in
                       a less efficient way.  */
                    if (e->flags & EDGE_DFS_BACK)
                      ;
                    else
                      {
                        unsigned value_id = get_expr_value_id (constant);
                        /* We want a leader in ANTIC_OUT or AVAIL_OUT here.
                           dest has what we computed into ANTIC_OUT so far
                           so pick from that - since topological sorting
                           by sorted_array_from_bitmap_set isn't perfect
                           we may lose some cases here.  */
                        constant = find_leader_in_sets (value_id, dest,
                                                        AVAIL_OUT (pred));
                        if (constant)
                          {
                            if (dump_file && (dump_flags & TDF_DETAILS))
                              {
                                fprintf (dump_file, "simplifying ");
                                print_pre_expr (dump_file, expr);
                                fprintf (dump_file, " translated %d -> %d to ",
                                         phiblock->index, pred->index);
                                PRE_EXPR_NARY (expr) = newnary;
                                print_pre_expr (dump_file, expr);
                                PRE_EXPR_NARY (expr) = nary;
                                fprintf (dump_file, " to ");
                                print_pre_expr (dump_file, constant);
                                fprintf (dump_file, "\n");
                              }
                            return constant;
                          }
                      }
                  }
                else
                  return constant;
              }

            /* vn_nary_* do not valueize operands.  */
            for (i = 0; i < newnary->length; ++i)
              if (TREE_CODE (newnary->op[i]) == SSA_NAME)
                newnary->op[i] = VN_INFO (newnary->op[i])->valnum;
            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            if (!nary || nary->predicated_values)
              {
                new_val_id = get_next_value_id ();
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
              }
            expr = get_or_alloc_expr_for_nary (nary, expr_loc);
            add_to_value (get_expr_value_id (expr), expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        vec<vn_reference_op_s> operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        vec<vn_reference_op_s> newoperands = vNULL;
        bool changed = false, same_valid = true;
        unsigned int i, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0; operands.iterate (i, &operand); i++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                opresult = phi_translate (dest, leader, set1, set2, e);
                if (opresult && opresult != leader)
                  {
                    tree name = get_representative_for (opresult);
                    changed |= name != op[n];
                    op[n] = name;
                  }
                else if (!opresult)
                  break;
              }
            if (n != 3)
              {
                newoperands.release ();
                return NULL;
              }
            if (!changed)
              continue;
            if (!newoperands.exists ())
              newoperands = operands.copy ();
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            newoperands[i] = newop;
          }
        gcc_checking_assert (i == operands.length ());

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands.exists ()
                                                    ? newoperands : operands,
                                                    ref->set, ref->base_set,
                                                    ref->type,
                                                    vuse, phiblock, pred,
                                                    changed
                                                    ? NULL : &same_valid);
            if (newvuse == NULL_TREE)
              {
                newoperands.release ();
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->base_set,
                                                      ref->type,
                                                      newoperands.exists ()
                                                      ? newoperands : operands,
                                                      &newref, VN_WALK);
            if (result)
              newoperands.release ();

            /* We can always insert constants, so if we have a partial
               redundant constant load of another type try to translate it
               to a constant of appropriate type.  */
            if (result && is_gimple_min_invariant (result))
              {
                tree tem = result;
                if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
                  {
                    tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
                    if (tem && !is_gimple_min_invariant (tem))
                      tem = NULL_TREE;
                  }
                if (tem)
                  return get_or_alloc_expr_for_constant (tem);
              }

            /* If we'd have to convert things we would need to validate
               if we can insert the translated expression.  So fail
               here for now - we cannot insert an alias with a different
               type in the VN tables either, as that would assert.  */
            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                newoperands.release ();
                return NULL;
              }

            if (newref)
              new_val_id = newref->value_id;
            else
              {
                if (changed || !same_valid)
                  new_val_id = get_next_value_id ();
                else
                  new_val_id = ref->value_id;
                if (!newoperands.exists ())
                  newoperands = operands.copy ();
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->base_set, ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = vNULL;
              }
            expr = get_or_alloc_expr_for_reference (newref, expr_loc);
            add_to_value (new_val_id, expr);
          }
        newoperands.release ();
        return expr;
      }
      break;

    case NAME:
      {
        tree name = PRE_EXPR_NAME (expr);
        gimple *def_stmt = SSA_NAME_DEF_STMT (name);
        /* If the SSA name is defined by a PHI node in this block,
           translate it.  */
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          {
            tree def = PHI_ARG_DEF (def_stmt, e->dest_idx);

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            return get_or_alloc_expr_for_name (def);
          }
        /* Otherwise return it unchanged - it will get removed if its
           value is not available in PRED's AVAIL_OUT set of expressions
           by the subtraction of TMP_GEN.  */
        return expr;
      }

    default:
      gcc_unreachable ();
    }
}

1775 /* Wrapper around phi_translate_1 providing caching functionality. */
1776
1777 static pre_expr
1778 phi_translate (bitmap_set_t dest, pre_expr expr,
1779 bitmap_set_t set1, bitmap_set_t set2, edge e)
1780 {
1781 expr_pred_trans_t slot = NULL;
1782 pre_expr phitrans;
1783
1784 if (!expr)
1785 return NULL;
1786
1787 /* Constants contain no values that need translation. */
1788 if (expr->kind == CONSTANT)
1789 return expr;
1790
1791 if (value_id_constant_p (get_expr_value_id (expr)))
1792 return expr;
1793
1794 /* Don't add translations of NAMEs as those are cheap to translate. */
1795 if (expr->kind != NAME)
1796 {
1797 if (phi_trans_add (&slot, expr, e->src))
1798 return slot->v == 0 ? NULL : expression_for_id (slot->v);
1799 /* Store NULL for the value we want to return in the case of
1800 recursing. */
1801 slot->v = 0;
1802 }
1803
1804 /* Translate. */
1805 basic_block saved_valueize_bb = vn_context_bb;
1806 vn_context_bb = e->src;
1807 phitrans = phi_translate_1 (dest, expr, set1, set2, e);
1808 vn_context_bb = saved_valueize_bb;
1809
1810 if (slot)
1811 {
1812 /* We may have reallocated. */
1813 phi_trans_add (&slot, expr, e->src);
1814 if (phitrans)
1815 slot->v = get_expression_id (phitrans);
1816 else
1817 /* Remove failed translations again, they cause insert
1818 iteration to not pick up new opportunities reliably. */
1819 PHI_TRANS_TABLE (e->src)->clear_slot (slot);
1820 }
1821
1822 return phitrans;
1823 }
1824
1825
1826 /* For each expression in SET, translate the values through phi nodes
1827 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1828 expressions in DEST. */
1829
1830 static void
1831 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, edge e)
1832 {
1833 bitmap_iterator bi;
1834 unsigned int i;
1835
1836 if (gimple_seq_empty_p (phi_nodes (e->dest)))
1837 {
1838 bitmap_set_copy (dest, set);
1839 return;
1840 }
1841
1842 /* Allocate the phi-translation cache where we have an idea about
1843 its size. The hash-table implementation internals tell us that
1844 allocating the table to fit twice the number of elements will
1845 make sure we usually do not re-allocate. */
1846 if (!PHI_TRANS_TABLE (e->src))
1847 PHI_TRANS_TABLE (e->src) = new hash_table<expr_pred_trans_d>
1848 (2 * bitmap_count_bits (&set->expressions));
1849 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1850 {
1851 pre_expr expr = expression_for_id (i);
1852 pre_expr translated = phi_translate (dest, expr, set, NULL, e);
1853 if (!translated)
1854 continue;
1855
1856 bitmap_insert_into_set (dest, translated);
1857 }
1858 }
1859
1860 /* Find the leader for a value (i.e., the name representing that
1861 value) in a given set, and return it. Return NULL if no leader
1862 is found. */
1863
1864 static pre_expr
1865 bitmap_find_leader (bitmap_set_t set, unsigned int val)
1866 {
1867 if (value_id_constant_p (val))
1868 return constant_value_expressions[-val];
1869
1870 if (bitmap_set_contains_value (set, val))
1871 {
1872 /* Rather than walk the entire bitmap of expressions, and see
1873 whether any of them has the value we are looking for, we look
1874 at the reverse mapping, which tells us the set of expressions
1875 that have a given value (i.e. value->expressions with that
1876 value) and see if any of those expressions are in our set.
1877 The number of expressions per value is usually significantly
1878 less than the number of expressions in the set. In fact, for
1879 large testcases, doing it this way is roughly 5-10x faster
1880 than walking the bitmap.
1881 If this is somehow a significant loss for some cases, we can
1882 choose which set to walk based on which set is smaller. */
1883 unsigned int i;
1884 bitmap_iterator bi;
1885 bitmap exprset = value_expressions[val];
1886
1887 if (!exprset->first->next)
1888 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
1889 if (bitmap_bit_p (&set->expressions, i))
1890 return expression_for_id (i);
1891
1892 EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
1893 return expression_for_id (i);
1894 }
1895 return NULL;
1896 }
1897
1898 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1899 BLOCK by seeing if it is not killed in the block. Note that we are
1900 only determining whether there is a store that kills it. Because
1901 of the order in which clean iterates over values, we are guaranteed
1902 that altered operands will have caused the expression to be
1903 eliminated from the ANTIC_IN set already. */
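
/* For example (a made-up GIMPLE snippet):

     tem_1 = *p_2;     <- the expression {*p_2} with VUSE .MEM_3
     ...
     <bb 5>:
       *q_4 = 0;       <- may clobber *p_2

   If the store through q_4 may alias *p_2, the value {*p_2, .MEM_3}
   dies in bb 5 and has to be pruned from the sets translated across
   that block.  */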
1904
1905 static bool
1906 value_dies_in_block_x (pre_expr expr, basic_block block)
1907 {
1908 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1909 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1910 gimple *def;
1911 gimple_stmt_iterator gsi;
1912 unsigned id = get_expression_id (expr);
1913 bool res = false;
1914 ao_ref ref;
1915
1916 if (!vuse)
1917 return false;
1918
1919 /* Lookup a previously calculated result. */
1920 if (EXPR_DIES (block)
1921 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1922 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1923
1924 /* A memory expression {e, VUSE} dies in the block if there is a
1925 statement that may clobber e. If, walking the statements from the
1926 top of the basic block, we find a statement that uses VUSE, there
1927 can be no kill in between that use and the original statement that
1928 loaded {e, VUSE}, so we can stop walking. */
1929 ref.base = NULL_TREE;
1930 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1931 {
1932 tree def_vuse, def_vdef;
1933 def = gsi_stmt (gsi);
1934 def_vuse = gimple_vuse (def);
1935 def_vdef = gimple_vdef (def);
1936
1937 /* Not a memory statement. */
1938 if (!def_vuse)
1939 continue;
1940
1941 /* Not a may-def. */
1942 if (!def_vdef)
1943 {
1944 /* A load with the same VUSE, we're done. */
1945 if (def_vuse == vuse)
1946 break;
1947
1948 continue;
1949 }
1950
1951 /* Init ref only if we really need it. */
1952 if (ref.base == NULL_TREE
1953 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->base_set,
1954 refx->type, refx->operands))
1955 {
1956 res = true;
1957 break;
1958 }
1959 /* If the statement may clobber expr, it dies. */
1960 if (stmt_may_clobber_ref_p_1 (def, &ref))
1961 {
1962 res = true;
1963 break;
1964 }
1965 }
1966
1967 /* Remember the result. */
1968 if (!EXPR_DIES (block))
1969 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
1970 bitmap_set_bit (EXPR_DIES (block), id * 2);
1971 if (res)
1972 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
1973
1974 return res;
1975 }
1976
1977
1978 /* Determine if OP is valid in SET1 U SET2, which it is when the union
1979 contains its value-id. */
1980
1981 static bool
1982 op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
1983 {
1984 if (op && TREE_CODE (op) == SSA_NAME)
1985 {
1986 unsigned int value_id = VN_INFO (op)->value_id;
1987 if (!(bitmap_set_contains_value (set1, value_id)
1988 || (set2 && bitmap_set_contains_value (set2, value_id))))
1989 return false;
1990 }
1991 return true;
1992 }
1993
1994 /* Determine if the expression EXPR is valid in SET1 U SET2.
1995 ONLY SET2 CAN BE NULL.
1996 This means that we have a leader for each part of the expression
1997 (if it consists of values), or the expression is an SSA_NAME.
1998 For loads/calls, we also see if the vuse is killed in this block. */
1999
2000 static bool
2001 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr)
2002 {
2003 switch (expr->kind)
2004 {
2005 case NAME:
2006 /* By construction all NAMEs are available. Non-available
2007 NAMEs are removed by subtracting TMP_GEN from the sets. */
2008 return true;
2009 case NARY:
2010 {
2011 unsigned int i;
2012 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2013 for (i = 0; i < nary->length; i++)
2014 if (!op_valid_in_sets (set1, set2, nary->op[i]))
2015 return false;
2016 return true;
2017 }
2018 break;
2019 case REFERENCE:
2020 {
2021 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2022 vn_reference_op_t vro;
2023 unsigned int i;
2024
2025 FOR_EACH_VEC_ELT (ref->operands, i, vro)
2026 {
2027 if (!op_valid_in_sets (set1, set2, vro->op0)
2028 || !op_valid_in_sets (set1, set2, vro->op1)
2029 || !op_valid_in_sets (set1, set2, vro->op2))
2030 return false;
2031 }
2032 return true;
2033 }
2034 default:
2035 gcc_unreachable ();
2036 }
2037 }
2038
2039 /* Remove from SET1 the expressions that are no longer valid in SET1 U SET2.
2040 This means expressions that are made up of values we have no leaders for
2041 in SET1 or SET2. */
2042
2043 static void
2044 clean (bitmap_set_t set1, bitmap_set_t set2 = NULL)
2045 {
2046 vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
2047 pre_expr expr;
2048 int i;
2049
2050 FOR_EACH_VEC_ELT (exprs, i, expr)
2051 {
2052 if (!valid_in_sets (set1, set2, expr))
2053 {
2054 unsigned int val = get_expr_value_id (expr);
2055 bitmap_clear_bit (&set1->expressions, get_expression_id (expr));
2056 /* We may be entered with multiple expressions for a value, so
2057 before removing a value from the set check whether there's still
2058 an expression for it left. */
2059 if (! bitmap_find_leader (set1, val))
2060 bitmap_clear_bit (&set1->values, val);
2061 }
2062 }
2063 exprs.release ();
2064
2065 if (flag_checking)
2066 {
2067 unsigned j;
2068 bitmap_iterator bi;
2069 FOR_EACH_EXPR_ID_IN_SET (set1, j, bi)
2070 gcc_assert (valid_in_sets (set1, set2, expression_for_id (j)));
2071 }
2072 }
2073
2074 /* Clean the set of expressions that are no longer valid in SET because
2075 they are clobbered in BLOCK or because they trap and may not be executed. */
2076
2077 static void
2078 prune_clobbered_mems (bitmap_set_t set, basic_block block)
2079 {
2080 bitmap_iterator bi;
2081 unsigned i;
2082 unsigned to_remove = -1U;
2083 bool any_removed = false;
2084
2085 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2086 {
2087 /* Remove queued expr. */
2088 if (to_remove != -1U)
2089 {
2090 bitmap_clear_bit (&set->expressions, to_remove);
2091 any_removed = true;
2092 to_remove = -1U;
2093 }
2094
2095 pre_expr expr = expression_for_id (i);
2096 if (expr->kind == REFERENCE)
2097 {
2098 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2099 if (ref->vuse)
2100 {
2101 gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2102 if (!gimple_nop_p (def_stmt)
2103 && ((gimple_bb (def_stmt) != block
2104 && !dominated_by_p (CDI_DOMINATORS,
2105 block, gimple_bb (def_stmt)))
2106 || (gimple_bb (def_stmt) == block
2107 && value_dies_in_block_x (expr, block))))
2108 to_remove = i;
2109 }
2110 }
2111 else if (expr->kind == NARY)
2112 {
2113 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2114 /* If the NARY may trap, make sure the block does not contain
2115 a possible exit point.
2116 ??? This is overly conservative if we translate AVAIL_OUT
2117 as the available expression might be after the exit point. */
2118 if (BB_MAY_NOTRETURN (block)
2119 && vn_nary_may_trap (nary))
2120 to_remove = i;
2121 }
2122 }
2123
2124 /* Remove queued expr. */
2125 if (to_remove != -1U)
2126 {
2127 bitmap_clear_bit (&set->expressions, to_remove);
2128 any_removed = true;
2129 }
2130
2131 /* Above we only removed expressions; now clean the set of values
2132 which no longer have any corresponding expression. We cannot
2133 clear the value at the time we remove an expression since there
2134 may be multiple expressions per value.
2135 If we queued the values possibly to be removed we could use
2136 the bitmap_find_leader way to see if there's still an expression
2137 for them. For some ratio of to-be-removed values to the number of
2138 values/expressions in the set this might be faster than rebuilding
2139 the value-set. */
2140 if (any_removed)
2141 {
2142 bitmap_clear (&set->values);
2143 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
2144 {
2145 pre_expr expr = expression_for_id (i);
2146 unsigned int value_id = get_expr_value_id (expr);
2147 bitmap_set_bit (&set->values, value_id);
2148 }
2149 }
2150 }
2151
2152 /* Compute the ANTIC set for BLOCK.
2153
2154 If succs(BLOCK) > 1 then
2155 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2156 else if succs(BLOCK) == 1 then
2157 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2158
2159 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2160
2161 Note that clean() is deferred until after the iteration. */
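
/* As a small worked example (block numbers and names are made up):

     <bb 2>: if (c_1) ...       succs: bb 3, bb 4
     <bb 3>: x_2 = a_1 + b_1;
     <bb 4>: x_3 = a_1 + b_1;

   The value of a_1 + b_1 is in ANTIC_IN of both bb 3 and bb 4, so
   the intersection over the successors puts it into ANTIC_OUT[bb 2];
   since neither a_1 nor b_1 is defined in bb 2, it also ends up in
   ANTIC_IN[bb 2] and becomes a candidate for insertion.  */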
2162
2163 static bool
2164 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2165 {
2166 bitmap_set_t S, old, ANTIC_OUT;
2167 edge e;
2168 edge_iterator ei;
2169
2170 bool was_visited = BB_VISITED (block);
2171 bool changed = ! BB_VISITED (block);
2172 BB_VISITED (block) = 1;
2173 old = ANTIC_OUT = S = NULL;
2174
2175 /* If any edges from predecessors are abnormal, antic_in is empty,
2176 so do nothing. */
2177 if (block_has_abnormal_pred_edge)
2178 goto maybe_dump_sets;
2179
2180 old = ANTIC_IN (block);
2181 ANTIC_OUT = bitmap_set_new ();
2182
2183 /* If the block has no successors, ANTIC_OUT is empty. */
2184 if (EDGE_COUNT (block->succs) == 0)
2185 ;
2186 /* If we have one successor, we could have some phi nodes to
2187 translate through. */
2188 else if (single_succ_p (block))
2189 {
2190 e = single_succ_edge (block);
2191 gcc_assert (BB_VISITED (e->dest));
2192 phi_translate_set (ANTIC_OUT, ANTIC_IN (e->dest), e);
2193 }
2194 /* If we have multiple successors, we take the intersection of all of
2195 them. Note that in the case of loop exit phi nodes, we may have
2196 phis to translate through. */
2197 else
2198 {
2199 size_t i;
2200 edge first = NULL;
2201
2202 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2203 FOR_EACH_EDGE (e, ei, block->succs)
2204 {
2205 if (!first
2206 && BB_VISITED (e->dest))
2207 first = e;
2208 else if (BB_VISITED (e->dest))
2209 worklist.quick_push (e);
2210 else
2211 {
2212 /* Unvisited successors get their ANTIC_IN replaced by the
2213 maximal set to arrive at a maximum ANTIC_IN solution.
2214 We can ignore them in the intersection operation and thus
2215 need not explicitly represent that maximum solution. */
2216 if (dump_file && (dump_flags & TDF_DETAILS))
2217 fprintf (dump_file, "ANTIC_IN is MAX on %d->%d\n",
2218 e->src->index, e->dest->index);
2219 }
2220 }
2221
2222 /* Of multiple successors we must have visited one already,
2223 which is guaranteed by the iteration order. */
2224 gcc_assert (first != NULL);
2225
2226 phi_translate_set (ANTIC_OUT, ANTIC_IN (first->dest), first);
2227
2228 /* If we have multiple successors we need to intersect the ANTIC_OUT
2229 sets. For values that's a simple intersection but for
2230 expressions it is a union. Given we want to have a single
2231 expression per value in our sets we have to canonicalize.
2232 Avoid randomness and running into cycles like for PR82129 and
2233 canonicalize the expression we choose to the one with the
2234 lowest id. This requires we actually compute the union first. */
2235 FOR_EACH_VEC_ELT (worklist, i, e)
2236 {
2237 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2238 {
2239 bitmap_set_t tmp = bitmap_set_new ();
2240 phi_translate_set (tmp, ANTIC_IN (e->dest), e);
2241 bitmap_and_into (&ANTIC_OUT->values, &tmp->values);
2242 bitmap_ior_into (&ANTIC_OUT->expressions, &tmp->expressions);
2243 bitmap_set_free (tmp);
2244 }
2245 else
2246 {
2247 bitmap_and_into (&ANTIC_OUT->values, &ANTIC_IN (e->dest)->values);
2248 bitmap_ior_into (&ANTIC_OUT->expressions,
2249 &ANTIC_IN (e->dest)->expressions);
2250 }
2251 }
2252 if (! worklist.is_empty ())
2253 {
2254 /* Prune expressions not in the value set. */
2255 bitmap_iterator bi;
2256 unsigned int i;
2257 unsigned int to_clear = -1U;
2258 FOR_EACH_EXPR_ID_IN_SET (ANTIC_OUT, i, bi)
2259 {
2260 if (to_clear != -1U)
2261 {
2262 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2263 to_clear = -1U;
2264 }
2265 pre_expr expr = expression_for_id (i);
2266 unsigned int value_id = get_expr_value_id (expr);
2267 if (!bitmap_bit_p (&ANTIC_OUT->values, value_id))
2268 to_clear = i;
2269 }
2270 if (to_clear != -1U)
2271 bitmap_clear_bit (&ANTIC_OUT->expressions, to_clear);
2272 }
2273 }
2274
2275 /* Prune expressions that are clobbered in BLOCK and thus become
2276 invalid if translated from ANTIC_OUT to ANTIC_IN. */
2277 prune_clobbered_mems (ANTIC_OUT, block);
2278
2279 /* Generate ANTIC_OUT - TMP_GEN. */
2280 S = bitmap_set_subtract_expressions (ANTIC_OUT, TMP_GEN (block));
2281
2282 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2283 ANTIC_IN (block) = bitmap_set_subtract_expressions (EXP_GEN (block),
2284 TMP_GEN (block));
2285
2286 /* Then union in the ANTIC_OUT - TMP_GEN values,
2287 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2288 bitmap_ior_into (&ANTIC_IN (block)->values, &S->values);
2289 bitmap_ior_into (&ANTIC_IN (block)->expressions, &S->expressions);
2290
2291 /* clean (ANTIC_IN (block)) is deferred until after the iteration has
2292 converged because it can cause non-convergence, see for example PR81181. */
2293
2294 /* Intersect ANTIC_IN with the old ANTIC_IN. This is required until
2295 we properly represent the maximum expression set, so that we do not
2296 prune values without expressions during the iteration. */
2297 if (was_visited
2298 && bitmap_and_into (&ANTIC_IN (block)->values, &old->values))
2299 {
2300 if (dump_file && (dump_flags & TDF_DETAILS))
2301 fprintf (dump_file, "warning: intersecting with old ANTIC_IN "
2302 "shrinks the set\n");
2303 /* Prune expressions not in the value set. */
2304 bitmap_iterator bi;
2305 unsigned int i;
2306 unsigned int to_clear = -1U;
2307 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (block), i, bi)
2308 {
2309 if (to_clear != -1U)
2310 {
2311 bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
2312 to_clear = -1U;
2313 }
2314 pre_expr expr = expression_for_id (i);
2315 unsigned int value_id = get_expr_value_id (expr);
2316 if (!bitmap_bit_p (&ANTIC_IN (block)->values, value_id))
2317 to_clear = i;
2318 }
2319 if (to_clear != -1U)
2320 bitmap_clear_bit (&ANTIC_IN (block)->expressions, to_clear);
2321 }
2322
2323 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2324 changed = true;
2325
2326 maybe_dump_sets:
2327 if (dump_file && (dump_flags & TDF_DETAILS))
2328 {
2329 if (ANTIC_OUT)
2330 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2331
2332 if (changed)
2333 fprintf (dump_file, "[changed] ");
2334 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2335 block->index);
2336
2337 if (S)
2338 print_bitmap_set (dump_file, S, "S", block->index);
2339 }
2340 if (old)
2341 bitmap_set_free (old);
2342 if (S)
2343 bitmap_set_free (S);
2344 if (ANTIC_OUT)
2345 bitmap_set_free (ANTIC_OUT);
2346 return changed;
2347 }
2348
2349 /* Compute PARTIAL_ANTIC for BLOCK.
2350
2351 If succs(BLOCK) > 1 then
2352 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2353 in ANTIC_OUT for all succ(BLOCK)
2354 else if succs(BLOCK) == 1 then
2355 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2356
2357 PA_IN[BLOCK] = clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
2358
2359 */
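
/* For illustration (names made up), with

     <bb 2>: if (c_1) ...       succs: bb 3, bb 4
     <bb 3>: x_2 = a_1 + b_1;
     <bb 4>: no computation of a_1 + b_1

   the value of a_1 + b_1 is not in ANTIC_OUT[bb 2] because it is not
   anticipated on the bb 4 path, but the value-wise union over the
   successors keeps it in PA_OUT[bb 2].  */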
2360 static void
2361 compute_partial_antic_aux (basic_block block,
2362 bool block_has_abnormal_pred_edge)
2363 {
2364 bitmap_set_t old_PA_IN;
2365 bitmap_set_t PA_OUT;
2366 edge e;
2367 edge_iterator ei;
2368 unsigned long max_pa = param_max_partial_antic_length;
2369
2370 old_PA_IN = PA_OUT = NULL;
2371
2372 /* If any edges from predecessors are abnormal, antic_in is empty,
2373 so do nothing. */
2374 if (block_has_abnormal_pred_edge)
2375 goto maybe_dump_sets;
2376
2377 /* If there are too many partially anticipatable values in the
2378 block, phi_translate_set can take exponential time: stop
2379 before the translation starts. */
2380 if (max_pa
2381 && single_succ_p (block)
2382 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2383 goto maybe_dump_sets;
2384
2385 old_PA_IN = PA_IN (block);
2386 PA_OUT = bitmap_set_new ();
2387
2388 /* If the block has no successors, PA_OUT is empty. */
2389 if (EDGE_COUNT (block->succs) == 0)
2390 ;
2391 /* If we have one successor, we could have some phi nodes to
2392 translate through. Note that we can't phi translate across DFS
2393 back edges in partial antic, because it uses a union operation on
2394 the successors. For recurrences like IVs, we will end up
2395 generating a new value in the set on each go-around (i + 3 (VH.1),
2396 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2397 else if (single_succ_p (block))
2398 {
2399 e = single_succ_edge (block);
2400 if (!(e->flags & EDGE_DFS_BACK))
2401 phi_translate_set (PA_OUT, PA_IN (e->dest), e);
2402 }
2403 /* If we have multiple successors, we take the union of all of
2404 them. */
2405 else
2406 {
2407 size_t i;
2408
2409 auto_vec<edge> worklist (EDGE_COUNT (block->succs));
2410 FOR_EACH_EDGE (e, ei, block->succs)
2411 {
2412 if (e->flags & EDGE_DFS_BACK)
2413 continue;
2414 worklist.quick_push (e);
2415 }
2416 if (worklist.length () > 0)
2417 {
2418 FOR_EACH_VEC_ELT (worklist, i, e)
2419 {
2420 unsigned int i;
2421 bitmap_iterator bi;
2422
2423 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (e->dest), i, bi)
2424 bitmap_value_insert_into_set (PA_OUT,
2425 expression_for_id (i));
2426 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
2427 {
2428 bitmap_set_t pa_in = bitmap_set_new ();
2429 phi_translate_set (pa_in, PA_IN (e->dest), e);
2430 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2431 bitmap_value_insert_into_set (PA_OUT,
2432 expression_for_id (i));
2433 bitmap_set_free (pa_in);
2434 }
2435 else
2436 FOR_EACH_EXPR_ID_IN_SET (PA_IN (e->dest), i, bi)
2437 bitmap_value_insert_into_set (PA_OUT,
2438 expression_for_id (i));
2439 }
2440 }
2441 }
2442
2443 /* Prune expressions that are clobbered in BLOCK and thus become
2444 invalid if translated from PA_OUT to PA_IN. */
2445 prune_clobbered_mems (PA_OUT, block);
2446
2447 /* PA_IN starts with PA_OUT - TMP_GEN.
2448 Then we subtract the values in ANTIC_IN. */
2449 PA_IN (block) = bitmap_set_subtract_expressions (PA_OUT, TMP_GEN (block));
2450
2451 /* For partial antic, we want to put back in the phi results, since
2452 we will properly avoid making them partially antic over backedges. */
2453 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2454 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2455
2456 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2457 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2458
2459 clean (PA_IN (block), ANTIC_IN (block));
2460
2461 maybe_dump_sets:
2462 if (dump_file && (dump_flags & TDF_DETAILS))
2463 {
2464 if (PA_OUT)
2465 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2466
2467 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2468 }
2469 if (old_PA_IN)
2470 bitmap_set_free (old_PA_IN);
2471 if (PA_OUT)
2472 bitmap_set_free (PA_OUT);
2473 }
2474
2475 /* Compute ANTIC and partial ANTIC sets. */
2476
2477 static void
2478 compute_antic (void)
2479 {
2480 bool changed = true;
2481 int num_iterations = 0;
2482 basic_block block;
2483 int i;
2484 edge_iterator ei;
2485 edge e;
2486
2487 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2488 We pre-build the map of blocks with incoming abnormal edges here. */
2489 auto_sbitmap has_abnormal_preds (last_basic_block_for_fn (cfun));
2490 bitmap_clear (has_abnormal_preds);
2491
2492 FOR_ALL_BB_FN (block, cfun)
2493 {
2494 BB_VISITED (block) = 0;
2495
2496 FOR_EACH_EDGE (e, ei, block->preds)
2497 if (e->flags & EDGE_ABNORMAL)
2498 {
2499 bitmap_set_bit (has_abnormal_preds, block->index);
2500 break;
2501 }
2502
2503 /* While we are here, give empty ANTIC_IN sets to each block. */
2504 ANTIC_IN (block) = bitmap_set_new ();
2505 if (do_partial_partial)
2506 PA_IN (block) = bitmap_set_new ();
2507 }
2508
2509 /* At the exit block we anticipate nothing. */
2510 BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
2511
2512 /* For ANTIC computation we need a postorder that also guarantees that
2513 a block with a single successor is visited after its successor.
2514 RPO on the inverted CFG has this property. */
2515 auto_vec<int, 20> postorder;
2516 inverted_post_order_compute (&postorder);
2517
2518 auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
2519 bitmap_clear (worklist);
2520 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2521 bitmap_set_bit (worklist, e->src->index);
2522 while (changed)
2523 {
2524 if (dump_file && (dump_flags & TDF_DETAILS))
2525 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2526 /* ??? We need to clear our PHI translation cache here as the
2527 ANTIC sets shrink and we restrict valid translations to
2528 those having operands with leaders in ANTIC. Same below
2529 for PA ANTIC computation. */
2530 num_iterations++;
2531 changed = false;
2532 for (i = postorder.length () - 1; i >= 0; i--)
2533 {
2534 if (bitmap_bit_p (worklist, postorder[i]))
2535 {
2536 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2537 bitmap_clear_bit (worklist, block->index);
2538 if (compute_antic_aux (block,
2539 bitmap_bit_p (has_abnormal_preds,
2540 block->index)))
2541 {
2542 FOR_EACH_EDGE (e, ei, block->preds)
2543 bitmap_set_bit (worklist, e->src->index);
2544 changed = true;
2545 }
2546 }
2547 }
2548 /* Theoretically possible, but *highly* unlikely. */
2549 gcc_checking_assert (num_iterations < 500);
2550 }
2551
2552 /* We have to clean after the dataflow problem has converged, as
2553 cleaning can cause non-convergence because it is based on expressions
2554 rather than values. */
2555 FOR_EACH_BB_FN (block, cfun)
2556 clean (ANTIC_IN (block));
2557
2558 statistics_histogram_event (cfun, "compute_antic iterations",
2559 num_iterations);
2560
2561 if (do_partial_partial)
2562 {
2563 /* For partial antic we ignore backedges and thus we do not need
2564 to perform any iteration when we process blocks in postorder. */
2565 for (i = postorder.length () - 1; i >= 0; i--)
2566 {
2567 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
2568 compute_partial_antic_aux (block,
2569 bitmap_bit_p (has_abnormal_preds,
2570 block->index));
2571 }
2572 }
2573 }
2574
2575
2576 /* Inserted expressions are placed onto this worklist, which is used
2577 for performing quick dead code elimination of insertions we made
2578 that didn't turn out to be necessary. */
2579 static bitmap inserted_exprs;
2580
2581 /* The actual worker for create_component_ref_by_pieces. */
2582
2583 static tree
2584 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2585 unsigned int *operand, gimple_seq *stmts)
2586 {
2587 vn_reference_op_t currop = &ref->operands[*operand];
2588 tree genop;
2589 ++*operand;
2590 switch (currop->opcode)
2591 {
2592 case CALL_EXPR:
2593 gcc_unreachable ();
2594
2595 case MEM_REF:
2596 {
2597 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2598 stmts);
2599 if (!baseop)
2600 return NULL_TREE;
2601 tree offset = currop->op0;
2602 if (TREE_CODE (baseop) == ADDR_EXPR
2603 && handled_component_p (TREE_OPERAND (baseop, 0)))
2604 {
2605 poly_int64 off;
2606 tree base;
2607 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2608 &off);
2609 gcc_assert (base);
2610 offset = int_const_binop (PLUS_EXPR, offset,
2611 build_int_cst (TREE_TYPE (offset),
2612 off));
2613 baseop = build_fold_addr_expr (base);
2614 }
2615 genop = build2 (MEM_REF, currop->type, baseop, offset);
2616 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2617 MR_DEPENDENCE_BASE (genop) = currop->base;
2618 REF_REVERSE_STORAGE_ORDER (genop) = currop->reverse;
2619 return genop;
2620 }
2621
2622 case TARGET_MEM_REF:
2623 {
2624 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2625 vn_reference_op_t nextop = &ref->operands[(*operand)++];
2626 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2627 stmts);
2628 if (!baseop)
2629 return NULL_TREE;
2630 if (currop->op0)
2631 {
2632 genop0 = find_or_generate_expression (block, currop->op0, stmts);
2633 if (!genop0)
2634 return NULL_TREE;
2635 }
2636 if (nextop->op0)
2637 {
2638 genop1 = find_or_generate_expression (block, nextop->op0, stmts);
2639 if (!genop1)
2640 return NULL_TREE;
2641 }
2642 genop = build5 (TARGET_MEM_REF, currop->type,
2643 baseop, currop->op2, genop0, currop->op1, genop1);
2644
2645 MR_DEPENDENCE_CLIQUE (genop) = currop->clique;
2646 MR_DEPENDENCE_BASE (genop) = currop->base;
2647 return genop;
2648 }
2649
2650 case ADDR_EXPR:
2651 if (currop->op0)
2652 {
2653 gcc_assert (is_gimple_min_invariant (currop->op0));
2654 return currop->op0;
2655 }
2656 /* Fallthrough. */
2657 case REALPART_EXPR:
2658 case IMAGPART_EXPR:
2659 case VIEW_CONVERT_EXPR:
2660 {
2661 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2662 stmts);
2663 if (!genop0)
2664 return NULL_TREE;
2665 return fold_build1 (currop->opcode, currop->type, genop0);
2666 }
2667
2668 case WITH_SIZE_EXPR:
2669 {
2670 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2671 stmts);
2672 if (!genop0)
2673 return NULL_TREE;
2674 tree genop1 = find_or_generate_expression (block, currop->op0, stmts);
2675 if (!genop1)
2676 return NULL_TREE;
2677 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2678 }
2679
2680 case BIT_FIELD_REF:
2681 {
2682 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2683 stmts);
2684 if (!genop0)
2685 return NULL_TREE;
2686 tree op1 = currop->op0;
2687 tree op2 = currop->op1;
2688 tree t = build3 (BIT_FIELD_REF, currop->type, genop0, op1, op2);
2689 REF_REVERSE_STORAGE_ORDER (t) = currop->reverse;
2690 return fold (t);
2691 }
2692
2693 /* For array ref vn_reference_op's, operand 1 of the array ref
2694 is op0 of the reference op and operand 3 of the array ref is
2695 op1. */
2696 case ARRAY_RANGE_REF:
2697 case ARRAY_REF:
2698 {
2699 tree genop0;
2700 tree genop1 = currop->op0;
2701 tree genop2 = currop->op1;
2702 tree genop3 = currop->op2;
2703 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2704 stmts);
2705 if (!genop0)
2706 return NULL_TREE;
2707 genop1 = find_or_generate_expression (block, genop1, stmts);
2708 if (!genop1)
2709 return NULL_TREE;
2710 if (genop2)
2711 {
2712 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2713 /* Drop zero minimum index if redundant. */
2714 if (integer_zerop (genop2)
2715 && (!domain_type
2716 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2717 genop2 = NULL_TREE;
2718 else
2719 {
2720 genop2 = find_or_generate_expression (block, genop2, stmts);
2721 if (!genop2)
2722 return NULL_TREE;
2723 }
2724 }
2725 if (genop3)
2726 {
2727 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2728 /* We can't always put a size in units of the element alignment
2729 here as the element alignment may not be visible. See
2730 PR43783. Simply drop the element size for constant
2731 sizes. */
2732 if (TREE_CODE (genop3) == INTEGER_CST
2733 && TREE_CODE (TYPE_SIZE_UNIT (elmt_type)) == INTEGER_CST
2734 && wi::eq_p (wi::to_offset (TYPE_SIZE_UNIT (elmt_type)),
2735 (wi::to_offset (genop3)
2736 * vn_ref_op_align_unit (currop))))
2737 genop3 = NULL_TREE;
2738 else
2739 {
2740 genop3 = find_or_generate_expression (block, genop3, stmts);
2741 if (!genop3)
2742 return NULL_TREE;
2743 }
2744 }
2745 return build4 (currop->opcode, currop->type, genop0, genop1,
2746 genop2, genop3);
2747 }
2748 case COMPONENT_REF:
2749 {
2750 tree op0;
2751 tree op1;
2752 tree genop2 = currop->op1;
2753 op0 = create_component_ref_by_pieces_1 (block, ref, operand, stmts);
2754 if (!op0)
2755 return NULL_TREE;
2756 /* op1 should be a FIELD_DECL; those are represented by themselves. */
2757 op1 = currop->op0;
2758 if (genop2)
2759 {
2760 genop2 = find_or_generate_expression (block, genop2, stmts);
2761 if (!genop2)
2762 return NULL_TREE;
2763 }
2764 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1, genop2);
2765 }
2766
2767 case SSA_NAME:
2768 {
2769 genop = find_or_generate_expression (block, currop->op0, stmts);
2770 return genop;
2771 }
2772 case STRING_CST:
2773 case INTEGER_CST:
2774 case POLY_INT_CST:
2775 case COMPLEX_CST:
2776 case VECTOR_CST:
2777 case REAL_CST:
2778 case CONSTRUCTOR:
2779 case VAR_DECL:
2780 case PARM_DECL:
2781 case CONST_DECL:
2782 case RESULT_DECL:
2783 case FUNCTION_DECL:
2784 return currop->op0;
2785
2786 default:
2787 gcc_unreachable ();
2788 }
2789 }
2790
2791 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2792 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2793 trying to rename aggregates into SSA form directly, which is a no-no.
2794
2795 Thus, this routine doesn't create temporaries, it just builds a
2796 single access expression for the array, calling
2797 find_or_generate_expression to build the innermost pieces.
2798
2799 This function is a subroutine of create_expression_by_pieces, and
2800 should not be called on its own unless you really know what you
2801 are doing. */
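
/* As an illustration (names made up), for a reference like
   a_1->f[i_2] the MEM_REF, COMPONENT_REF and ARRAY_REF pieces are
   rebuilt directly into a single tree and only the scalar operands
   a_1 and i_2 go through find_or_generate_expression.  */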
2802
2803 static tree
2804 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2805 gimple_seq *stmts)
2806 {
2807 unsigned int op = 0;
2808 return create_component_ref_by_pieces_1 (block, ref, &op, stmts);
2809 }
2810
2811 /* Find a simple leader for an expression, or generate one using
2812 create_expression_by_pieces from a NARY expression for the value.
2813 BLOCK is the basic_block we are looking for leaders in.
2814 OP is the tree expression to find a leader for or generate.
2815 Returns the leader or NULL_TREE on failure. */
2816
2817 static tree
2818 find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
2819 {
2820 pre_expr expr = get_or_alloc_expr_for (op);
2821 unsigned int lookfor = get_expr_value_id (expr);
2822 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block), lookfor);
2823 if (leader)
2824 {
2825 if (leader->kind == NAME)
2826 return PRE_EXPR_NAME (leader);
2827 else if (leader->kind == CONSTANT)
2828 return PRE_EXPR_CONSTANT (leader);
2829
2830 /* Defer. */
2831 return NULL_TREE;
2832 }
2833
2834 /* It must be a complex expression, so generate it recursively. Note
2835 that this is only necessary to handle gcc.dg/tree-ssa/ssa-pre28.c
2836 where the insert algorithm fails to insert a required expression. */
2837 bitmap exprset = value_expressions[lookfor];
2838 bitmap_iterator bi;
2839 unsigned int i;
2840 EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
2841 {
2842 pre_expr temp = expression_for_id (i);
2843 /* We cannot insert random REFERENCE expressions at arbitrary
2844 places. We can insert NARYs, which eventually re-materialize
2845 their operand values. */
2846 if (temp->kind == NARY)
2847 return create_expression_by_pieces (block, temp, stmts,
2848 get_expr_type (expr));
2849 }
2850
2851 /* Defer. */
2852 return NULL_TREE;
2853 }
2854
2855 /* Create an expression in pieces, so that we can handle very complex
2856 expressions that may be ANTIC, but not necessarily GIMPLE.
2857 BLOCK is the basic block the expression will be inserted into,
2858 EXPR is the expression to insert (in value form)
2859 STMTS is a statement list to append the necessary insertions into.
2860
2861 This function will die if we hit some value that shouldn't be
2862 ANTIC but is (i.e. there is no leader for it or for its components).
2863 The function returns NULL_TREE in case a different antic expression
2864 has to be inserted first.
2865 This function may also generate expressions that are themselves
2866 partially or fully redundant. Those that are will be either made
2867 fully redundant during the next iteration of insert (for partially
2868 redundant ones), or eliminated by eliminate (for fully redundant
2869 ones). */
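
/* For example (a made-up case), generating the value of
   (a_1 + b_2) * c_3 when a_1 + b_2 has no leader yet produces

     pretmp_4 = a_1 + b_2;
     pretmp_5 = pretmp_4 * c_3;

   Both statements are appended to STMTS, and pretmp_4 and pretmp_5
   are entered into the value sets as expressions for their
   respective values.  */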
2870
2871 static tree
2872 create_expression_by_pieces (basic_block block, pre_expr expr,
2873 gimple_seq *stmts, tree type)
2874 {
2875 tree name;
2876 tree folded;
2877 gimple_seq forced_stmts = NULL;
2878 unsigned int value_id;
2879 gimple_stmt_iterator gsi;
2880 tree exprtype = type ? type : get_expr_type (expr);
2881 pre_expr nameexpr;
2882 gassign *newstmt;
2883
2884 switch (expr->kind)
2885 {
2886 /* We may hit the NAME/CONSTANT case if we have to convert types
2887 that value numbering saw through. */
2888 case NAME:
2889 folded = PRE_EXPR_NAME (expr);
2890 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (folded))
2891 return NULL_TREE;
2892 if (useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
2893 return folded;
2894 break;
2895 case CONSTANT:
2896 {
2897 folded = PRE_EXPR_CONSTANT (expr);
2898 tree tem = fold_convert (exprtype, folded);
2899 if (is_gimple_min_invariant (tem))
2900 return tem;
2901 break;
2902 }
2903 case REFERENCE:
2904 if (PRE_EXPR_REFERENCE (expr)->operands[0].opcode == CALL_EXPR)
2905 {
2906 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2907 unsigned int operand = 1;
2908 vn_reference_op_t currop = &ref->operands[0];
2909 tree sc = NULL_TREE;
2910 tree fn = find_or_generate_expression (block, currop->op0, stmts);
2911 if (!fn)
2912 return NULL_TREE;
2913 if (currop->op1)
2914 {
2915 sc = find_or_generate_expression (block, currop->op1, stmts);
2916 if (!sc)
2917 return NULL_TREE;
2918 }
2919 auto_vec<tree> args (ref->operands.length () - 1);
2920 while (operand < ref->operands.length ())
2921 {
2922 tree arg = create_component_ref_by_pieces_1 (block, ref,
2923 &operand, stmts);
2924 if (!arg)
2925 return NULL_TREE;
2926 args.quick_push (arg);
2927 }
2928 gcall *call = gimple_build_call_vec (fn, args);
2929 gimple_set_location (call, expr->loc);
2930 gimple_call_set_fntype (call, currop->type);
2931 if (sc)
2932 gimple_call_set_chain (call, sc);
2933 tree forcedname = make_ssa_name (TREE_TYPE (currop->type));
2934 gimple_call_set_lhs (call, forcedname);
2935 /* There's no CCP pass after PRE which would re-compute alignment
2936 information, so make sure we re-materialize this here. */
2937 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED)
2938 && args.length () - 2 <= 1
2939 && tree_fits_uhwi_p (args[1])
2940 && (args.length () != 3 || tree_fits_uhwi_p (args[2])))
2941 {
2942 unsigned HOST_WIDE_INT halign = tree_to_uhwi (args[1]);
2943 unsigned HOST_WIDE_INT hmisalign
2944 = args.length () == 3 ? tree_to_uhwi (args[2]) : 0;
2945 if ((halign & (halign - 1)) == 0
2946 && (hmisalign & ~(halign - 1)) == 0
2947 && (unsigned int)halign != 0)
2948 set_ptr_info_alignment (get_ptr_info (forcedname),
2949 halign, hmisalign);
2950 }
2951 gimple_set_vuse (call, BB_LIVE_VOP_ON_EXIT (block));
2952 gimple_seq_add_stmt_without_update (&forced_stmts, call);
2953 folded = forcedname;
2954 }
2955 else
2956 {
2957 folded = create_component_ref_by_pieces (block,
2958 PRE_EXPR_REFERENCE (expr),
2959 stmts);
2960 if (!folded)
2961 return NULL_TREE;
2962 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
2963 newstmt = gimple_build_assign (name, folded);
2964 gimple_set_location (newstmt, expr->loc);
2965 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
2966 gimple_set_vuse (newstmt, BB_LIVE_VOP_ON_EXIT (block));
2967 folded = name;
2968 }
2969 break;
2970 case NARY:
2971 {
2972 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2973 tree *genop = XALLOCAVEC (tree, nary->length);
2974 unsigned i;
2975 for (i = 0; i < nary->length; ++i)
2976 {
2977 genop[i] = find_or_generate_expression (block, nary->op[i], stmts);
2978 if (!genop[i])
2979 return NULL_TREE;
2980 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
2981 may have conversions stripped. */
2982 if (nary->opcode == POINTER_PLUS_EXPR)
2983 {
2984 if (i == 0)
2985 genop[i] = gimple_convert (&forced_stmts,
2986 nary->type, genop[i]);
2987 else if (i == 1)
2988 genop[i] = gimple_convert (&forced_stmts,
2989 sizetype, genop[i]);
2990 }
2991 else
2992 genop[i] = gimple_convert (&forced_stmts,
2993 TREE_TYPE (nary->op[i]), genop[i]);
2994 }
2995 if (nary->opcode == CONSTRUCTOR)
2996 {
2997 vec<constructor_elt, va_gc> *elts = NULL;
2998 for (i = 0; i < nary->length; ++i)
2999 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3000 folded = build_constructor (nary->type, elts);
3001 name = make_temp_ssa_name (exprtype, NULL, "pretmp");
3002 newstmt = gimple_build_assign (name, folded);
3003 gimple_set_location (newstmt, expr->loc);
3004 gimple_seq_add_stmt_without_update (&forced_stmts, newstmt);
3005 folded = name;
3006 }
3007 else
3008 {
3009 switch (nary->length)
3010 {
3011 case 1:
3012 folded = gimple_build (&forced_stmts, expr->loc,
3013 nary->opcode, nary->type, genop[0]);
3014 break;
3015 case 2:
3016 folded = gimple_build (&forced_stmts, expr->loc, nary->opcode,
3017 nary->type, genop[0], genop[1]);
3018 break;
3019 case 3:
3020 folded = gimple_build (&forced_stmts, expr->loc, nary->opcode,
3021 nary->type, genop[0], genop[1],
3022 genop[2]);
3023 break;
3024 default:
3025 gcc_unreachable ();
3026 }
3027 }
3028 }
3029 break;
3030 default:
3031 gcc_unreachable ();
3032 }
3033
3034 folded = gimple_convert (&forced_stmts, exprtype, folded);
3035
3036 /* If there is nothing to insert, return the simplified result. */
3037 if (gimple_seq_empty_p (forced_stmts))
3038 return folded;
3039 /* If we simplified to a constant, return it and discard any
3040 stmts we may have built. */
3041 if (is_gimple_min_invariant (folded))
3042 {
3043 gimple_seq_discard (forced_stmts);
3044 return folded;
3045 }
3046 /* Likewise if we simplified to something not queued for insertion. */
3047 bool found = false;
3048 gsi = gsi_last (forced_stmts);
3049 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3050 {
3051 gimple *stmt = gsi_stmt (gsi);
3052 tree forcedname = gimple_get_lhs (stmt);
3053 if (forcedname == folded)
3054 {
3055 found = true;
3056 break;
3057 }
3058 }
3059 if (! found)
3060 {
3061 gimple_seq_discard (forced_stmts);
3062 return folded;
3063 }
3064 gcc_assert (TREE_CODE (folded) == SSA_NAME);
3065
3066 /* If we have any intermediate expressions, add them to the value
3067 sets and chain them into the instruction stream. */
3068 if (forced_stmts)
3069 {
3070 gsi = gsi_start (forced_stmts);
3071 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3072 {
3073 gimple *stmt = gsi_stmt (gsi);
3074 tree forcedname = gimple_get_lhs (stmt);
3075 pre_expr nameexpr;
3076
3077 if (forcedname != folded)
3078 {
3079 vn_ssa_aux_t vn_info = VN_INFO (forcedname);
3080 vn_info->valnum = forcedname;
3081 vn_info->value_id = get_next_value_id ();
3082 nameexpr = get_or_alloc_expr_for_name (forcedname);
3083 add_to_value (vn_info->value_id, nameexpr);
3084 if (NEW_SETS (block))
3085 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3086 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3087 }
3088
3089 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3090 }
3091 gimple_seq_add_seq (stmts, forced_stmts);
3092 }
3093
3094 name = folded;
3095
3096 /* Fold the last statement. */
3097 gsi = gsi_last (*stmts);
3098 if (fold_stmt_inplace (&gsi))
3099 update_stmt (gsi_stmt (gsi));
3100
3101 /* Add a value number to the temporary.
3102 The value may already exist in either NEW_SETS or AVAIL_OUT, because
3103 we are creating the expression by pieces, and this particular piece of
3104 the expression may have been represented. There is no harm in replacing
3105 here. */
3106 value_id = get_expr_value_id (expr);
3107 vn_ssa_aux_t vn_info = VN_INFO (name);
3108 vn_info->value_id = value_id;
3109 vn_info->valnum = vn_valnum_from_value_id (value_id);
3110 if (vn_info->valnum == NULL_TREE)
3111 vn_info->valnum = name;
3112 gcc_assert (vn_info->valnum != NULL_TREE);
3113 nameexpr = get_or_alloc_expr_for_name (name);
3114 add_to_value (value_id, nameexpr);
3115 if (NEW_SETS (block))
3116 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3117 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3118
3119 pre_stats.insertions++;
3120 if (dump_file && (dump_flags & TDF_DETAILS))
3121 {
3122 fprintf (dump_file, "Inserted ");
3123 print_gimple_stmt (dump_file, gsi_stmt (gsi_last (*stmts)), 0);
3124 fprintf (dump_file, " in predecessor %d (%04d)\n",
3125 block->index, value_id);
3126 }
3127
3128 return name;
3129 }
3130
3131
3132 /* Insert the to-be-made-available values of expression EXPRNUM for each
3133 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3134 merge the result with a phi node, given the same value number as
3135 the expression. Return true if we have inserted new stuff. */
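
/* As a sketch (names made up): if the value is available as x_3 from
   the first predecessor and was just inserted as pretmp_4 in the
   second, we end up with

     prephitmp_5 = PHI <x_3(3), pretmp_4(4)>

   in BLOCK, and prephitmp_5 is registered as another expression for
   the value.  */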
3136
3137 static bool
3138 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3139 vec<pre_expr> avail)
3140 {
3141 pre_expr expr = expression_for_id (exprnum);
3142 pre_expr newphi;
3143 unsigned int val = get_expr_value_id (expr);
3144 edge pred;
3145 bool insertions = false;
3146 bool nophi = false;
3147 basic_block bprime;
3148 pre_expr eprime;
3149 edge_iterator ei;
3150 tree type = get_expr_type (expr);
3151 tree temp;
3152 gphi *phi;
3153
3154 /* Make sure we aren't creating an induction variable. */
3155 if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
3156 {
3157 bool firstinsideloop = false;
3158 bool secondinsideloop = false;
3159 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3160 EDGE_PRED (block, 0)->src);
3161 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3162 EDGE_PRED (block, 1)->src);
3163 /* Induction variables only have one edge inside the loop. */
3164 if ((firstinsideloop ^ secondinsideloop)
3165 && expr->kind != REFERENCE)
3166 {
3167 if (dump_file && (dump_flags & TDF_DETAILS))
3168 fprintf (dump_file, "Skipping insertion of phi for partial "
3169 "redundancy: Looks like an induction variable\n");
3170 nophi = true;
3171 }
3172 }
3173
3174 /* Make the necessary insertions. */
3175 FOR_EACH_EDGE (pred, ei, block->preds)
3176 {
3177 /* When we are not inserting a PHI node, do not bother inserting
3178 into places that do not dominate the anticipated computations. */
3179 if (nophi && !dominated_by_p (CDI_DOMINATORS, block, pred->src))
3180 continue;
3181 gimple_seq stmts = NULL;
3182 tree builtexpr;
3183 bprime = pred->src;
3184 eprime = avail[pred->dest_idx];
3185 builtexpr = create_expression_by_pieces (bprime, eprime,
3186 &stmts, type);
3187 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3188 if (!gimple_seq_empty_p (stmts))
3189 {
3190 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pred, stmts);
3191 gcc_assert (! new_bb);
3192 insertions = true;
3193 }
3194 if (!builtexpr)
3195 {
3196 /* We cannot insert a PHI node if we failed to insert
3197 on one edge. */
3198 nophi = true;
3199 continue;
3200 }
3201 if (is_gimple_min_invariant (builtexpr))
3202 avail[pred->dest_idx] = get_or_alloc_expr_for_constant (builtexpr);
3203 else
3204 avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
3205 }
3206 /* If we didn't want a phi node, and we made insertions, we still have
3207 inserted new stuff, and thus return true. If we didn't want a phi node,
3208 and didn't make insertions, we haven't added anything new, so return
3209 false. */
3210 if (nophi && insertions)
3211 return true;
3212 else if (nophi && !insertions)
3213 return false;
3214
3215 /* Now build a phi for the new variable. */
3216 temp = make_temp_ssa_name (type, NULL, "prephitmp");
3217 phi = create_phi_node (temp, block);
3218
3219 vn_ssa_aux_t vn_info = VN_INFO (temp);
3220 vn_info->value_id = val;
3221 vn_info->valnum = vn_valnum_from_value_id (val);
3222 if (vn_info->valnum == NULL_TREE)
3223 vn_info->valnum = temp;
3224 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3225 FOR_EACH_EDGE (pred, ei, block->preds)
3226 {
3227 pre_expr ae = avail[pred->dest_idx];
3228 gcc_assert (get_expr_type (ae) == type
3229 || useless_type_conversion_p (type, get_expr_type (ae)));
3230 if (ae->kind == CONSTANT)
3231 add_phi_arg (phi, unshare_expr (PRE_EXPR_CONSTANT (ae)),
3232 pred, UNKNOWN_LOCATION);
3233 else
3234 add_phi_arg (phi, PRE_EXPR_NAME (ae), pred, UNKNOWN_LOCATION);
3235 }
3236
3237 newphi = get_or_alloc_expr_for_name (temp);
3238 add_to_value (val, newphi);
3239
3240 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3241 this insertion, since we test for the existence of this value in PHI_GEN
3242 before proceeding with the partial redundancy checks in insert_aux.
3243
3244 The value may exist in AVAIL_OUT, in particular, it could be represented
3245 by the expression we are trying to eliminate, in which case we want the
3246 replacement to occur. If it's not existing in AVAIL_OUT, we want it
3247 inserted there.
3248
3249 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3250 this block, because if it did, it would have existed in our dominator's
3251 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3252 */
3253
3254 bitmap_insert_into_set (PHI_GEN (block), newphi);
3255 bitmap_value_replace_in_set (AVAIL_OUT (block),
3256 newphi);
3257 if (NEW_SETS (block))
3258 bitmap_insert_into_set (NEW_SETS (block), newphi);
3259
3260 /* If we insert a PHI node for a conversion of another PHI node
3261 in the same basic block, try to preserve range information.
3262 This is important so that follow-up loop passes receive optimal
3263 number-of-iterations analysis results. See PR61743. */
3264 if (expr->kind == NARY
3265 && CONVERT_EXPR_CODE_P (expr->u.nary->opcode)
3266 && TREE_CODE (expr->u.nary->op[0]) == SSA_NAME
3267 && gimple_bb (SSA_NAME_DEF_STMT (expr->u.nary->op[0])) == block
3268 && INTEGRAL_TYPE_P (type)
3269 && INTEGRAL_TYPE_P (TREE_TYPE (expr->u.nary->op[0]))
3270 && (TYPE_PRECISION (type)
3271 >= TYPE_PRECISION (TREE_TYPE (expr->u.nary->op[0])))
3272 && SSA_NAME_RANGE_INFO (expr->u.nary->op[0]))
3273 {
3274 wide_int min, max;
3275 if (get_range_info (expr->u.nary->op[0], &min, &max) == VR_RANGE
3276 && !wi::neg_p (min, SIGNED)
3277 && !wi::neg_p (max, SIGNED))
3278 /* Just handle extension and sign-changes of all-positive ranges. */
3279 set_range_info (temp,
3280 SSA_NAME_RANGE_TYPE (expr->u.nary->op[0]),
3281 wide_int_storage::from (min, TYPE_PRECISION (type),
3282 TYPE_SIGN (type)),
3283 wide_int_storage::from (max, TYPE_PRECISION (type),
3284 TYPE_SIGN (type)));
3285 }
3286
3287 if (dump_file && (dump_flags & TDF_DETAILS))
3288 {
3289 fprintf (dump_file, "Created phi ");
3290 print_gimple_stmt (dump_file, phi, 0);
3291 fprintf (dump_file, " in block %d (%04d)\n", block->index, val);
3292 }
3293 pre_stats.phis++;
3294 return true;
3295 }
3296
3297
3298
3299 /* Perform insertion of partially redundant or hoistable values.
3300 For BLOCK, do the following:
3301 1. Propagate the NEW_SETS of the dominator into the current block.
3302 If the block has multiple predecessors,
3303 2a. Iterate over the ANTIC expressions for the block to see if
3304 any of them are partially redundant.
3305 2b. If so, insert them into the necessary predecessors to make
3306 the expression fully redundant.
3307 2c. Insert a new PHI merging the values of the predecessors.
3308 2d. Insert the new PHI, and the new expressions, into the
3309 NEW_SETS set.
3310 If the block has multiple successors,
3311 3a. Iterate over the ANTIC values for the block to see if
3312 any of them are good candidates for hoisting.
3313 3b. If so, insert expressions computing the values in BLOCK,
3314 and add the new expressions into the NEW_SETS set.
3315 4. Recursively call ourselves on the dominator children of BLOCK.
3316
3317 Steps 1, 2a, and 4 are done by insert_aux. 2b, 2c and 2d are done by
3318 do_pre_regular_insertion and do_partial_insertion. 3a and 3b are
3319 done in do_hoist_insertion.
3320 */
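
/* The canonical example for steps 2a-2d (names made up):

     if (c_1)
       x_2 = a_1 + b_1;    <- available on this path only
     <merge block>
     y_3 = a_1 + b_1;      <- partially redundant

   Step 2b inserts a_1 + b_1 on the edge where it was not available
   and step 2c merges the two copies with a PHI, after which the
   computation of y_3 is fully redundant and can be removed by
   elimination.  */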
3321
3322 static bool
3323 do_pre_regular_insertion (basic_block block, basic_block dom,
3324 vec<pre_expr> exprs)
3325 {
3326 bool new_stuff = false;
3327 pre_expr expr;
3328 auto_vec<pre_expr, 2> avail;
3329 int i;
3330
3331 avail.safe_grow (EDGE_COUNT (block->preds), true);
3332
3333 FOR_EACH_VEC_ELT (exprs, i, expr)
3334 {
3335 if (expr->kind == NARY
3336 || expr->kind == REFERENCE)
3337 {
3338 unsigned int val;
3339 bool by_some = false;
3340 bool cant_insert = false;
3341 bool all_same = true;
3342 pre_expr first_s = NULL;
3343 edge pred;
3344 basic_block bprime;
3345 pre_expr eprime = NULL;
3346 edge_iterator ei;
3347 pre_expr edoubleprime = NULL;
3348 bool do_insertion = false;
3349
3350 val = get_expr_value_id (expr);
3351 if (bitmap_set_contains_value (PHI_GEN (block), val))
3352 continue;
3353 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3354 {
3355 if (dump_file && (dump_flags & TDF_DETAILS))
3356 {
3357 fprintf (dump_file, "Found fully redundant value: ");
3358 print_pre_expr (dump_file, expr);
3359 fprintf (dump_file, "\n");
3360 }
3361 continue;
3362 }
3363
3364 FOR_EACH_EDGE (pred, ei, block->preds)
3365 {
3366 unsigned int vprime;
3367
3368 /* We should never run insertion for the exit block
3369 and so should never come across fake pred edges. */
3370 gcc_assert (!(pred->flags & EDGE_FAKE));
3371 bprime = pred->src;
3372 /* We are looking at ANTIC_OUT of bprime. */
3373 eprime = phi_translate (NULL, expr, ANTIC_IN (block), NULL, pred);
3374
3375 /* eprime will generally only be NULL if the
3376 value of the expression, translated
3377 through the PHI for this predecessor, is
3378 undefined. If that is the case, we can't
3379 make the expression fully redundant,
3380 because its value is undefined along a
3381 predecessor path. We can thus break out
3382 early because it doesn't matter what the
3383 rest of the results are. */
3384 if (eprime == NULL)
3385 {
3386 avail[pred->dest_idx] = NULL;
3387 cant_insert = true;
3388 break;
3389 }
3390
3391 vprime = get_expr_value_id (eprime);
3392 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3393 vprime);
3394 if (edoubleprime == NULL)
3395 {
3396 avail[pred->dest_idx] = eprime;
3397 all_same = false;
3398 }
3399 else
3400 {
3401 avail[pred->dest_idx] = edoubleprime;
3402 by_some = true;
3403 /* We want to perform insertions to remove a redundancy on
3404 a path in the CFG we want to optimize for speed. */
3405 if (optimize_edge_for_speed_p (pred))
3406 do_insertion = true;
3407 if (first_s == NULL)
3408 first_s = edoubleprime;
3409 else if (!pre_expr_d::equal (first_s, edoubleprime))
3410 all_same = false;
3411 }
3412 }
3413 /* If we can insert it, it's not the same value
3414 already existing along every predecessor, and
3415 it's defined by some predecessor, then it is
3416 partially redundant. */
3417 if (!cant_insert && !all_same && by_some)
3418 {
3419 if (!do_insertion)
3420 {
3421 if (dump_file && (dump_flags & TDF_DETAILS))
3422 {
3423 fprintf (dump_file, "Skipping partial redundancy for "
3424 "expression ");
3425 print_pre_expr (dump_file, expr);
3426 fprintf (dump_file, " (%04d), no redundancy on to be "
3427 "optimized for speed edge\n", val);
3428 }
3429 }
3430 else if (dbg_cnt (treepre_insert))
3431 {
3432 if (dump_file && (dump_flags & TDF_DETAILS))
3433 {
3434 fprintf (dump_file, "Found partial redundancy for "
3435 "expression ");
3436 print_pre_expr (dump_file, expr);
3437 fprintf (dump_file, " (%04d)\n",
3438 get_expr_value_id (expr));
3439 }
3440 if (insert_into_preds_of_block (block,
3441 get_expression_id (expr),
3442 avail))
3443 new_stuff = true;
3444 }
3445 }
3446 /* If all edges produce the same value and that value is
3447 an invariant, then the PHI has the same value on all
3448 edges. Note this. */
3449 else if (!cant_insert && all_same)
3450 {
3451 gcc_assert (edoubleprime->kind == CONSTANT
3452 || edoubleprime->kind == NAME);
3453
3454 tree temp = make_temp_ssa_name (get_expr_type (expr),
3455 NULL, "pretmp");
3456 gassign *assign
3457 = gimple_build_assign (temp,
3458 edoubleprime->kind == CONSTANT ?
3459 PRE_EXPR_CONSTANT (edoubleprime) :
3460 PRE_EXPR_NAME (edoubleprime));
3461 gimple_stmt_iterator gsi = gsi_after_labels (block);
3462 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3463
3464 vn_ssa_aux_t vn_info = VN_INFO (temp);
3465 vn_info->value_id = val;
3466 vn_info->valnum = vn_valnum_from_value_id (val);
3467 if (vn_info->valnum == NULL_TREE)
3468 vn_info->valnum = temp;
3469 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3470 pre_expr newe = get_or_alloc_expr_for_name (temp);
3471 add_to_value (val, newe);
3472 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3473 bitmap_insert_into_set (NEW_SETS (block), newe);
3474 bitmap_insert_into_set (PHI_GEN (block), newe);
3475 }
3476 }
3477 }
3478
3479 return new_stuff;
3480 }
3481
3482
3483 /* Perform insertion for partially anticipatable expressions. There
3484 is only one case in which we will perform insertion for these:
3485 when the expression is partially anticipatable and fully available.
3486 In this case, we know that putting it earlier will enable us to
3487 remove the later computation. */
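
/* For example (names made up):

     if (c_1)
       x_2 = a_1 + b_1;
     else
       x_3 = a_1 + b_1;
     <merge block>
     if (d_4)
       y_5 = a_1 + b_1;

   At the merge block a_1 + b_1 is only partially anticipated (it is
   computed on the d_4 path only) but fully available from both
   predecessors, so inserting a PHI of x_2 and x_3 there makes the
   computation of y_5 redundant.  */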
3488
3489 static bool
3490 do_pre_partial_partial_insertion (basic_block block, basic_block dom,
3491 vec<pre_expr> exprs)
3492 {
3493 bool new_stuff = false;
3494 pre_expr expr;
3495 auto_vec<pre_expr, 2> avail;
3496 int i;
3497
3498 avail.safe_grow (EDGE_COUNT (block->preds), true);
3499
3500 FOR_EACH_VEC_ELT (exprs, i, expr)
3501 {
3502 if (expr->kind == NARY
3503 || expr->kind == REFERENCE)
3504 {
3505 unsigned int val;
3506 bool by_all = true;
3507 bool cant_insert = false;
3508 edge pred;
3509 basic_block bprime;
3510 pre_expr eprime = NULL;
3511 edge_iterator ei;
3512
3513 val = get_expr_value_id (expr);
3514 if (bitmap_set_contains_value (PHI_GEN (block), val))
3515 continue;
3516 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3517 continue;
3518
3519 FOR_EACH_EDGE (pred, ei, block->preds)
3520 {
3521 unsigned int vprime;
3522 pre_expr edoubleprime;
3523
3524 /* We should never run insertion for the exit block
3525 and so should never come across fake pred edges. */
3526 gcc_assert (!(pred->flags & EDGE_FAKE));
3527 bprime = pred->src;
3528 eprime = phi_translate (NULL, expr, ANTIC_IN (block),
3529 PA_IN (block), pred);
3530
3531 /* eprime will generally only be NULL if the
3532 value of the expression, translated
3533 through the PHI for this predecessor, is
3534 undefined. If that is the case, we can't
3535 make the expression fully redundant,
3536 because its value is undefined along a
3537 predecessor path. We can thus break out
3538 early because it doesn't matter what the
3539 rest of the results are. */
3540 if (eprime == NULL)
3541 {
3542 avail[pred->dest_idx] = NULL;
3543 cant_insert = true;
3544 break;
3545 }
3546
3547 vprime = get_expr_value_id (eprime);
3548 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
3549 avail[pred->dest_idx] = edoubleprime;
3550 if (edoubleprime == NULL)
3551 {
3552 by_all = false;
3553 break;
3554 }
3555 }
3556
3557 /* If we can insert it and its value is available along
3558 every predecessor, the expression is fully available
3559 there and partially anticipatable here, so perform
3560 the insertion. */
3561 if (!cant_insert && by_all)
3562 {
3563 edge succ;
3564 bool do_insertion = false;
3565
3566 /* Insert only if we can remove a later expression on a path
3567 that we want to optimize for speed.
3568 The phi node that we will be inserting in BLOCK is not free,
3569 and inserting it for the sake of a !optimize_for_speed successor
3570 may cause regressions on the speed path. */
3571 FOR_EACH_EDGE (succ, ei, block->succs)
3572 {
3573 if (bitmap_set_contains_value (PA_IN (succ->dest), val)
3574 || bitmap_set_contains_value (ANTIC_IN (succ->dest), val))
3575 {
3576 if (optimize_edge_for_speed_p (succ))
3577 do_insertion = true;
3578 }
3579 }
3580
3581 if (!do_insertion)
3582 {
3583 if (dump_file && (dump_flags & TDF_DETAILS))
3584 {
3585 fprintf (dump_file, "Skipping partial partial redundancy "
3586 "for expression ");
3587 print_pre_expr (dump_file, expr);
3588 fprintf (dump_file, " (%04d), not (partially) anticipated "
3589 "on any to be optimized for speed edges\n", val);
3590 }
3591 }
3592 else if (dbg_cnt (treepre_insert))
3593 {
3594 pre_stats.pa_insert++;
3595 if (dump_file && (dump_flags & TDF_DETAILS))
3596 {
3597 fprintf (dump_file, "Found partial partial redundancy "
3598 "for expression ");
3599 print_pre_expr (dump_file, expr);
3600 fprintf (dump_file, " (%04d)\n",
3601 get_expr_value_id (expr));
3602 }
3603 if (insert_into_preds_of_block (block,
3604 get_expression_id (expr),
3605 avail))
3606 new_stuff = true;
3607 }
3608 }
3609 }
3610 }
3611
3612 return new_stuff;
3613 }
3614
3615 /* Insert expressions in BLOCK to hoist computations up from its
3616 successors. Return TRUE if something was inserted, otherwise return
3617 FALSE. The caller has to make sure that BLOCK has at least two successors. */
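/* An illustrative sketch (editor's example, assuming a simple
   diamond):

       <bb 2>: if (cond_5 != 0) goto <bb 3>; else goto <bb 4>;
       <bb 3>: x_1 = a_0 + b_0; ...
       <bb 4>: x_2 = a_0 + b_0; ...

   a_0 + b_0 is computed on every path out of bb 2, so it is in
   ANTIC_IN (bb 2), but it is not in AVAIL_OUT (bb 2).  Inserting
   t_3 = a_0 + b_0 at the end of bb 2 makes both computations below
   fully redundant, and elimination then removes them.  */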
3618
3619 static bool
3620 do_hoist_insertion (basic_block block)
3621 {
3622 edge e;
3623 edge_iterator ei;
3624 bool new_stuff = false;
3625 unsigned i;
3626 gimple_stmt_iterator last;
3627
3628 /* At least two successors, or else... */
3629 gcc_assert (EDGE_COUNT (block->succs) >= 2);
3630
3631 /* Check that all successors of BLOCK are dominated by block.
3632 We could use dominated_by_p() for this, but actually there is a much
3633 quicker check: any successor that is dominated by BLOCK can't have
3634 more than one predecessor edge. */
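/* Editor's illustration of why this check suffices: if a successor S
   of BLOCK had a second predecessor P,

         BLOCK    P
             \   /
               S

   then BLOCK would not dominate S, and a value computed at the end of
   BLOCK could not be used by elimination to replace computations in S
   (the replacement must dominate its uses).  */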
3635 FOR_EACH_EDGE (e, ei, block->succs)
3636 if (! single_pred_p (e->dest))
3637 return false;
3638
3639 /* Determine the insertion point. If we would have to insert before
3640 the last stmt and cannot safely do so, bail out. */
3641 last = gsi_last_bb (block);
3642 if (!gsi_end_p (last)
3643 && !is_ctrl_stmt (gsi_stmt (last))
3644 && stmt_ends_bb_p (gsi_stmt (last)))
3645 return false;
3646
3647 /* Compute the set of hoistable expressions from ANTIC_IN. First compute
3648 hoistable values. */
3649 bitmap_set hoistable_set;
3650
3651 /* A hoistable value must be in ANTIC_IN (BLOCK)
3652 but not in AVAIL_OUT (BLOCK). */
3653 bitmap_initialize (&hoistable_set.values, &grand_bitmap_obstack);
3654 bitmap_and_compl (&hoistable_set.values,
3655 &ANTIC_IN (block)->values, &AVAIL_OUT (block)->values);
3656
3657 /* Short-cut for a common case: hoistable_set is empty. */
3658 if (bitmap_empty_p (&hoistable_set.values))
3659 return false;
3660
3661 /* Compute which of the hoistable values is in AVAIL_OUT of
3662 at least one of the successors of BLOCK. */
3663 bitmap_head availout_in_some;
3664 bitmap_initialize (&availout_in_some, &grand_bitmap_obstack);
3665 FOR_EACH_EDGE (e, ei, block->succs)
3666 /* Do not consider expressions solely because of their availability
3667 on loop exits. They'd be ANTIC-IN throughout the whole loop
3668 and thus effectively hoisted across loops by the combination of
3669 PRE and hoisting. */
3670 if (! loop_exit_edge_p (block->loop_father, e))
3671 bitmap_ior_and_into (&availout_in_some, &hoistable_set.values,
3672 &AVAIL_OUT (e->dest)->values);
3673 bitmap_clear (&hoistable_set.values);
3674
3675 /* Short-cut for a common case: availout_in_some is empty. */
3676 if (bitmap_empty_p (&availout_in_some))
3677 return false;
3678
3679 /* Hack hoistable_set in-place so we can use sorted_array_from_bitmap_set. */
3680 bitmap_move (&hoistable_set.values, &availout_in_some);
3681 hoistable_set.expressions = ANTIC_IN (block)->expressions;
3682
3683 /* Now finally construct the topologically-ordered expression set. */
3684 vec<pre_expr> exprs = sorted_array_from_bitmap_set (&hoistable_set);
3685
3686 bitmap_clear (&hoistable_set.values);
3687
3688 /* If there are candidate values for hoisting, insert expressions
3689 strategically to make the hoistable expressions fully redundant. */
3690 pre_expr expr;
3691 FOR_EACH_VEC_ELT (exprs, i, expr)
3692 {
3693 /* While we try to sort expressions topologically above, the
3694 sorting doesn't work out perfectly. Catch expressions we
3695 already inserted. */
3696 unsigned int value_id = get_expr_value_id (expr);
3697 if (bitmap_set_contains_value (AVAIL_OUT (block), value_id))
3698 {
3699 if (dump_file && (dump_flags & TDF_DETAILS))
3700 {
3701 fprintf (dump_file,
3702 "Already inserted expression for ");
3703 print_pre_expr (dump_file, expr);
3704 fprintf (dump_file, " (%04d)\n", value_id);
3705 }
3706 continue;
3707 }
3708
3709 /* If we end up with a punned expression representation and this
3710 happens to be a float typed one, give up - we can't know for
3711 sure whether all paths perform the floating-point load we are
3712 about to insert, and on some targets this can cause correctness
3713 issues. See PR88240. */
3714 if (expr->kind == REFERENCE
3715 && PRE_EXPR_REFERENCE (expr)->punned
3716 && FLOAT_TYPE_P (get_expr_type (expr)))
3717 continue;
3718
3719 /* OK, we should hoist this value. Perform the transformation. */
3720 pre_stats.hoist_insert++;
3721 if (dump_file && (dump_flags & TDF_DETAILS))
3722 {
3723 fprintf (dump_file,
3724 "Inserting expression in block %d for code hoisting: ",
3725 block->index);
3726 print_pre_expr (dump_file, expr);
3727 fprintf (dump_file, " (%04d)\n", value_id);
3728 }
3729
3730 gimple_seq stmts = NULL;
3731 tree res = create_expression_by_pieces (block, expr, &stmts,
3732 get_expr_type (expr));
3733
3734 /* Do not return true if expression creation ultimately
3735 did not insert any statements. */
3736 if (gimple_seq_empty_p (stmts))
3737 res = NULL_TREE;
3738 else
3739 {
3740 if (gsi_end_p (last) || is_ctrl_stmt (gsi_stmt (last)))
3741 gsi_insert_seq_before (&last, stmts, GSI_SAME_STMT);
3742 else
3743 gsi_insert_seq_after (&last, stmts, GSI_NEW_STMT);
3744 }
3745
3746 /* Make sure not to return true if expression creation ultimately
3747 failed, but also make sure to insert any stmts produced, as they
3748 are tracked in inserted_exprs. */
3749 if (! res)
3750 continue;
3751
3752 new_stuff = true;
3753 }
3754
3755 exprs.release ();
3756
3757 return new_stuff;
3758 }
3759
3760 /* Perform insertion of partially redundant and hoistable values. */
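/* In outline (editor's summary of the code below):

     compute a reverse post-order (RPO) once;
     repeat
       for each block B in RPO order with immediate dominator D:
         propagate NEW_SETS (D) into NEW_SETS (B) and AVAIL_OUT (B);
         if B has multiple predecessors:
           do_pre_regular_insertion (and, when partial-partial PRE
           is enabled, do_pre_partial_partial_insertion);
     until no AVAIL_OUT feeding an already processed block changed;
     finally run do_hoist_insertion in a single backward walk.  */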
3761
3762 static void
3763 insert (void)
3764 {
3765 basic_block bb;
3766
3767 FOR_ALL_BB_FN (bb, cfun)
3768 NEW_SETS (bb) = bitmap_set_new ();
3769
3770 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
3771 int *bb_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun) + 1);
3772 int rpo_num = pre_and_rev_post_order_compute (NULL, rpo, false);
3773 for (int i = 0; i < rpo_num; ++i)
3774 bb_rpo[rpo[i]] = i;
3775
3776 int num_iterations = 0;
3777 bool changed;
3778 do
3779 {
3780 num_iterations++;
3781 if (dump_file && (dump_flags & TDF_DETAILS))
3782 fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
3783
3784 changed = false;
3785 for (int idx = 0; idx < rpo_num; ++idx)
3786 {
3787 basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]);
3788 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, block);
3789 if (dom)
3790 {
3791 unsigned i;
3792 bitmap_iterator bi;
3793 bitmap_set_t newset;
3794
3795 /* First, update the AVAIL_OUT set with anything we may have
3796 inserted higher up in the dominator tree. */
3797 newset = NEW_SETS (dom);
3798
3799 /* Note that we need to do value replacement in both NEW_SETS
3800 and AVAIL_OUT; in both sets the value may currently be
3801 represented by some non-simple expression that we want
3802 to replace with the new one. */
3803 bool avail_out_changed = false;
3804 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3805 {
3806 pre_expr expr = expression_for_id (i);
3807 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3808 avail_out_changed
3809 |= bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3810 }
3811 /* We need to iterate if AVAIL_OUT of this block changed and
3812 the block feeds an already processed block. */
3813 if (avail_out_changed && !changed)
3814 {
3815 edge_iterator ei;
3816 edge e;
3817 FOR_EACH_EDGE (e, ei, block->succs)
3818 if (e->dest->index != EXIT_BLOCK
3819 && bb_rpo[e->dest->index] < idx)
3820 changed = true;
3821 }
3822
3823 /* Insert expressions for partial redundancies. */
3824 if (flag_tree_pre && !single_pred_p (block))
3825 {
3826 vec<pre_expr> exprs
3827 = sorted_array_from_bitmap_set (ANTIC_IN (block));
3828 /* Sorting is not perfect, iterate locally. */
3829 while (do_pre_regular_insertion (block, dom, exprs))
3830 ;
3831 exprs.release ();
3832 if (do_partial_partial)
3833 {
3834 exprs = sorted_array_from_bitmap_set (PA_IN (block));
3835 while (do_pre_partial_partial_insertion (block, dom,
3836 exprs))
3837 ;
3838 exprs.release ();
3839 }
3840 }
3841 }
3842 }
3843
3844 /* Clear the NEW sets before the next iteration. We have already
3845 fully propagated their contents. */
3846 if (changed)
3847 FOR_ALL_BB_FN (bb, cfun)
3848 bitmap_set_free (NEW_SETS (bb));
3849 }
3850 while (changed);
3851
3852 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3853
3854 /* AVAIL_OUT is not needed after insertion so we don't have to
3855 propagate NEW_SETS from hoist insertion. */
3856 FOR_ALL_BB_FN (bb, cfun)
3857 {
3858 bitmap_set_free (NEW_SETS (bb));
3859 bitmap_set_pool.remove (NEW_SETS (bb));
3860 NEW_SETS (bb) = NULL;
3861 }
3862
3863 /* Insert expressions for hoisting. Do a backward walk here since
3864 inserting into BLOCK exposes new opportunities in its predecessors.
3865 Since PRE and hoist insertions can cause back-to-back iteration,
3866 and we are interested in hoisting opportunities exposed by PRE
3867 insertion but not in PRE opportunities exposed by hoisting, do
3868 hoist insertion only after PRE insertion has finished, and do not iterate it. */
3869 if (flag_code_hoisting)
3870 for (int idx = rpo_num - 1; idx >= 0; --idx)
3871 {
3872 basic_block block = BASIC_BLOCK_FOR_FN (cfun, rpo[idx]);
3873 if (EDGE_COUNT (block->succs) >= 2)
3874 changed |= do_hoist_insertion (block);
3875 }
3876
3877 free (rpo);
3878 free (bb_rpo);
3879 }
3880
3881
3882 /* Compute the AVAIL set for all basic blocks.
3883
3884 This function performs value numbering of the statements in each basic
3885 block. The AVAIL sets are built from information we glean while doing
3886 this value numbering, since the AVAIL sets contain only one entry per
3887 value.
3888
3889 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3890 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
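/* For illustration (editor's example): in a diamond

           <bb 2>
           /    \
       <bb 3>  <bb 4>
           \    /
           <bb 5>

   bb 2 immediately dominates bb 3, bb 4 and bb 5, so each of them
   starts with AVAIL_IN equal to AVAIL_OUT (bb 2).  A value computed
   only in bb 3 is therefore not available in bb 5, unless a PHI in
   bb 5 merges the same value from both arms, in which case the PHI
   result becomes the leader recorded in AVAIL_OUT (bb 5).  */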
3891
3892 static void
3893 compute_avail (void)
3894 {
3895
3896 basic_block block, son;
3897 basic_block *worklist;
3898 size_t sp = 0;
3899 unsigned i;
3900 tree name;
3901
3902 /* We pretend that default definitions are defined in the entry block.
3903 This includes function arguments and the static chain decl. */
3904 FOR_EACH_SSA_NAME (i, name, cfun)
3905 {
3906 pre_expr e;
3907 if (!SSA_NAME_IS_DEFAULT_DEF (name)
3908 || has_zero_uses (name)
3909 || virtual_operand_p (name))
3910 continue;
3911
3912 e = get_or_alloc_expr_for_name (name);
3913 add_to_value (get_expr_value_id (e), e);
3914 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)), e);
3915 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3916 e);
3917 }
3918
3919 if (dump_file && (dump_flags & TDF_DETAILS))
3920 {
3921 print_bitmap_set (dump_file, TMP_GEN (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3922 "tmp_gen", ENTRY_BLOCK);
3923 print_bitmap_set (dump_file, AVAIL_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3924 "avail_out", ENTRY_BLOCK);
3925 }
3926
3927 /* Allocate the worklist. */
3928 worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
3929
3930 /* Seed the algorithm by putting the dominator children of the entry
3931 block on the worklist. */
3932 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR_FOR_FN (cfun));
3933 son;
3934 son = next_dom_son (CDI_DOMINATORS, son))
3935 worklist[sp++] = son;
3936
3937 BB_LIVE_VOP_ON_EXIT (ENTRY_BLOCK_PTR_FOR_FN (cfun))
3938 = ssa_default_def (cfun, gimple_vop (cfun));
3939
3940 /* Loop until the worklist is empty. */
3941 while (sp)
3942 {
3943 gimple *stmt;
3944 basic_block dom;
3945
3946 /* Pick a block from the worklist. */
3947 block = worklist[--sp];
3948 vn_context_bb = block;
3949
3950 /* Initially, the set of available values in BLOCK is that of
3951 its immediate dominator. */
3952 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3953 if (dom)
3954 {
3955 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3956 BB_LIVE_VOP_ON_EXIT (block) = BB_LIVE_VOP_ON_EXIT (dom);
3957 }
3958
3959 /* Generate values for PHI nodes. */
3960 for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
3961 gsi_next (&gsi))
3962 {
3963 tree result = gimple_phi_result (gsi.phi ());
3964
3965 /* We have no need for virtual phis, as they don't represent
3966 actual computations. */
3967 if (virtual_operand_p (result))
3968 {
3969 BB_LIVE_VOP_ON_EXIT (block) = result;
3970 continue;
3971 }
3972
3973 pre_expr e = get_or_alloc_expr_for_name (result);
3974 add_to_value (get_expr_value_id (e), e);
3975 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3976 bitmap_insert_into_set (PHI_GEN (block), e);
3977 }
3978
3979 BB_MAY_NOTRETURN (block) = 0;
3980
3981 /* Now compute value numbers and populate value sets with all
3982 the expressions computed in BLOCK. */
3983 for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
3984 gsi_next (&gsi))
3985 {
3986 ssa_op_iter iter;
3987 tree op;
3988
3989 stmt = gsi_stmt (gsi);
3990
3991 /* Cache whether the basic-block has any non-visible side-effect
3992 or control flow.
3993 If this isn't a call, or it is the last stmt in the
3994 basic-block, then the CFG represents things correctly. */
3995 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3996 {
3997 /* Non-looping const functions always return normally.
3998 Otherwise the call might not return or might have side-effects
3999 that forbid hoisting possibly trapping expressions
4000 before it. */
4001 int flags = gimple_call_flags (stmt);
4002 if (!(flags & ECF_CONST)
4003 || (flags & ECF_LOOPING_CONST_OR_PURE))
4004 BB_MAY_NOTRETURN (block) = 1;
4005 }
4006
4007 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
4008 {
4009 pre_expr e = get_or_alloc_expr_for_name (op);
4010
4011 add_to_value (get_expr_value_id (e), e);
4012 bitmap_insert_into_set (TMP_GEN (block), e);
4013 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
4014 }
4015
4016 if (gimple_vdef (stmt))
4017 BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
4018
4019 if (gimple_has_side_effects (stmt)
4020 || stmt_could_throw_p (cfun, stmt)
4021 || is_gimple_debug (stmt))
4022 continue;
4023
4024 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4025 {
4026 if (ssa_undefined_value_p (op))
4027 continue;
4028 pre_expr e = get_or_alloc_expr_for_name (op);
4029 bitmap_value_insert_into_set (EXP_GEN (block), e);
4030 }
4031
4032 switch (gimple_code (stmt))
4033 {
4034 case GIMPLE_RETURN:
4035 continue;
4036
4037 case GIMPLE_CALL:
4038 {
4039 vn_reference_t ref;
4040 vn_reference_s ref1;
4041 pre_expr result = NULL;
4042
4043 /* We can value number only calls to real functions. */
4044 if (gimple_call_internal_p (stmt))
4045 continue;
4046
4047 vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
4048 if (!ref)
4049 continue;
4050
4051 /* If the value of the call is not invalidated in
4052 this block until it is computed, add the expression
4053 to EXP_GEN. */
4054 if (!gimple_vuse (stmt)
4055 || gimple_code
4056 (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
4057 || gimple_bb (SSA_NAME_DEF_STMT
4058 (gimple_vuse (stmt))) != block)
4059 {
4060 result = get_or_alloc_expr_for_reference
4061 (ref, gimple_location (stmt));
4062 add_to_value (get_expr_value_id (result), result);
4063 bitmap_value_insert_into_set (EXP_GEN (block), result);
4064 }
4065 continue;
4066 }
4067
4068 case GIMPLE_ASSIGN:
4069 {
4070 pre_expr result = NULL;
4071 switch (vn_get_stmt_kind (stmt))
4072 {
4073 case VN_NARY:
4074 {
4075 enum tree_code code = gimple_assign_rhs_code (stmt);
4076 vn_nary_op_t nary;
4077
4078 /* COND_EXPR and VEC_COND_EXPR are awkward in
4079 that they contain an embedded complex expression.
4080 Don't even try to shove those through PRE. */
4081 if (code == COND_EXPR
4082 || code == VEC_COND_EXPR)
4083 continue;
4084
4085 vn_nary_op_lookup_stmt (stmt, &nary);
4086 if (!nary || nary->predicated_values)
4087 continue;
4088
4089 /* If the NARY traps and there was a preceding
4090 point in the block that might not return, avoid
4091 adding the nary to EXP_GEN. */
4092 if (BB_MAY_NOTRETURN (block)
4093 && vn_nary_may_trap (nary))
4094 continue;
4095
4096 result = get_or_alloc_expr_for_nary
4097 (nary, gimple_location (stmt));
4098 break;
4099 }
4100
4101 case VN_REFERENCE:
4102 {
4103 tree rhs1 = gimple_assign_rhs1 (stmt);
4104 ao_ref rhs1_ref;
4105 ao_ref_init (&rhs1_ref, rhs1);
4106 alias_set_type set = ao_ref_alias_set (&rhs1_ref);
4107 alias_set_type base_set
4108 = ao_ref_base_alias_set (&rhs1_ref);
4109 vec<vn_reference_op_s> operands
4110 = vn_reference_operands_for_lookup (rhs1);
4111 vn_reference_t ref;
4112 vn_reference_lookup_pieces (gimple_vuse (stmt), set,
4113 base_set, TREE_TYPE (rhs1),
4114 operands, &ref, VN_WALK);
4115 if (!ref)
4116 {
4117 operands.release ();
4118 continue;
4119 }
4120
4121 /* If the REFERENCE traps and there was a preceding
4122 point in the block that might not return, avoid
4123 adding the reference to EXP_GEN. */
4124 if (BB_MAY_NOTRETURN (block)
4125 && vn_reference_may_trap (ref))
4126 {
4127 operands.release ();
4128 continue;
4129 }
4130
4131 /* If the value of the reference is not invalidated in
4132 this block until it is computed, add the expression
4133 to EXP_GEN. */
4134 if (gimple_vuse (stmt))
4135 {
4136 gimple *def_stmt;
4137 bool ok = true;
4138 def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
4139 while (!gimple_nop_p (def_stmt)
4140 && gimple_code (def_stmt) != GIMPLE_PHI
4141 && gimple_bb (def_stmt) == block)
4142 {
4143 if (stmt_may_clobber_ref_p
4144 (def_stmt, gimple_assign_rhs1 (stmt)))
4145 {
4146 ok = false;
4147 break;
4148 }
4149 def_stmt
4150 = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
4151 }
4152 if (!ok)
4153 {
4154 operands.release ();
4155 continue;
4156 }
4157 }
4158
4159 /* If the load was value-numbered to another
4160 load, make sure we do not use its expression
4161 for insertion if it wouldn't be a valid
4162 replacement. */
4163 /* At the moment we have a testcase
4164 for hoist insertion of aligned vs. misaligned
4165 variants in gcc.dg/torture/pr65270-1.c; thus,
4166 with just alignment to be considered, we can
4167 simply replace the expression in the hashtable
4168 with the most conservative one. */
4169 vn_reference_op_t ref1 = &ref->operands.last ();
4170 while (ref1->opcode != TARGET_MEM_REF
4171 && ref1->opcode != MEM_REF
4172 && ref1 != &ref->operands[0])
4173 --ref1;
4174 vn_reference_op_t ref2 = &operands.last ();
4175 while (ref2->opcode != TARGET_MEM_REF
4176 && ref2->opcode != MEM_REF
4177 && ref2 != &operands[0])
4178 --ref2;
4179 if ((ref1->opcode == TARGET_MEM_REF
4180 || ref1->opcode == MEM_REF)
4181 && (TYPE_ALIGN (ref1->type)
4182 > TYPE_ALIGN (ref2->type)))
4183 ref1->type
4184 = build_aligned_type (ref1->type,
4185 TYPE_ALIGN (ref2->type));
4186 /* TBAA behavior is an obvious part of the value so make sure
4187 that the hashtable expression covers this as well,
4188 by adjusting the ref alias set and its base. */
4189 if (ref->set == set
4190 || alias_set_subset_of (set, ref->set))
4191 ;
4192 else if (alias_set_subset_of (ref->set, set))
4193 {
4194 ref->set = set;
4195 if (ref1->opcode == MEM_REF)
4196 ref1->op0
4197 = wide_int_to_tree (TREE_TYPE (ref2->op0),
4198 wi::to_wide (ref1->op0));
4199 else
4200 ref1->op2
4201 = wide_int_to_tree (TREE_TYPE (ref2->op2),
4202 wi::to_wide (ref1->op2));
4203 }
4204 else
4205 {
4206 ref->set = 0;
4207 if (ref1->opcode == MEM_REF)
4208 ref1->op0
4209 = wide_int_to_tree (ptr_type_node,
4210 wi::to_wide (ref1->op0));
4211 else
4212 ref1->op2
4213 = wide_int_to_tree (ptr_type_node,
4214 wi::to_wide (ref1->op2));
4215 }
4216 operands.release ();
4217
4218 result = get_or_alloc_expr_for_reference
4219 (ref, gimple_location (stmt));
4220 break;
4221 }
4222
4223 default:
4224 continue;
4225 }
4226
4227 add_to_value (get_expr_value_id (result), result);
4228 bitmap_value_insert_into_set (EXP_GEN (block), result);
4229 continue;
4230 }
4231 default:
4232 break;
4233 }
4234 }
4235
4236 if (dump_file && (dump_flags & TDF_DETAILS))
4237 {
4238 print_bitmap_set (dump_file, EXP_GEN (block),
4239 "exp_gen", block->index);
4240 print_bitmap_set (dump_file, PHI_GEN (block),
4241 "phi_gen", block->index);
4242 print_bitmap_set (dump_file, TMP_GEN (block),
4243 "tmp_gen", block->index);
4244 print_bitmap_set (dump_file, AVAIL_OUT (block),
4245 "avail_out", block->index);
4246 }
4247
4248 /* Put the dominator children of BLOCK on the worklist of blocks
4249 to compute available sets for. */
4250 for (son = first_dom_son (CDI_DOMINATORS, block);
4251 son;
4252 son = next_dom_son (CDI_DOMINATORS, son))
4253 worklist[sp++] = son;
4254 }
4255 vn_context_bb = NULL;
4256
4257 free (worklist);
4258 }
4259
4260
4261 /* Initialize data structures used by PRE. */
4262
4263 static void
4264 init_pre (void)
4265 {
4266 basic_block bb;
4267
4268 next_expression_id = 1;
4269 expressions.create (0);
4270 expressions.safe_push (NULL);
4271 value_expressions.create (get_max_value_id () + 1);
4272 value_expressions.quick_grow_cleared (get_max_value_id () + 1);
4273 constant_value_expressions.create (get_max_constant_value_id () + 1);
4274 constant_value_expressions.quick_grow_cleared (get_max_constant_value_id () + 1);
4275 name_to_id.create (0);
4276
4277 inserted_exprs = BITMAP_ALLOC (NULL);
4278
4279 connect_infinite_loops_to_exit ();
4280 memset (&pre_stats, 0, sizeof (pre_stats));
4281
4282 alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
4283
4284 calculate_dominance_info (CDI_DOMINATORS);
4285
4286 bitmap_obstack_initialize (&grand_bitmap_obstack);
4287 expression_to_id = new hash_table<pre_expr_d> (num_ssa_names * 3);
4288 FOR_ALL_BB_FN (bb, cfun)
4289 {
4290 EXP_GEN (bb) = bitmap_set_new ();
4291 PHI_GEN (bb) = bitmap_set_new ();
4292 TMP_GEN (bb) = bitmap_set_new ();
4293 AVAIL_OUT (bb) = bitmap_set_new ();
4294 PHI_TRANS_TABLE (bb) = NULL;
4295 }
4296 }
4297
4298
4299 /* Deallocate data structures used by PRE. */
4300
4301 static void
4302 fini_pre ()
4303 {
4304 value_expressions.release ();
4305 constant_value_expressions.release ();
4306 expressions.release ();
4307 BITMAP_FREE (inserted_exprs);
4308 bitmap_obstack_release (&grand_bitmap_obstack);
4309 bitmap_set_pool.release ();
4310 pre_expr_pool.release ();
4311 delete expression_to_id;
4312 expression_to_id = NULL;
4313 name_to_id.release ();
4314
4315 basic_block bb;
4316 FOR_ALL_BB_FN (bb, cfun)
4317 if (bb->aux && PHI_TRANS_TABLE (bb))
4318 delete PHI_TRANS_TABLE (bb);
4319 free_aux_for_blocks ();
4320 }
4321
4322 namespace {
4323
4324 const pass_data pass_data_pre =
4325 {
4326 GIMPLE_PASS, /* type */
4327 "pre", /* name */
4328 OPTGROUP_NONE, /* optinfo_flags */
4329 TV_TREE_PRE, /* tv_id */
4330 ( PROP_cfg | PROP_ssa ), /* properties_required */
4331 0, /* properties_provided */
4332 0, /* properties_destroyed */
4333 TODO_rebuild_alias, /* todo_flags_start */
4334 0, /* todo_flags_finish */
4335 };
4336
4337 class pass_pre : public gimple_opt_pass
4338 {
4339 public:
4340 pass_pre (gcc::context *ctxt)
4341 : gimple_opt_pass (pass_data_pre, ctxt)
4342 {}
4343
4344 /* opt_pass methods: */
4345 virtual bool gate (function *)
4346 { return flag_tree_pre != 0 || flag_code_hoisting != 0; }
4347 virtual unsigned int execute (function *);
4348
4349 }; // class pass_pre
4350
4351 /* Valueization hook for RPO VN when we are calling back to it
4352 at ANTIC compute time. */
4353
4354 static tree
4355 pre_valueize (tree name)
4356 {
4357 if (TREE_CODE (name) == SSA_NAME)
4358 {
4359 tree tem = VN_INFO (name)->valnum;
4360 if (tem != VN_TOP && tem != name)
4361 {
4362 if (TREE_CODE (tem) != SSA_NAME
4363 || SSA_NAME_IS_DEFAULT_DEF (tem))
4364 return tem;
4365 /* We create temporary SSA names for representatives that
4366 do not have a definition (yet) but are not default defs either;
4367 assume they are fine to use. */
4368 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (tem));
4369 if (! def_bb
4370 || dominated_by_p (CDI_DOMINATORS, vn_context_bb, def_bb))
4371 return tem;
4372 /* ??? Now we could look for a leader. Ideally we'd somehow
4373 expose RPO VN leaders and get rid of AVAIL_OUT as well... */
4374 }
4375 }
4376 return name;
4377 }
4378
4379 unsigned int
4380 pass_pre::execute (function *fun)
4381 {
4382 unsigned int todo = 0;
4383
4384 do_partial_partial =
4385 flag_tree_partial_pre && optimize_function_for_speed_p (fun);
4386
4387 /* This has to happen before VN runs because
4388 loop_optimizer_init may create new phis, etc. */
4389 loop_optimizer_init (LOOPS_NORMAL);
4390 split_edges_for_insertion ();
4391 scev_initialize ();
4392 calculate_dominance_info (CDI_DOMINATORS);
4393
4394 run_rpo_vn (VN_WALK);
4395
4396 init_pre ();
4397
4398 vn_valueize = pre_valueize;
4399
4400 /* Insert can get quite slow on an incredibly large number of basic
4401 blocks due to some quadratic behavior. Until this behavior is
4402 fixed, don't run it when we have an incredibly large number of
4403 bb's. If we aren't going to run insert, there is no point in
4404 computing ANTIC, either, even though it's plenty fast, nor do
4405 we require AVAIL. */
4406 if (n_basic_blocks_for_fn (fun) < 4000)
4407 {
4408 compute_avail ();
4409 compute_antic ();
4410 insert ();
4411 }
4412
4413 /* Make sure to remove fake edges before committing our inserts.
4414 This makes sure we don't end up with extra critical edges that
4415 we would need to split. */
4416 remove_fake_exit_edges ();
4417 gsi_commit_edge_inserts ();
4418
4419 /* Elimination folds statements which might (though they should not...)
4420 end up not keeping virtual operands up-to-date. */
4421 gcc_assert (!need_ssa_update_p (fun));
4422
4423 statistics_counter_event (fun, "Insertions", pre_stats.insertions);
4424 statistics_counter_event (fun, "PA inserted", pre_stats.pa_insert);
4425 statistics_counter_event (fun, "HOIST inserted", pre_stats.hoist_insert);
4426 statistics_counter_event (fun, "New PHIs", pre_stats.phis);
4427
4428 todo |= eliminate_with_rpo_vn (inserted_exprs);
4429
4430 vn_valueize = NULL;
4431
4432 /* Because we don't follow exactly the standard PRE algorithm, and decide not
4433 to insert PHI nodes sometimes, and because value numbering of casts isn't
4434 perfect, we sometimes end up inserting dead code. This simple DCE-like
4435 pass removes any insertions we made that weren't actually used. */
4436 simple_dce_from_worklist (inserted_exprs);
4437
4438 fini_pre ();
4439
4440 scev_finalize ();
4441 loop_optimizer_finalize ();
4442
4443 /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
4444 case we can merge the block with the remaining predecessor of the block.
4445 It should either:
4446 - call merge_blocks after each tail merge iteration
4447 - call merge_blocks after all tail merge iterations
4448 - mark TODO_cleanup_cfg when necessary
4449 - share the cfg cleanup with fini_pre. */
4450 todo |= tail_merge_optimize (todo);
4451
4452 free_rpo_vn ();
4453
4454 /* Tail merging invalidates the virtual SSA web, together with
4455 cfg-cleanup opportunities exposed by PRE this will wreck the
4456 SSA updating machinery. So make sure to run update-ssa
4457 manually, before eventually scheduling cfg-cleanup as part of
4458 the todo. */
4459 update_ssa (TODO_update_ssa_only_virtuals);
4460
4461 return todo;
4462 }
4463
4464 } // anon namespace
4465
4466 gimple_opt_pass *
4467 make_pass_pre (gcc::context *ctxt)
4468 {
4469 return new pass_pre (ctxt);
4470 }