gcc/value-prof.c
1 /* Transformations based on profile information for values.
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "coverage.h"
31 #include "data-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "tree-nested.h"
35 #include "calls.h"
36 #include "expr.h"
37 #include "value-prof.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "gimple-pretty-print.h"
43 #include "dumpfile.h"
44 #include "builtins.h"
45
46 /* This file implements value-profile-based optimizations.  Currently the
47 following optimizations are implemented (for more detailed descriptions
48 see comments at value_profile_transformations):
49
50 1) Division/modulo specialization. Provided that we can determine that the
51 operands of the division have some special properties, we may use it to
52 produce more effective code.
53
54 2) Indirect/virtual call specialization.  If we can determine the most
55 common callee of an indirect/virtual call, we can use this
56 information to improve code effectiveness (this is especially
57 useful for the inliner).
58
59 3) Speculative prefetching. If we are able to determine that the difference
60 between addresses accessed by a memory reference is usually constant, we
61 may add the prefetch instructions.
62 FIXME: This transformation was removed together with RTL based value
63 profiling.
64
65
66 Value profiling internals
67 ==========================
68
69 Every value profiling transformation starts with defining what values
70 to profile. There are different histogram types (see HIST_TYPE_* in
71 value-prof.h) and each transformation can request one or more histogram
72 types per GIMPLE statement. The function gimple_find_values_to_profile()
73 collects the values to profile in a vec, and adds the number of counters
74 required for the different histogram types.
75
76 For a -fprofile-generate run, the statements for which values should be
77 recorded, are instrumented in instrument_values(). The instrumentation
78 is done by helper functions that can be found in tree-profile.c, where
79 new types of histograms can be added if necessary.
80
81 For a -fprofile-use compilation, the value profiling data is read back in by
82 compute_value_histograms() that translates the collected data to
83 histograms and attaches them to the profiled statements via
84 gimple_add_histogram_value(). Histograms are stored in a hash table
85 that is attached to every instrumented function, see VALUE_HISTOGRAMS
86 in function.h.
87
88 The value-profile transformations driver is the function
89 gimple_value_profile_transformations(). It traverses all statements in
90 the to-be-transformed function, and looks for statements with one or
91 more histograms attached to them.  If a statement has histograms, the
92 transformation functions are called on the statement.
93
94 Limitations / FIXME / TODO:
95 * Only one histogram of each type can be associated with a statement.
96 * Some value profile transformations are done in builtins.c (?!)
97 * Updating of histograms needs some TLC.
98 * The value profiling code could be used to record analysis results
99 from non-profiling passes (e.g. VRP).
100 * Adding new profilers should be simplified, starting with a cleanup
101 of what-happens-where and with making gimple_find_values_to_profile
102 and gimple_value_profile_transformations table-driven, perhaps...
103 */
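/* As an orientation aid (a sketch only, derived from the transformation
   functions below, which all follow roughly this pattern using helpers
   defined in this file):

     histogram_value h
       = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_TOPN_VALUES);
     if (!h)
       return false;
     // inspect h->hvalue.counters and decide whether the dominant value is
     // frequent enough (verifying against the BB count via check_counter)
     gimple_remove_histogram_value (cfun, stmt, h);
     // emit the specialized code guarded by a runtime comparison and fix
     // up the CFG and edge probabilities
     return true;  */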
104
105 static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
106 static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
107 static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
108 static bool gimple_stringops_transform (gimple_stmt_iterator *);
109 static void dump_ic_profile (gimple_stmt_iterator *gsi);
110
111 /* Allocate a histogram value of kind TYPE for statement STMT measuring VALUE. */
112
113 histogram_value
114 gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
115 enum hist_type type, gimple *stmt, tree value)
116 {
117 histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
118 hist->hvalue.value = value;
119 hist->hvalue.stmt = stmt;
120 hist->type = type;
121 return hist;
122 }
123
124 /* Hash value for histogram. */
125
126 static hashval_t
127 histogram_hash (const void *x)
128 {
129 return htab_hash_pointer (((const_histogram_value)x)->hvalue.stmt);
130 }
131
132 /* Return nonzero if statement for histogram_value X is Y. */
133
134 static int
135 histogram_eq (const void *x, const void *y)
136 {
137 return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
138 }
139
140 /* Set histogram for STMT. */
141
142 static void
143 set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
144 {
145 void **loc;
146 if (!hist && !VALUE_HISTOGRAMS (fun))
147 return;
148 if (!VALUE_HISTOGRAMS (fun))
149 VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
150 histogram_eq, NULL);
151 loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
152 htab_hash_pointer (stmt),
153 hist ? INSERT : NO_INSERT);
154 if (!hist)
155 {
156 if (loc)
157 htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
158 return;
159 }
160 *loc = hist;
161 }
162
163 /* Get histogram list for STMT. */
164
165 histogram_value
166 gimple_histogram_value (struct function *fun, gimple *stmt)
167 {
168 if (!VALUE_HISTOGRAMS (fun))
169 return NULL;
170 return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
171 htab_hash_pointer (stmt));
172 }
173
174 /* Add histogram for STMT. */
175
176 void
177 gimple_add_histogram_value (struct function *fun, gimple *stmt,
178 histogram_value hist)
179 {
180 hist->hvalue.next = gimple_histogram_value (fun, stmt);
181 set_histogram_value (fun, stmt, hist);
182 hist->fun = fun;
183 }
184
185 /* Remove histogram HIST from STMT's histogram list. */
186
187 void
188 gimple_remove_histogram_value (struct function *fun, gimple *stmt,
189 histogram_value hist)
190 {
191 histogram_value hist2 = gimple_histogram_value (fun, stmt);
192 if (hist == hist2)
193 {
194 set_histogram_value (fun, stmt, hist->hvalue.next);
195 }
196 else
197 {
198 while (hist2->hvalue.next != hist)
199 hist2 = hist2->hvalue.next;
200 hist2->hvalue.next = hist->hvalue.next;
201 }
202 free (hist->hvalue.counters);
203 if (flag_checking)
204 memset (hist, 0xab, sizeof (*hist));
205 free (hist);
206 }
207
208 /* Lookup histogram of type TYPE in the STMT. */
209
210 histogram_value
211 gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
212 enum hist_type type)
213 {
214 histogram_value hist;
215 for (hist = gimple_histogram_value (fun, stmt); hist;
216 hist = hist->hvalue.next)
217 if (hist->type == type)
218 return hist;
219 return NULL;
220 }
221
222 /* Dump information about HIST to DUMP_FILE. */
223
224 static void
225 dump_histogram_value (FILE *dump_file, histogram_value hist)
226 {
227 switch (hist->type)
228 {
229 case HIST_TYPE_INTERVAL:
230 if (hist->hvalue.counters)
231 {
232 fprintf (dump_file, "Interval counter range [%d,%d]: [",
233 hist->hdata.intvl.int_start,
234 (hist->hdata.intvl.int_start
235 + hist->hdata.intvl.steps - 1));
236
237 unsigned int i;
238 for (i = 0; i < hist->hdata.intvl.steps; i++)
239 {
240 fprintf (dump_file, "%d:%" PRId64,
241 hist->hdata.intvl.int_start + i,
242 (int64_t) hist->hvalue.counters[i]);
243 if (i != hist->hdata.intvl.steps - 1)
244 fprintf (dump_file, ", ");
245 }
246 fprintf (dump_file, "] outside range: %" PRId64 ".\n",
247 (int64_t) hist->hvalue.counters[i]);
248 }
249 break;
250
251 case HIST_TYPE_POW2:
252 if (hist->hvalue.counters)
253 fprintf (dump_file, "Pow2 counter pow2:%" PRId64
254 " nonpow2:%" PRId64 ".\n",
255 (int64_t) hist->hvalue.counters[1],
256 (int64_t) hist->hvalue.counters[0]);
257 break;
258
259 case HIST_TYPE_TOPN_VALUES:
260 case HIST_TYPE_INDIR_CALL:
261 if (hist->hvalue.counters)
262 {
263 fprintf (dump_file,
264 (hist->type == HIST_TYPE_TOPN_VALUES
265 ? "Top N value counter" : "Indirect call counter"));
266 if (hist->hvalue.counters)
267 {
268 unsigned count = hist->hvalue.counters[1];
269 fprintf (dump_file, " all: %" PRId64 ", %" PRId64 " values: ",
270 (int64_t) hist->hvalue.counters[0], (int64_t) count);
271 for (unsigned i = 0; i < count; i++)
272 {
273 fprintf (dump_file, "[%" PRId64 ":%" PRId64 "]",
274 (int64_t) hist->hvalue.counters[2 * i + 2],
275 (int64_t) hist->hvalue.counters[2 * i + 3]);
276 if (i != count - 1)
277 fprintf (dump_file, ", ");
278 }
279 fprintf (dump_file, ".\n");
280 }
281 }
282 break;
283
284 case HIST_TYPE_AVERAGE:
285 if (hist->hvalue.counters)
286 fprintf (dump_file, "Average value sum:%" PRId64
287 " times:%" PRId64 ".\n",
288 (int64_t) hist->hvalue.counters[0],
289 (int64_t) hist->hvalue.counters[1]);
290 break;
291
292 case HIST_TYPE_IOR:
293 if (hist->hvalue.counters)
294 fprintf (dump_file, "IOR value ior:%" PRId64 ".\n",
295 (int64_t) hist->hvalue.counters[0]);
296 break;
297
298 case HIST_TYPE_TIME_PROFILE:
299 if (hist->hvalue.counters)
300 fprintf (dump_file, "Time profile time:%" PRId64 ".\n",
301 (int64_t) hist->hvalue.counters[0]);
302 break;
303 default:
304 gcc_unreachable ();
305 }
306 }
307
308 /* Stream out histogram HIST (and any chained histograms) to the output block OB. */
309
310 void
311 stream_out_histogram_value (struct output_block *ob, histogram_value hist)
312 {
313 struct bitpack_d bp;
314 unsigned int i;
315
316 bp = bitpack_create (ob->main_stream);
317 bp_pack_enum (&bp, hist_type, HIST_TYPE_MAX, hist->type);
318 bp_pack_value (&bp, hist->hvalue.next != NULL, 1);
319 streamer_write_bitpack (&bp);
320 switch (hist->type)
321 {
322 case HIST_TYPE_INTERVAL:
323 streamer_write_hwi (ob, hist->hdata.intvl.int_start);
324 streamer_write_uhwi (ob, hist->hdata.intvl.steps);
325 break;
326 default:
327 break;
328 }
329 for (i = 0; i < hist->n_counters; i++)
330 {
331 /* When the user uses an unsigned type with a big value, the constant
332 converted to gcov_type (a signed type) can be negative. */
333 gcov_type value = hist->hvalue.counters[i];
334 if (hist->type == HIST_TYPE_TOPN_VALUES
335 || hist->type == HIST_TYPE_IOR)
336 /* Note that the IOR counter tracks pointer values and these can have
337 sign bit set. */
338 ;
339 else
340 gcc_assert (value >= 0);
341
342 streamer_write_gcov_count (ob, value);
343 }
344 if (hist->hvalue.next)
345 stream_out_histogram_value (ob, hist->hvalue.next);
346 }
347
348 /* Stream in histogram values for statement STMT from the input block IB and attach them to STMT. */
349
350 void
351 stream_in_histogram_value (class lto_input_block *ib, gimple *stmt)
352 {
353 enum hist_type type;
354 unsigned int ncounters = 0;
355 struct bitpack_d bp;
356 unsigned int i;
357 histogram_value new_val;
358 bool next;
359 histogram_value *next_p = NULL;
360
361 do
362 {
363 bp = streamer_read_bitpack (ib);
364 type = bp_unpack_enum (&bp, hist_type, HIST_TYPE_MAX);
365 next = bp_unpack_value (&bp, 1);
366 new_val = gimple_alloc_histogram_value (cfun, type, stmt);
367 switch (type)
368 {
369 case HIST_TYPE_INTERVAL:
370 new_val->hdata.intvl.int_start = streamer_read_hwi (ib);
371 new_val->hdata.intvl.steps = streamer_read_uhwi (ib);
372 ncounters = new_val->hdata.intvl.steps + 2;
373 break;
374
375 case HIST_TYPE_POW2:
376 case HIST_TYPE_AVERAGE:
377 ncounters = 2;
378 break;
379
380 case HIST_TYPE_TOPN_VALUES:
381 case HIST_TYPE_INDIR_CALL:
382 break;
383
384 case HIST_TYPE_IOR:
385 case HIST_TYPE_TIME_PROFILE:
386 ncounters = 1;
387 break;
388
389 default:
390 gcc_unreachable ();
391 }
392
393 /* TOP N counters have a variable number of counters. */
394 if (type == HIST_TYPE_INDIR_CALL || type == HIST_TYPE_TOPN_VALUES)
395 {
396 gcov_type total = streamer_read_gcov_count (ib);
397 gcov_type ncounters = streamer_read_gcov_count (ib);
398 new_val->hvalue.counters = XNEWVAR (gcov_type,
399 sizeof (*new_val->hvalue.counters)
400 * (2 + 2 * ncounters));
401 new_val->hvalue.counters[0] = total;
402 new_val->hvalue.counters[1] = ncounters;
403 new_val->n_counters = 2 + 2 * ncounters;
404 for (i = 0; i < 2 * ncounters; i++)
405 new_val->hvalue.counters[2 + i] = streamer_read_gcov_count (ib);
406 }
407 else
408 {
409 new_val->hvalue.counters = XNEWVAR (gcov_type,
410 sizeof (*new_val->hvalue.counters)
411 * ncounters);
412 new_val->n_counters = ncounters;
413 for (i = 0; i < ncounters; i++)
414 new_val->hvalue.counters[i] = streamer_read_gcov_count (ib);
415 }
416
417 if (!next_p)
418 gimple_add_histogram_value (cfun, stmt, new_val);
419 else
420 *next_p = new_val;
421 next_p = &new_val->hvalue.next;
422 }
423 while (next);
424 }
425
426 /* Dump all histograms attached to STMT to DUMP_FILE. */
427
428 void
429 dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
430 {
431 histogram_value hist;
432 for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
433 dump_histogram_value (dump_file, hist);
434 }
435
436 /* Remove all histograms associated with STMT. */
437
438 void
439 gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
440 {
441 histogram_value val;
442 while ((val = gimple_histogram_value (fun, stmt)) != NULL)
443 gimple_remove_histogram_value (fun, stmt, val);
444 }
445
446 /* Duplicate all histograms associated with OSTMT to STMT. */
447
448 void
449 gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
450 struct function *ofun, gimple *ostmt)
451 {
452 histogram_value val;
453 for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
454 {
455 histogram_value new_val = gimple_alloc_histogram_value (fun, val->type);
456 memcpy (new_val, val, sizeof (*val));
457 new_val->hvalue.stmt = stmt;
458 new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
459 memcpy (new_val->hvalue.counters, val->hvalue.counters, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
460 gimple_add_histogram_value (fun, stmt, new_val);
461 }
462 }
463
464 /* Move all histograms associated with OSTMT to STMT. */
465
466 void
467 gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
468 {
469 histogram_value val = gimple_histogram_value (fun, ostmt);
470 if (val)
471 {
472 /* The following three statements can't be reordered,
473 because histogram hashtab relies on stmt field value
474 for finding the exact slot. */
475 set_histogram_value (fun, ostmt, NULL);
476 for (; val != NULL; val = val->hvalue.next)
477 val->hvalue.stmt = stmt;
478 set_histogram_value (fun, stmt, val);
479 }
480 }
481
482 static bool error_found = false;
483
484 /* Helper function for verify_histograms. For each histogram reachable via htab
485 walk verify that it was reached via statement walk. */
486
487 static int
488 visit_hist (void **slot, void *data)
489 {
490 hash_set<histogram_value> *visited = (hash_set<histogram_value> *) data;
491 histogram_value hist = *(histogram_value *) slot;
492
493 if (!visited->contains (hist)
494 && hist->type != HIST_TYPE_TIME_PROFILE)
495 {
496 error ("dead histogram");
497 dump_histogram_value (stderr, hist);
498 debug_gimple_stmt (hist->hvalue.stmt);
499 error_found = true;
500 }
501 return 1;
502 }
503
504 /* Verify sanity of the histograms. */
505
506 DEBUG_FUNCTION void
507 verify_histograms (void)
508 {
509 basic_block bb;
510 gimple_stmt_iterator gsi;
511 histogram_value hist;
512
513 error_found = false;
514 hash_set<histogram_value> visited_hists;
515 FOR_EACH_BB_FN (bb, cfun)
516 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
517 {
518 gimple *stmt = gsi_stmt (gsi);
519
520 for (hist = gimple_histogram_value (cfun, stmt); hist;
521 hist = hist->hvalue.next)
522 {
523 if (hist->hvalue.stmt != stmt)
524 {
525 error ("histogram value statement does not correspond to "
526 "the statement it is associated with");
527 debug_gimple_stmt (stmt);
528 dump_histogram_value (stderr, hist);
529 error_found = true;
530 }
531 visited_hists.add (hist);
532 }
533 }
534 if (VALUE_HISTOGRAMS (cfun))
535 htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, &visited_hists);
536 if (error_found)
537 internal_error ("%qs failed", __func__);
538 }
539
540 /* Helper function for free_histograms.  Free the counter array and the
541 histogram value itself for each entry in the hash table. */
542
543 static int
544 free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
545 {
546 histogram_value hist = *(histogram_value *) slot;
547 free (hist->hvalue.counters);
548 free (hist);
549 return 1;
550 }
551
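/* Free all histograms attached to function FN and dispose of its histogram
   hash table. */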
552 void
553 free_histograms (struct function *fn)
554 {
555 if (VALUE_HISTOGRAMS (fn))
556 {
557 htab_traverse (VALUE_HISTOGRAMS (fn), free_hist, NULL);
558 htab_delete (VALUE_HISTOGRAMS (fn));
559 VALUE_HISTOGRAMS (fn) = NULL;
560 }
561 }
562
563 /* The overall number of invocations of the counter should match the
564 execution count of the basic block.  Report a mismatch as an error
565 rather than an internal error, as it might mean that the user has
566 misused the profile somehow. */
567
568 static bool
569 check_counter (gimple *stmt, const char * name,
570 gcov_type *count, gcov_type *all, profile_count bb_count_d)
571 {
572 gcov_type bb_count = bb_count_d.ipa ().to_gcov_type ();
573 if (*all != bb_count || *count > *all)
574 {
575 dump_user_location_t locus;
576 locus = ((stmt != NULL)
577 ? dump_user_location_t (stmt)
578 : dump_user_location_t::from_function_decl
579 (current_function_decl));
580 if (flag_profile_correction)
581 {
582 if (dump_enabled_p ())
583 dump_printf_loc (MSG_MISSED_OPTIMIZATION, locus,
584 "correcting inconsistent value profile: %s "
585 "profiler overall count (%d) does not match BB "
586 "count (%d)\n", name, (int)*all, (int)bb_count);
587 *all = bb_count;
588 if (*count > *all)
589 *count = *all;
590 return false;
591 }
592 else
593 {
594 error_at (locus.get_location_t (), "corrupted value profile: %s "
595 "profile counter (%d out of %d) inconsistent with "
596 "basic-block count (%d)",
597 name,
598 (int) *count,
599 (int) *all,
600 (int) bb_count);
601 return true;
602 }
603 }
604
605 return false;
606 }
607
608 /* Driver for the GIMPLE-based value-profile transformations; return true if any statement was changed. */
609
610 bool
611 gimple_value_profile_transformations (void)
612 {
613 basic_block bb;
614 gimple_stmt_iterator gsi;
615 bool changed = false;
616
617 FOR_EACH_BB_FN (bb, cfun)
618 {
619 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
620 {
621 gimple *stmt = gsi_stmt (gsi);
622 histogram_value th = gimple_histogram_value (cfun, stmt);
623 if (!th)
624 continue;
625
626 if (dump_file)
627 {
628 fprintf (dump_file, "Trying transformations on stmt ");
629 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
630 dump_histograms_for_stmt (cfun, dump_file, stmt);
631 }
632
633 /* Transformations: */
634 /* The order of things in this conditional controls which
635 transformation is used when more than one is applicable. */
636 /* It is expected that any code added by the transformations
637 will be added before the current statement, and that the
638 current statement remain valid (although possibly
639 modified) upon return. */
640 if (gimple_mod_subtract_transform (&gsi)
641 || gimple_divmod_fixed_value_transform (&gsi)
642 || gimple_mod_pow2_value_transform (&gsi)
643 || gimple_stringops_transform (&gsi))
644 {
645 stmt = gsi_stmt (gsi);
646 changed = true;
647 /* Original statement may no longer be in the same block. */
648 if (bb != gimple_bb (stmt))
649 {
650 bb = gimple_bb (stmt);
651 gsi = gsi_for_stmt (stmt);
652 }
653 }
654
655 /* dump_ic_profile only dumps information; it never transforms the GIMPLE statement. */
656 if (dump_enabled_p ())
657 dump_ic_profile (&gsi);
658 }
659 }
660
661 return changed;
662 }
663
664 /* Generate code for transformation 1 (with parent gimple assignment
665 STMT and probability of taking the optimal path PROB, which is
666 equivalent to COUNT/ALL within roundoff error). This generates the
667 result into a temp and returns the temp; it does not replace or
668 alter the original STMT. */
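/* A sketch of the code this emits (derived from the statements built below;
   tmp0/tmp1/tmp2 are the local temporaries and OP is the division or modulo
   operation of STMT):

     tmp0 = (optype) VALUE;	// the most common divisor from the profile
     tmp1 = op2;
     if (tmp1 != tmp0)
       tmp2 = op1 OP op2;	// generic path
     else
       tmp2 = op1 OP tmp0;	// divisor is a compile-time constant here,
				// so later passes can optimize the operation

   and TMP2 is the returned temporary.  */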
669
670 static tree
671 gimple_divmod_fixed_value (gassign *stmt, tree value, profile_probability prob,
672 gcov_type count, gcov_type all)
673 {
674 gassign *stmt1, *stmt2;
675 gcond *stmt3;
676 tree tmp0, tmp1, tmp2;
677 gimple *bb1end, *bb2end, *bb3end;
678 basic_block bb, bb2, bb3, bb4;
679 tree optype, op1, op2;
680 edge e12, e13, e23, e24, e34;
681 gimple_stmt_iterator gsi;
682
683 gcc_assert (is_gimple_assign (stmt)
684 && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
685 || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
686
687 optype = TREE_TYPE (gimple_assign_lhs (stmt));
688 op1 = gimple_assign_rhs1 (stmt);
689 op2 = gimple_assign_rhs2 (stmt);
690
691 bb = gimple_bb (stmt);
692 gsi = gsi_for_stmt (stmt);
693
694 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
695 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
696 stmt1 = gimple_build_assign (tmp0, fold_convert (optype, value));
697 stmt2 = gimple_build_assign (tmp1, op2);
698 stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
699 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
700 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
701 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
702 bb1end = stmt3;
703
704 tmp2 = create_tmp_reg (optype, "PROF");
705 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, tmp0);
706 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
707 bb2end = stmt1;
708
709 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, op2);
710 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
711 bb3end = stmt1;
712
713 /* Fix CFG. */
714 /* Edge e23 connects bb2 to bb3, etc. */
715 e12 = split_block (bb, bb1end);
716 bb2 = e12->dest;
717 bb2->count = profile_count::from_gcov_type (count);
718 e23 = split_block (bb2, bb2end);
719 bb3 = e23->dest;
720 bb3->count = profile_count::from_gcov_type (all - count);
721 e34 = split_block (bb3, bb3end);
722 bb4 = e34->dest;
723 bb4->count = profile_count::from_gcov_type (all);
724
725 e12->flags &= ~EDGE_FALLTHRU;
726 e12->flags |= EDGE_FALSE_VALUE;
727 e12->probability = prob;
728
729 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
730 e13->probability = prob.invert ();
731
732 remove_edge (e23);
733
734 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
735 e24->probability = profile_probability::always ();
736
737 e34->probability = profile_probability::always ();
738
739 return tmp2;
740 }
741
742 /* Return the n-th value count of TOPN_VALUE histogram. If
743 there's a value, return true and set VALUE and COUNT
744 arguments.
745
746 Counters have the following meaning.
747
748 abs (counters[0]) is the number of executions
749 counters[1] is the number of values actually tracked
750 for i in 0 ... counters[1] - 1
751 counters[2 * i + 2] is the i-th value and counters[2 * i + 3] is its hit count.
752
753 The value of counters[0] is negative when the counter became
754 full during merging and some values were lost. */
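/* For example (an illustrative layout, not real profile data), a TOPN counter
   that saw the value 4 six times and the value 8 twice over 10 executions
   would hold { 10, 2, 4, 6, 8, 2 }.  */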
755
756 bool
757 get_nth_most_common_value (gimple *stmt, const char *counter_type,
758 histogram_value hist, gcov_type *value,
759 gcov_type *count, gcov_type *all, unsigned n)
760 {
761 unsigned counters = hist->hvalue.counters[1];
762 if (n >= counters)
763 return false;
764
765 *count = 0;
766 *value = 0;
767
768 gcov_type read_all = abs_hwi (hist->hvalue.counters[0]);
769
770 gcov_type v = hist->hvalue.counters[2 * n + 2];
771 gcov_type c = hist->hvalue.counters[2 * n + 3];
772
773 if (hist->hvalue.counters[0] < 0
774 && (flag_profile_reproducible == PROFILE_REPRODUCIBILITY_PARALLEL_RUNS
775 || (flag_profile_reproducible
776 == PROFILE_REPRODUCIBILITY_MULTITHREADED)))
777 return false;
778
779 /* Indirect calls can't be verified. */
780 if (stmt
781 && check_counter (stmt, counter_type, &c, &read_all,
782 gimple_bb (stmt)->count))
783 return false;
784
785 *all = read_all;
786
787 *value = v;
788 *count = c;
789 return true;
790 }
791
792 /* Do transform 1) on the statement pointed to by SI if applicable. */
793
794 static bool
795 gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
796 {
797 histogram_value histogram;
798 enum tree_code code;
799 gcov_type val, count, all;
800 tree result, value, tree_val;
801 profile_probability prob;
802 gassign *stmt;
803
804 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
805 if (!stmt)
806 return false;
807
808 if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
809 return false;
810
811 code = gimple_assign_rhs_code (stmt);
812
813 if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
814 return false;
815
816 histogram = gimple_histogram_value_of_type (cfun, stmt,
817 HIST_TYPE_TOPN_VALUES);
818 if (!histogram)
819 return false;
820
821 if (!get_nth_most_common_value (stmt, "divmod", histogram, &val, &count,
822 &all))
823 return false;
824
825 value = histogram->hvalue.value;
826 gimple_remove_histogram_value (cfun, stmt, histogram);
827
828 /* We require that count is at least half of all. */
829 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
830 || 2 * count < all
831 || optimize_bb_for_size_p (gimple_bb (stmt)))
832 return false;
833
834 /* Compute probability of taking the optimal path. */
835 if (all > 0)
836 prob = profile_probability::probability_in_gcov_type (count, all);
837 else
838 prob = profile_probability::never ();
839
840 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
841 tree_val = build_int_cst (get_gcov_type (), val);
842 else
843 {
844 HOST_WIDE_INT a[2];
845 a[0] = (unsigned HOST_WIDE_INT) val;
846 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
847
848 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
849 TYPE_PRECISION (get_gcov_type ()), false));
850 }
851 result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);
852
853 if (dump_enabled_p ())
854 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
855 "Transformation done: div/mod by constant %T\n", tree_val);
856
857 gimple_assign_set_rhs_from_tree (si, result);
858 update_stmt (gsi_stmt (*si));
859
860 return true;
861 }
862
863 /* Generate code for transformation 2 (with parent gimple assign STMT and
864 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
865 within roundoff error). This generates the result into a temp and returns
866 the temp; it does not replace or alter the original STMT. */
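/* A sketch of the emitted code (derived from the statements built below;
   tmp2/tmp3/result are the local temporaries):

     tmp2 = op2 - 1;
     tmp3 = tmp2 & op2;
     if (tmp3 != 0)
       result = op1 % op2;	// op2 is not a power of two: real modulo
     else
       result = op1 & tmp2;	// op2 is a power of two: mask instead

   and RESULT is the returned temporary.  */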
867
868 static tree
869 gimple_mod_pow2 (gassign *stmt, profile_probability prob, gcov_type count, gcov_type all)
870 {
871 gassign *stmt1, *stmt2, *stmt3;
872 gcond *stmt4;
873 tree tmp2, tmp3;
874 gimple *bb1end, *bb2end, *bb3end;
875 basic_block bb, bb2, bb3, bb4;
876 tree optype, op1, op2;
877 edge e12, e13, e23, e24, e34;
878 gimple_stmt_iterator gsi;
879 tree result;
880
881 gcc_assert (is_gimple_assign (stmt)
882 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
883
884 optype = TREE_TYPE (gimple_assign_lhs (stmt));
885 op1 = gimple_assign_rhs1 (stmt);
886 op2 = gimple_assign_rhs2 (stmt);
887
888 bb = gimple_bb (stmt);
889 gsi = gsi_for_stmt (stmt);
890
891 result = create_tmp_reg (optype, "PROF");
892 tmp2 = make_temp_ssa_name (optype, NULL, "PROF");
893 tmp3 = make_temp_ssa_name (optype, NULL, "PROF");
894 stmt2 = gimple_build_assign (tmp2, PLUS_EXPR, op2,
895 build_int_cst (optype, -1));
896 stmt3 = gimple_build_assign (tmp3, BIT_AND_EXPR, tmp2, op2);
897 stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
898 NULL_TREE, NULL_TREE);
899 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
900 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
901 gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
902 bb1end = stmt4;
903
904 /* tmp2 == op2-1 inherited from previous block. */
905 stmt1 = gimple_build_assign (result, BIT_AND_EXPR, op1, tmp2);
906 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
907 bb2end = stmt1;
908
909 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
910 op1, op2);
911 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
912 bb3end = stmt1;
913
914 /* Fix CFG. */
915 /* Edge e23 connects bb2 to bb3, etc. */
916 e12 = split_block (bb, bb1end);
917 bb2 = e12->dest;
918 bb2->count = profile_count::from_gcov_type (count);
919 e23 = split_block (bb2, bb2end);
920 bb3 = e23->dest;
921 bb3->count = profile_count::from_gcov_type (all - count);
922 e34 = split_block (bb3, bb3end);
923 bb4 = e34->dest;
924 bb4->count = profile_count::from_gcov_type (all);
925
926 e12->flags &= ~EDGE_FALLTHRU;
927 e12->flags |= EDGE_FALSE_VALUE;
928 e12->probability = prob;
929
930 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
931 e13->probability = prob.invert ();
932
933 remove_edge (e23);
934
935 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
936 e24->probability = profile_probability::always ();
937
938 e34->probability = profile_probability::always ();
939
940 return result;
941 }
942
943 /* Do transform 2) on the statement pointed to by SI if applicable. */
944
945 static bool
946 gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
947 {
948 histogram_value histogram;
949 enum tree_code code;
950 gcov_type count, wrong_values, all;
951 tree lhs_type, result, value;
952 profile_probability prob;
953 gassign *stmt;
954
955 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
956 if (!stmt)
957 return false;
958
959 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
960 if (!INTEGRAL_TYPE_P (lhs_type))
961 return false;
962
963 code = gimple_assign_rhs_code (stmt);
964
965 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
966 return false;
967
968 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
969 if (!histogram)
970 return false;
971
972 value = histogram->hvalue.value;
973 wrong_values = histogram->hvalue.counters[0];
974 count = histogram->hvalue.counters[1];
975
976 gimple_remove_histogram_value (cfun, stmt, histogram);
977
978 /* We require that a power of 2 is hit in at least half of all evaluations. */
979 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
980 || count < wrong_values
981 || optimize_bb_for_size_p (gimple_bb (stmt)))
982 return false;
983
984 /* Compute probability of taking the optimal path. */
985 all = count + wrong_values;
986
987 if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
988 return false;
989
990 if (dump_enabled_p ())
991 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
992 "Transformation done: mod power of 2\n");
993
994 if (all > 0)
995 prob = profile_probability::probability_in_gcov_type (count, all);
996 else
997 prob = profile_probability::never ();
998
999 result = gimple_mod_pow2 (stmt, prob, count, all);
1000
1001 gimple_assign_set_rhs_from_tree (si, result);
1002 update_stmt (gsi_stmt (*si));
1003
1004 return true;
1005 }
1006
1007 /* Generate code for transformations 3 and 4 (with parent gimple assign STMT, and
1008 NCOUNTS the number of cases to support. Currently only NCOUNTS==0 or 1 is
1009 supported and this is built into this interface. The probabilities of taking
1010 the optimal paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
1011 COUNT2/ALL respectively within roundoff error). This generates the
1012 result into a temp and returns the temp; it does not replace or alter
1013 the original STMT. */
1014 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
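/* A sketch of the emitted code (derived from the statements built below;
   the subtraction step is only emitted when NCOUNTS is 1):

     result = op1;
     tmp1 = op2;
     if (result < tmp1)
       goto done;		// modulo is a no-op
     result = result - tmp1;	// one subtraction suffices
     if (result < tmp1)
       goto done;
     result = result % tmp1;	// fallback to the real modulo
   done:

   and RESULT is the returned temporary.  */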
1015
1016 static tree
1017 gimple_mod_subtract (gassign *stmt, profile_probability prob1,
1018 profile_probability prob2, int ncounts,
1019 gcov_type count1, gcov_type count2, gcov_type all)
1020 {
1021 gassign *stmt1;
1022 gimple *stmt2;
1023 gcond *stmt3;
1024 tree tmp1;
1025 gimple *bb1end, *bb2end = NULL, *bb3end;
1026 basic_block bb, bb2, bb3, bb4;
1027 tree optype, op1, op2;
1028 edge e12, e23 = 0, e24, e34, e14;
1029 gimple_stmt_iterator gsi;
1030 tree result;
1031
1032 gcc_assert (is_gimple_assign (stmt)
1033 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
1034
1035 optype = TREE_TYPE (gimple_assign_lhs (stmt));
1036 op1 = gimple_assign_rhs1 (stmt);
1037 op2 = gimple_assign_rhs2 (stmt);
1038
1039 bb = gimple_bb (stmt);
1040 gsi = gsi_for_stmt (stmt);
1041
1042 result = create_tmp_reg (optype, "PROF");
1043 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1044 stmt1 = gimple_build_assign (result, op1);
1045 stmt2 = gimple_build_assign (tmp1, op2);
1046 stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1047 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1048 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1049 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
1050 bb1end = stmt3;
1051
1052 if (ncounts) /* Assumed to be 0 or 1 */
1053 {
1054 stmt1 = gimple_build_assign (result, MINUS_EXPR, result, tmp1);
1055 stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1056 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1057 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1058 bb2end = stmt2;
1059 }
1060
1061 /* Fallback case. */
1062 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
1063 result, tmp1);
1064 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1065 bb3end = stmt1;
1066
1067 /* Fix CFG. */
1068 /* Edge e23 connects bb2 to bb3, etc. */
1069 /* However block 3 is optional; if it is not there, references
1070 to 3 really refer to block 2. */
1071 e12 = split_block (bb, bb1end);
1072 bb2 = e12->dest;
1073 bb2->count = profile_count::from_gcov_type (all - count1);
1074
1075 if (ncounts) /* Assumed to be 0 or 1. */
1076 {
1077 e23 = split_block (bb2, bb2end);
1078 bb3 = e23->dest;
1079 bb3->count = profile_count::from_gcov_type (all - count1 - count2);
1080 }
1081
1082 e34 = split_block (ncounts ? bb3 : bb2, bb3end);
1083 bb4 = e34->dest;
1084 bb4->count = profile_count::from_gcov_type (all);
1085
1086 e12->flags &= ~EDGE_FALLTHRU;
1087 e12->flags |= EDGE_FALSE_VALUE;
1088 e12->probability = prob1.invert ();
1089
1090 e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
1091 e14->probability = prob1;
1092
1093 if (ncounts) /* Assumed to be 0 or 1. */
1094 {
1095 e23->flags &= ~EDGE_FALLTHRU;
1096 e23->flags |= EDGE_FALSE_VALUE;
1097 e23->probability = prob2.invert ();
1098
1099 e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
1100 e24->probability = prob2;
1101 }
1102
1103 e34->probability = profile_probability::always ();
1104
1105 return result;
1106 }
1107
1108 /* Do transforms 3) and 4) on the statement pointed-to by SI if applicable. */
1109
1110 static bool
1111 gimple_mod_subtract_transform (gimple_stmt_iterator *si)
1112 {
1113 histogram_value histogram;
1114 enum tree_code code;
1115 gcov_type count, wrong_values, all;
1116 tree lhs_type, result;
1117 profile_probability prob1, prob2;
1118 unsigned int i, steps;
1119 gcov_type count1, count2;
1120 gassign *stmt;
1121 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
1122 if (!stmt)
1123 return false;
1124
1125 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1126 if (!INTEGRAL_TYPE_P (lhs_type))
1127 return false;
1128
1129 code = gimple_assign_rhs_code (stmt);
1130
1131 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
1132 return false;
1133
1134 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
1135 if (!histogram)
1136 return false;
1137
1138 all = 0;
1139 wrong_values = 0;
1140 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1141 all += histogram->hvalue.counters[i];
1142
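/* After the loop I equals STEPS; the next two counters of the interval
   histogram record the out-of-range cases. */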
1143 wrong_values += histogram->hvalue.counters[i];
1144 wrong_values += histogram->hvalue.counters[i+1];
1145 steps = histogram->hdata.intvl.steps;
1146 all += wrong_values;
1147 count1 = histogram->hvalue.counters[0];
1148 count2 = histogram->hvalue.counters[1];
1149
1150 if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
1151 {
1152 gimple_remove_histogram_value (cfun, stmt, histogram);
1153 return false;
1154 }
1155
1156 if (flag_profile_correction && count1 + count2 > all)
1157 all = count1 + count2;
1158
1159 gcc_assert (count1 + count2 <= all);
1160
1161 /* We require that we use just subtractions in at least 50% of all
1162 evaluations. */
1163 count = 0;
1164 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1165 {
1166 count += histogram->hvalue.counters[i];
1167 if (count * 2 >= all)
1168 break;
1169 }
1170 if (i == steps
1171 || optimize_bb_for_size_p (gimple_bb (stmt)))
1172 return false;
1173
1174 gimple_remove_histogram_value (cfun, stmt, histogram);
1175 if (dump_enabled_p ())
1176 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1177 "Transformation done: mod subtract\n");
1178
1179 /* Compute probability of taking the optimal path(s). */
1180 if (all > 0)
1181 {
1182 prob1 = profile_probability::probability_in_gcov_type (count1, all);
1183 prob2 = profile_probability::probability_in_gcov_type (count2, all);
1184 }
1185 else
1186 {
1187 prob1 = prob2 = profile_probability::never ();
1188 }
1189
1190 /* In practice, "steps" is always 2. This interface reflects this,
1191 and will need to be changed if "steps" can change. */
1192 result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);
1193
1194 gimple_assign_set_rhs_from_tree (si, result);
1195 update_stmt (gsi_stmt (*si));
1196
1197 return true;
1198 }
1199
1200 typedef int_hash <unsigned int, 0, UINT_MAX> profile_id_hash;
1201
1202 static hash_map<profile_id_hash, cgraph_node *> *cgraph_node_map = 0;
1203
1204 /* Returns true if the cgraph-node map is initialized.  This
1205 is used to test whether profile_ids have been created
1206 for cgraph_nodes. */
1207
1208 bool
1209 coverage_node_map_initialized_p (void)
1210 {
1211 return cgraph_node_map != 0;
1212 }
1213
1214 /* Initialize map from PROFILE_ID to CGRAPH_NODE.
1215 When LOCAL is true, the PROFILE_IDs are computed.  When it is false, we assume
1216 that the PROFILE_IDs were already assigned. */
1217
1218 void
1219 init_node_map (bool local)
1220 {
1221 struct cgraph_node *n;
1222 cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
1223
1224 FOR_EACH_DEFINED_FUNCTION (n)
1225 if (n->has_gimple_body_p () || n->thunk)
1226 {
1227 cgraph_node **val;
1228 dump_user_location_t loc
1229 = dump_user_location_t::from_function_decl (n->decl);
1230 if (local)
1231 {
1232 n->profile_id = coverage_compute_profile_id (n);
1233 while ((val = cgraph_node_map->get (n->profile_id))
1234 || !n->profile_id)
1235 {
1236 if (dump_enabled_p ())
1237 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1238 "Local profile-id %i conflict"
1239 " with nodes %s %s\n",
1240 n->profile_id,
1241 n->dump_name (),
1242 (*val)->dump_name ());
1243 n->profile_id = (n->profile_id + 1) & 0x7fffffff;
1244 }
1245 }
1246 else if (!n->profile_id)
1247 {
1248 if (dump_enabled_p ())
1249 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1250 "Node %s has no profile-id"
1251 " (profile feedback missing?)\n",
1252 n->dump_name ());
1253 continue;
1254 }
1255 else if ((val = cgraph_node_map->get (n->profile_id)))
1256 {
1257 if (dump_enabled_p ())
1258 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1259 "Node %s has IP profile-id %i conflict. "
1260 "Giving up.\n",
1261 n->dump_name (), n->profile_id);
1262 *val = NULL;
1263 continue;
1264 }
1265 cgraph_node_map->put (n->profile_id, n);
1266 }
1267 }
1268
1269 /* Delete the CGRAPH_NODE_MAP. */
1270
1271 void
1272 del_node_map (void)
1273 {
1274 delete cgraph_node_map;
1275 }
1276
1277 /* Return the cgraph node for the function with the given PROFILE_ID, or NULL. */
1278
1279 struct cgraph_node*
1280 find_func_by_profile_id (int profile_id)
1281 {
1282 cgraph_node **val = cgraph_node_map->get (profile_id);
1283 if (val)
1284 return *val;
1285 else
1286 return NULL;
1287 }
1288
1289 /* Do transformation
1290
1291 if (actual_callee_address == address_of_most_common_function/method)
1292 do direct call
1293 else
1294 old call
1295 */
1296
1297 gcall *
1298 gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
1299 profile_probability prob)
1300 {
1301 gcall *dcall_stmt;
1302 gassign *load_stmt;
1303 gcond *cond_stmt;
1304 tree tmp0, tmp1, tmp;
1305 basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
1306 edge e_cd, e_ci, e_di, e_dj = NULL, e_ij;
1307 gimple_stmt_iterator gsi;
1308 int lp_nr, dflags;
1309 edge e_eh, e;
1310 edge_iterator ei;
1311
1312 cond_bb = gimple_bb (icall_stmt);
1313 gsi = gsi_for_stmt (icall_stmt);
1314
1315 tmp0 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1316 tmp1 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1317 tmp = unshare_expr (gimple_call_fn (icall_stmt));
1318 load_stmt = gimple_build_assign (tmp0, tmp);
1319 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1320
1321 tmp = fold_convert (ptr_type_node, build_addr (direct_call->decl));
1322 load_stmt = gimple_build_assign (tmp1, tmp);
1323 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1324
1325 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1326 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1327
1328 if (TREE_CODE (gimple_vdef (icall_stmt)) == SSA_NAME)
1329 {
1330 unlink_stmt_vdef (icall_stmt);
1331 release_ssa_name (gimple_vdef (icall_stmt));
1332 }
1333 gimple_set_vdef (icall_stmt, NULL_TREE);
1334 gimple_set_vuse (icall_stmt, NULL_TREE);
1335 update_stmt (icall_stmt);
1336 dcall_stmt = as_a <gcall *> (gimple_copy (icall_stmt));
1337 gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
1338 dflags = flags_from_decl_or_type (direct_call->decl);
1339 if ((dflags & ECF_NORETURN) != 0
1340 && should_remove_lhs_p (gimple_call_lhs (dcall_stmt)))
1341 gimple_call_set_lhs (dcall_stmt, NULL_TREE);
1342 gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);
1343
1344 /* Fix CFG. */
1345 /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
1346 e_cd = split_block (cond_bb, cond_stmt);
1347 dcall_bb = e_cd->dest;
1348 dcall_bb->count = cond_bb->count.apply_probability (prob);
1349
1350 e_di = split_block (dcall_bb, dcall_stmt);
1351 icall_bb = e_di->dest;
1352 icall_bb->count = cond_bb->count - dcall_bb->count;
1353
1354 /* Do not disturb existing EH edges from the indirect call. */
1355 if (!stmt_ends_bb_p (icall_stmt))
1356 e_ij = split_block (icall_bb, icall_stmt);
1357 else
1358 {
1359 e_ij = find_fallthru_edge (icall_bb->succs);
1360 /* The indirect call might be noreturn. */
1361 if (e_ij != NULL)
1362 {
1363 e_ij->probability = profile_probability::always ();
1364 e_ij = single_pred_edge (split_edge (e_ij));
1365 }
1366 }
1367 if (e_ij != NULL)
1368 {
1369 join_bb = e_ij->dest;
1370 join_bb->count = cond_bb->count;
1371 }
1372
1373 e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1374 e_cd->probability = prob;
1375
1376 e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
1377 e_ci->probability = prob.invert ();
1378
1379 remove_edge (e_di);
1380
1381 if (e_ij != NULL)
1382 {
1383 if ((dflags & ECF_NORETURN) == 0)
1384 {
1385 e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
1386 e_dj->probability = profile_probability::always ();
1387 }
1388 e_ij->probability = profile_probability::always ();
1389 }
1390
1391 /* Insert PHI node for the call result if necessary. */
1392 if (gimple_call_lhs (icall_stmt)
1393 && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME
1394 && (dflags & ECF_NORETURN) == 0)
1395 {
1396 tree result = gimple_call_lhs (icall_stmt);
1397 gphi *phi = create_phi_node (result, join_bb);
1398 gimple_call_set_lhs (icall_stmt,
1399 duplicate_ssa_name (result, icall_stmt));
1400 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1401 gimple_call_set_lhs (dcall_stmt,
1402 duplicate_ssa_name (result, dcall_stmt));
1403 add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
1404 }
1405
1406 /* Build an EH edge for the direct call if necessary. */
1407 lp_nr = lookup_stmt_eh_lp (icall_stmt);
1408 if (lp_nr > 0 && stmt_could_throw_p (cfun, dcall_stmt))
1409 {
1410 add_stmt_to_eh_lp (dcall_stmt, lp_nr);
1411 }
1412
1413 FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
1414 if (e_eh->flags & (EDGE_EH | EDGE_ABNORMAL))
1415 {
1416 e = make_edge (dcall_bb, e_eh->dest, e_eh->flags);
1417 e->probability = e_eh->probability;
1418 for (gphi_iterator psi = gsi_start_phis (e_eh->dest);
1419 !gsi_end_p (psi); gsi_next (&psi))
1420 {
1421 gphi *phi = psi.phi ();
1422 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1423 PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
1424 }
1425 }
1426 if (!stmt_could_throw_p (cfun, dcall_stmt))
1427 gimple_purge_dead_eh_edges (dcall_bb);
1428 return dcall_stmt;
1429 }
1430
1431 /* Dump info about indirect call profile. */
1432
1433 static void
1434 dump_ic_profile (gimple_stmt_iterator *gsi)
1435 {
1436 gcall *stmt;
1437 histogram_value histogram;
1438 gcov_type val, count, all;
1439 struct cgraph_node *direct_call;
1440
1441 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1442 if (!stmt)
1443 return;
1444
1445 if (gimple_call_fndecl (stmt) != NULL_TREE)
1446 return;
1447
1448 if (gimple_call_internal_p (stmt))
1449 return;
1450
1451 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
1452 if (!histogram)
1453 return;
1454
1455 count = 0;
1456 all = histogram->hvalue.counters[0];
1457
1458 for (unsigned j = 0; j < GCOV_TOPN_MAXIMUM_TRACKED_VALUES; j++)
1459 {
1460 if (!get_nth_most_common_value (NULL, "indirect call", histogram, &val,
1461 &count, &all, j))
1462 return;
1463 if (!count)
1464 continue;
1465
1466 direct_call = find_func_by_profile_id ((int) val);
1467
1468 if (direct_call == NULL)
1469 dump_printf_loc (
1470 MSG_MISSED_OPTIMIZATION, stmt,
1471 "Indirect call -> direct call from other "
1472 "module %T=> %i (will resolve by ipa-profile only with LTO)\n",
1473 gimple_call_fn (stmt), (int) val);
1474 else
1475 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1476 "Indirect call -> direct call "
1477 "%T => %T (will resolve by ipa-profile)\n",
1478 gimple_call_fn (stmt), direct_call->decl);
1479 dump_printf_loc (MSG_NOTE, stmt,
1480 "hist->count %" PRId64 " hist->all %" PRId64 "\n",
1481 count, all);
1482 }
1483 }
1484
1485 /* Return true if the stringop CALL shall be profiled.  SIZE_ARG is
1486 set to the argument index for the size of the string operation. */
1487
1488 static bool
1489 interesting_stringop_to_profile_p (gcall *call, int *size_arg)
1490 {
1491 enum built_in_function fcode;
1492
1493 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (call));
1494 switch (fcode)
1495 {
1496 case BUILT_IN_MEMCPY:
1497 case BUILT_IN_MEMPCPY:
1498 case BUILT_IN_MEMMOVE:
1499 *size_arg = 2;
1500 return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
1501 INTEGER_TYPE, VOID_TYPE);
1502 case BUILT_IN_MEMSET:
1503 *size_arg = 2;
1504 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1505 INTEGER_TYPE, VOID_TYPE);
1506 case BUILT_IN_BZERO:
1507 *size_arg = 1;
1508 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1509 VOID_TYPE);
1510 default:
1511 return false;
1512 }
1513 }
1514
1515 /* Convert stringop (..., vcall_size)
1516 into
1517 if (vcall_size == icall_size)
1518 stringop (..., icall_size);
1519 else
1520 stringop (..., vcall_size);
1521 assuming we'll propagate a true constant into ICALL_SIZE later. */
1522
1523 static void
1524 gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, profile_probability prob,
1525 gcov_type count, gcov_type all)
1526 {
1527 gassign *tmp_stmt;
1528 gcond *cond_stmt;
1529 gcall *icall_stmt;
1530 tree tmp0, tmp1, vcall_size, optype;
1531 basic_block cond_bb, icall_bb, vcall_bb, join_bb;
1532 edge e_ci, e_cv, e_iv, e_ij, e_vj;
1533 gimple_stmt_iterator gsi;
1534 int size_arg;
1535
1536 if (!interesting_stringop_to_profile_p (vcall_stmt, &size_arg))
1537 gcc_unreachable ();
1538
1539 cond_bb = gimple_bb (vcall_stmt);
1540 gsi = gsi_for_stmt (vcall_stmt);
1541
1542 vcall_size = gimple_call_arg (vcall_stmt, size_arg);
1543 optype = TREE_TYPE (vcall_size);
1544
1545 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
1546 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1547 tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
1548 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1549
1550 tmp_stmt = gimple_build_assign (tmp1, vcall_size);
1551 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1552
1553 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1554 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1555
1556 if (TREE_CODE (gimple_vdef (vcall_stmt)) == SSA_NAME)
1557 {
1558 unlink_stmt_vdef (vcall_stmt);
1559 release_ssa_name (gimple_vdef (vcall_stmt));
1560 }
1561 gimple_set_vdef (vcall_stmt, NULL);
1562 gimple_set_vuse (vcall_stmt, NULL);
1563 update_stmt (vcall_stmt);
1564 icall_stmt = as_a <gcall *> (gimple_copy (vcall_stmt));
1565 gimple_call_set_arg (icall_stmt, size_arg,
1566 fold_convert (optype, icall_size));
1567 gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);
1568
1569 /* Fix CFG. */
1570 /* Edge e_ci connects cond_bb to icall_bb, etc. */
1571 e_ci = split_block (cond_bb, cond_stmt);
1572 icall_bb = e_ci->dest;
1573 icall_bb->count = profile_count::from_gcov_type (count);
1574
1575 e_iv = split_block (icall_bb, icall_stmt);
1576 vcall_bb = e_iv->dest;
1577 vcall_bb->count = profile_count::from_gcov_type (all - count);
1578
1579 e_vj = split_block (vcall_bb, vcall_stmt);
1580 join_bb = e_vj->dest;
1581 join_bb->count = profile_count::from_gcov_type (all);
1582
1583 e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1584 e_ci->probability = prob;
1585
1586 e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
1587 e_cv->probability = prob.invert ();
1588
1589 remove_edge (e_iv);
1590
1591 e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
1592 e_ij->probability = profile_probability::always ();
1593
1594 e_vj->probability = profile_probability::always ();
1595
1596 /* Insert PHI node for the call result if necessary. */
1597 if (gimple_call_lhs (vcall_stmt)
1598 && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
1599 {
1600 tree result = gimple_call_lhs (vcall_stmt);
1601 gphi *phi = create_phi_node (result, join_bb);
1602 gimple_call_set_lhs (vcall_stmt,
1603 duplicate_ssa_name (result, vcall_stmt));
1604 add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
1605 gimple_call_set_lhs (icall_stmt,
1606 duplicate_ssa_name (result, icall_stmt));
1607 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1608 }
1609
1610 /* Because these are all string op builtins, they're all nothrow. */
1611 gcc_assert (!stmt_could_throw_p (cfun, vcall_stmt));
1612 gcc_assert (!stmt_could_throw_p (cfun, icall_stmt));
1613 }
1614
1615 /* Transform the string operation pointed to by GSI when the profile shows
1616 that its size argument is usually one constant value; return true if so. */
1617
1618 static bool
1619 gimple_stringops_transform (gimple_stmt_iterator *gsi)
1620 {
1621 gcall *stmt;
1622 tree blck_size;
1623 enum built_in_function fcode;
1624 histogram_value histogram;
1625 gcov_type count, all, val;
1626 tree dest, src;
1627 unsigned int dest_align, src_align;
1628 profile_probability prob;
1629 tree tree_val;
1630 int size_arg;
1631
1632 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1633 if (!stmt)
1634 return false;
1635
1636 if (!gimple_call_builtin_p (gsi_stmt (*gsi), BUILT_IN_NORMAL))
1637 return false;
1638
1639 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1640 return false;
1641
1642 blck_size = gimple_call_arg (stmt, size_arg);
1643 if (TREE_CODE (blck_size) == INTEGER_CST)
1644 return false;
1645
1646 histogram = gimple_histogram_value_of_type (cfun, stmt,
1647 HIST_TYPE_TOPN_VALUES);
1648 if (!histogram)
1649 return false;
1650
1651 if (!get_nth_most_common_value (stmt, "stringops", histogram, &val, &count,
1652 &all))
1653 return false;
1654
1655 gimple_remove_histogram_value (cfun, stmt, histogram);
1656
1657 /* We require that count is at least half of all. */
1658 if (2 * count < all || optimize_bb_for_size_p (gimple_bb (stmt)))
1659 return false;
1660 if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
1661 return false;
1662 if (all > 0)
1663 prob = profile_probability::probability_in_gcov_type (count, all);
1664 else
1665 prob = profile_probability::never ();
1666
1667 dest = gimple_call_arg (stmt, 0);
1668 dest_align = get_pointer_alignment (dest);
1669 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
1670 switch (fcode)
1671 {
1672 case BUILT_IN_MEMCPY:
1673 case BUILT_IN_MEMPCPY:
1674 case BUILT_IN_MEMMOVE:
1675 src = gimple_call_arg (stmt, 1);
1676 src_align = get_pointer_alignment (src);
1677 if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
1678 return false;
1679 break;
1680 case BUILT_IN_MEMSET:
1681 if (!can_store_by_pieces (val, builtin_memset_read_str,
1682 gimple_call_arg (stmt, 1),
1683 dest_align, true))
1684 return false;
1685 break;
1686 case BUILT_IN_BZERO:
1687 if (!can_store_by_pieces (val, builtin_memset_read_str,
1688 integer_zero_node,
1689 dest_align, true))
1690 return false;
1691 break;
1692 default:
1693 gcc_unreachable ();
1694 }
1695
1696 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
1697 tree_val = build_int_cst (get_gcov_type (), val);
1698 else
1699 {
1700 HOST_WIDE_INT a[2];
1701 a[0] = (unsigned HOST_WIDE_INT) val;
1702 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
1703
1704 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
1705 TYPE_PRECISION (get_gcov_type ()), false));
1706 }
1707
1708 if (dump_enabled_p ())
1709 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1710 "Transformation done: single value %i stringop for %s\n",
1711 (int)val, built_in_names[(int)fcode]);
1712
1713 gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);
1714
1715 return true;
1716 }
1717
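/* Compute the expected size and alignment for the string operation STMT from
   its AVERAGE and IOR histograms, store them to EXPECTED_SIZE and
   EXPECTED_ALIGN (-1 and 0 respectively when no data is available), and
   release the histograms. */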
1718 void
1719 stringop_block_profile (gimple *stmt, unsigned int *expected_align,
1720 HOST_WIDE_INT *expected_size)
1721 {
1722 histogram_value histogram;
1723 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
1724
1725 if (!histogram)
1726 *expected_size = -1;
1727 else if (!histogram->hvalue.counters[1])
1728 {
1729 *expected_size = -1;
1730 gimple_remove_histogram_value (cfun, stmt, histogram);
1731 }
1732 else
1733 {
1734 gcov_type size;
1735 size = ((histogram->hvalue.counters[0]
1736 + histogram->hvalue.counters[1] / 2)
1737 / histogram->hvalue.counters[1]);
1738 /* Even if we can hold bigger value in SIZE, INT_MAX
1739 is safe "infinity" for code generation strategies. */
1740 if (size > INT_MAX)
1741 size = INT_MAX;
1742 *expected_size = size;
1743 gimple_remove_histogram_value (cfun, stmt, histogram);
1744 }
1745
1746 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
1747
1748 if (!histogram)
1749 *expected_align = 0;
1750 else if (!histogram->hvalue.counters[0])
1751 {
1752 gimple_remove_histogram_value (cfun, stmt, histogram);
1753 *expected_align = 0;
1754 }
1755 else
1756 {
1757 gcov_type count;
1758 unsigned int alignment;
1759
1760 count = histogram->hvalue.counters[0];
1761 alignment = 1;
1762 while (!(count & alignment)
1763 && (alignment <= UINT_MAX / 2 / BITS_PER_UNIT))
1764 alignment <<= 1;
1765 *expected_align = alignment * BITS_PER_UNIT;
1766 gimple_remove_histogram_value (cfun, stmt, histogram);
1767 }
1768 }
1769
1770 \f
1771 /* Find values inside STMT for which we want to measure histograms for
1772 division/modulo optimization. */
1773
1774 static void
1775 gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
1776 {
1777 tree lhs, divisor, op0, type;
1778 histogram_value hist;
1779
1780 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1781 return;
1782
1783 lhs = gimple_assign_lhs (stmt);
1784 type = TREE_TYPE (lhs);
1785 if (!INTEGRAL_TYPE_P (type))
1786 return;
1787
1788 switch (gimple_assign_rhs_code (stmt))
1789 {
1790 case TRUNC_DIV_EXPR:
1791 case TRUNC_MOD_EXPR:
1792 divisor = gimple_assign_rhs2 (stmt);
1793 op0 = gimple_assign_rhs1 (stmt);
1794
1795 if (TREE_CODE (divisor) == SSA_NAME)
1796 /* Check for the case where the divisor is the same value most
1797 of the time. */
1798 values->safe_push (gimple_alloc_histogram_value (cfun,
1799 HIST_TYPE_TOPN_VALUES,
1800 stmt, divisor));
1801
1802 /* For mod, check whether it is not often a noop (or replaceable by
1803 a few subtractions). */
1804 if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
1805 && TYPE_UNSIGNED (type)
1806 && TREE_CODE (divisor) == SSA_NAME)
1807 {
1808 tree val;
1809 /* Check for a special case where the divisor is power of 2. */
1810 values->safe_push (gimple_alloc_histogram_value (cfun,
1811 HIST_TYPE_POW2,
1812 stmt, divisor));
1813 val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
1814 hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
1815 stmt, val);
1816 hist->hdata.intvl.int_start = 0;
1817 hist->hdata.intvl.steps = 2;
1818 values->safe_push (hist);
1819 }
1820 return;
1821
1822 default:
1823 return;
1824 }
1825 }
1826
1827 /* Find calls inside STMT for which we want to measure histograms for
1828 indirect/virtual call optimization. */
1829
1830 static void
1831 gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
1832 {
1833 tree callee;
1834
1835 if (gimple_code (stmt) != GIMPLE_CALL
1836 || gimple_call_internal_p (stmt)
1837 || gimple_call_fndecl (stmt) != NULL_TREE)
1838 return;
1839
1840 callee = gimple_call_fn (stmt);
1841 histogram_value v = gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
1842 stmt, callee);
1843 values->safe_push (v);
1844
1845 return;
1846 }
1847
1848 /* Find values inside STMT for which we want to measure histograms for
1849 string operations. */
1850
1851 static void
1852 gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
1853 {
1854 gcall *stmt;
1855 tree blck_size;
1856 tree dest;
1857 int size_arg;
1858
1859 stmt = dyn_cast <gcall *> (gs);
1860 if (!stmt)
1861 return;
1862
1863 if (!gimple_call_builtin_p (gs, BUILT_IN_NORMAL))
1864 return;
1865
1866 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1867 return;
1868
1869 dest = gimple_call_arg (stmt, 0);
1870 blck_size = gimple_call_arg (stmt, size_arg);
1871
1872 if (TREE_CODE (blck_size) != INTEGER_CST)
1873 {
1874 values->safe_push (gimple_alloc_histogram_value (cfun,
1875 HIST_TYPE_TOPN_VALUES,
1876 stmt, blck_size));
1877 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
1878 stmt, blck_size));
1879 }
1880
1881 if (TREE_CODE (blck_size) != INTEGER_CST)
1882 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
1883 stmt, dest));
1884 }
1885
1886 /* Find values inside STMT for which we want to measure histograms and add
1887 them to the list VALUES. */
1888
1889 static void
1890 gimple_values_to_profile (gimple *stmt, histogram_values *values)
1891 {
1892 gimple_divmod_values_to_profile (stmt, values);
1893 gimple_stringops_values_to_profile (stmt, values);
1894 gimple_indirect_call_to_profile (stmt, values);
1895 }
1896
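/* Collect into VALUES all values in the current function for which we want
   to measure histograms, and set the number of counters each histogram
   requires. */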
1897 void
1898 gimple_find_values_to_profile (histogram_values *values)
1899 {
1900 basic_block bb;
1901 gimple_stmt_iterator gsi;
1902 unsigned i;
1903 histogram_value hist = NULL;
1904 values->create (0);
1905
1906 FOR_EACH_BB_FN (bb, cfun)
1907 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1908 gimple_values_to_profile (gsi_stmt (gsi), values);
1909
1910 values->safe_push (gimple_alloc_histogram_value (cfun,
1911 HIST_TYPE_TIME_PROFILE));
1912
1913 FOR_EACH_VEC_ELT (*values, i, hist)
1914 {
1915 switch (hist->type)
1916 {
1917 case HIST_TYPE_INTERVAL:
1918 hist->n_counters = hist->hdata.intvl.steps + 2;
1919 break;
1920
1921 case HIST_TYPE_POW2:
1922 hist->n_counters = 2;
1923 break;
1924
1925 case HIST_TYPE_TOPN_VALUES:
1926 case HIST_TYPE_INDIR_CALL:
1927 hist->n_counters = GCOV_TOPN_MEM_COUNTERS;
1928 break;
1929
1930 case HIST_TYPE_TIME_PROFILE:
1931 hist->n_counters = 1;
1932 break;
1933
1934 case HIST_TYPE_AVERAGE:
1935 hist->n_counters = 2;
1936 break;
1937
1938 case HIST_TYPE_IOR:
1939 hist->n_counters = 1;
1940 break;
1941
1942 default:
1943 gcc_unreachable ();
1944 }
1945 if (dump_file && hist->hvalue.stmt != NULL)
1946 {
1947 fprintf (dump_file, "Stmt ");
1948 print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
1949 dump_histogram_value (dump_file, hist);
1950 }
1951 }
1952 }