gcc/tree-sra.c
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2008-2021 Free Software Foundation, Inc.
5 Contributed by Martin Jambor <mjambor@suse.cz>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* This file implements Scalar Replacement of Aggregates (SRA). SRA is run
24 twice, once in the early stages of compilation (early SRA) and once in the
25 late stages (late SRA). The aim of both is to turn references to scalar
26 parts of aggregates into uses of independent scalar variables.
27
28 The two passes are nearly identical, the only difference is that early SRA
29 does not scalarize unions which are used as the result in a GIMPLE_RETURN
30 statement because together with inlining this can lead to weird type
31 conversions.
32
33 Both passes operate in four stages:
34
35 1. The declarations that have properties which make them candidates for
36 scalarization are identified in function find_var_candidates(). The
37 candidates are stored in candidate_bitmap.
38
39 2. The function body is scanned. In the process, declarations which are
40 used in a manner that prevents their scalarization are removed from the
41 candidate bitmap. More importantly, for every access into an aggregate,
42 an access structure (struct access) is created by create_access() and
43 stored in a vector associated with the aggregate. Among other
44 information, the aggregate declaration, the offset and size of the access
45 and its type are stored in the structure.
46
47 On a related note, assign_link structures are created for every assign
48 statement between candidate aggregates and attached to the related
49 accesses.
50
51 3. The vectors of accesses are analyzed. They are first sorted according to
52 their offset and size and then scanned for partially overlapping accesses
53 (i.e. those which overlap but one is not entirely within another). Such
54 an access disqualifies the whole aggregate from being scalarized.
55
56 If there is no such inhibiting overlap, a representative access structure
57 is chosen for every unique combination of offset and size. Afterwards,
58 the pass builds a set of trees from these structures, in which children
59 of an access are within their parent (in terms of offset and size).
60
61 Then accesses are propagated whenever possible (i.e. in cases when it
62 does not create a partially overlapping access) across assign_links from
63 the right hand side to the left hand side.
64
65 Then the set of trees for each declaration is traversed again and those
66 accesses which should be replaced by a scalar are identified.
67
68 4. The function is traversed again, and for every reference into an
69 aggregate that has some component which is about to be scalarized,
70 statements are amended and new statements are created as necessary.
71 Finally, if a parameter got scalarized, the scalar replacements are
72 initialized with values from respective parameter aggregates. */
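
/* As an editorial illustration of the intended effect (not part of the pass
   itself; the type and function names below are made up), a function such as

     struct point { int x; int y; };

     int
     sum_point (void)
     {
       struct point p;
       p.x = 1;
       p.y = 2;
       return p.x + p.y;
     }

   would, after SRA, have the references p.x and p.y rewritten to use two
   independent scalar replacements (conceptually p$x and p$y), so that the
   aggregate P itself can disappear from the IL and the scalar optimizers can
   operate on the replacements directly.  */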
73
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "dbgcnt.h"
100 #include "builtins.h"
101 #include "tree-sra.h"
102
103
104 /* Enumeration of all aggregate reductions we can do. */
105 enum sra_mode { SRA_MODE_EARLY_IPA, /* early call regularization */
106 SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
107 SRA_MODE_INTRA }; /* late intraprocedural SRA */
108
109 /* Global variable describing which aggregate reduction we are performing at
110 the moment. */
111 static enum sra_mode sra_mode;
112
113 struct assign_link;
114
115 /* ACCESS represents each access to an aggregate variable (as a whole or a
116 part). It can also represent a group of accesses that refer to exactly the
117 same fragment of an aggregate (i.e. those that have exactly the same offset
118 and size). Such representatives for a single aggregate, once determined,
119 are linked in a linked list and have the group fields set.
120
121 Moreover, when doing intraprocedural SRA, a tree is built from those
122 representatives (by the means of first_child and next_sibling pointers), in
123 which all items in a subtree are "within" the root, i.e. their offset is
124 greater or equal to offset of the root and offset+size is smaller or equal
125 to offset+size of the root. Children of an access are sorted by offset.
126
127 Note that accesses to parts of vector and complex number types are always
128 represented by an access to the whole complex number or vector. It is the
129 duty of the modifying functions to replace them appropriately. */
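
/* An editorial illustration (assuming a 32-bit int and no padding; the names
   are made up): for a declaration

     struct S { int a; struct { int b; int c; } in; } s;

   accessed as s, s.in and s.in.b, the group representatives could form a tree
   in which the access covering all of s (offset 0, size 96) is the root, the
   access for s.in (offset 32, size 64) is its child, and the access for
   s.in.b (offset 32, size 32) is in turn a child of that, with siblings
   ordered by offset.  */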
130
131 struct access
132 {
133 /* Values returned by `get_ref_base_and_extent' for each component reference.
134 If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
135 `SIZE = TREE_SIZE (TREE_TYPE (expr))'. */
136 HOST_WIDE_INT offset;
137 HOST_WIDE_INT size;
138 tree base;
139
140 /* Expression. It is context dependent so do not use it to create new
141 expressions to access the original aggregate. See PR 42154 for a
142 testcase. */
143 tree expr;
144 /* Type. */
145 tree type;
146
147 /* The statement this access belongs to. */
148 gimple *stmt;
149
150 /* Next group representative for this aggregate. */
151 struct access *next_grp;
152
153 /* Pointer to the group representative. Pointer to itself if the struct is
154 the representative. */
155 struct access *group_representative;
156
157 /* After the access tree has been constructed, this points to the parent of the
158 current access, if there is one. NULL for roots. */
159 struct access *parent;
160
161 /* If this access has any children (in terms of the definition above), this
162 points to the first one. */
163 struct access *first_child;
164
165 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166 described above. */
167 struct access *next_sibling;
168
169 /* Pointers to the first and last element in the linked list of assign
170 links for propagation from RHS to LHS. */
171 struct assign_link *first_rhs_link, *last_rhs_link;
172
173 /* Pointers to the first and last element in the linked list of assign
174 links for propagation from LHS to RHS. */
175 struct assign_link *first_lhs_link, *last_lhs_link;
176
177 /* Pointer to the next access in the work queues. */
178 struct access *next_rhs_queued, *next_lhs_queued;
179
180 /* Replacement variable for this access "region." Never to be accessed
181 directly, always only by the means of get_access_replacement() and only
182 when grp_to_be_replaced flag is set. */
183 tree replacement_decl;
184
185 /* Is this access made in reverse storage order? */
186 unsigned reverse : 1;
187
188 /* Is this particular access a write access? */
189 unsigned write : 1;
190
191 /* Is this access currently in the rhs work queue? */
192 unsigned grp_rhs_queued : 1;
193
194 /* Is this access currently in the lhs work queue? */
195 unsigned grp_lhs_queued : 1;
196
197 /* Does this group contain a write access? This flag is propagated down the
198 access tree. */
199 unsigned grp_write : 1;
200
201 /* Does this group contain a read access? This flag is propagated down the
202 access tree. */
203 unsigned grp_read : 1;
204
205 /* Does this group contain a read access that comes from an assignment
206 statement? This flag is propagated down the access tree. */
207 unsigned grp_assignment_read : 1;
208
209 /* Does this group contain a write access that comes from an assignment
210 statement? This flag is propagated down the access tree. */
211 unsigned grp_assignment_write : 1;
212
213 /* Does this group contain a read access through a scalar type? This flag is
214 not propagated in the access tree in any direction. */
215 unsigned grp_scalar_read : 1;
216
217 /* Does this group contain a write access through a scalar type? This flag
218 is not propagated in the access tree in any direction. */
219 unsigned grp_scalar_write : 1;
220
221 /* In a root of an access tree, true means that the entire tree should be
222 totally scalarized - that all scalar leaves should be scalarized and
223 non-root grp_total_scalarization accesses should be honored. Otherwise,
224 non-root accesses with grp_total_scalarization should never get scalar
225 replacements. */
226 unsigned grp_total_scalarization : 1;
227
228 /* Other passes of the analysis use this bit to make function
229 analyze_access_subtree create scalar replacements for this group if
230 possible. */
231 unsigned grp_hint : 1;
232
233 /* Is the subtree rooted in this access fully covered by scalar
234 replacements? */
235 unsigned grp_covered : 1;
236
237 /* If set to true, this access and all below it in an access tree must not be
238 scalarized. */
239 unsigned grp_unscalarizable_region : 1;
240
241 /* Whether data have been written to parts of the aggregate covered by this
242 access which is not to be scalarized. This flag is propagated up in the
243 access tree. */
244 unsigned grp_unscalarized_data : 1;
245
246 /* Set if all accesses in the group consist of the same chain of
247 COMPONENT_REFs and ARRAY_REFs. */
248 unsigned grp_same_access_path : 1;
249
250 /* Does this access and/or group contain a write access through a
251 BIT_FIELD_REF? */
252 unsigned grp_partial_lhs : 1;
253
254 /* Set when a scalar replacement should be created for this variable. */
255 unsigned grp_to_be_replaced : 1;
256
257 /* Set when we want a replacement for the sole purpose of having it in
258 generated debug statements. */
259 unsigned grp_to_be_debug_replaced : 1;
260
261 /* Should TREE_NO_WARNING of a replacement be set? */
262 unsigned grp_no_warning : 1;
263 };
264
265 typedef struct access *access_p;
266
267
268 /* Alloc pool for allocating access structures. */
269 static object_allocator<struct access> access_pool ("SRA accesses");
270
271 /* A structure linking lhs and rhs accesses from an aggregate assignment. They
272 are used to propagate subaccesses from rhs to lhs and vice versa as long as
273 they don't conflict with what is already there. In the RHS->LHS direction,
274 we also propagate grp_write flag to lazily mark that the access contains any
275 meaningful data. */
276 struct assign_link
277 {
278 struct access *lacc, *racc;
279 struct assign_link *next_rhs, *next_lhs;
280 };
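
/* An editorial example with made-up variable names: for an aggregate
   assignment

     dst = src;

   where both DST and SRC are SRA candidates, build_accesses_from_assign
   allocates one assign_link whose LACC is the access created for DST and
   whose RACC is the access created for SRC.  The link is put both on RACC's
   rhs list and on LACC's lhs list, so subaccesses discovered on either side
   can later be propagated to the other.  */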
281
282 /* Alloc pool for allocating assign link structures. */
283 static object_allocator<assign_link> assign_link_pool ("SRA links");
284
285 /* Base (tree) -> Vector (vec<access_p> *) map. */
286 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
287
288 /* Hash to limit creation of artificial accesses. */
289 static hash_map<tree, unsigned> *propagation_budget;
290
291 /* Candidate hash table helpers. */
292
293 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
294 {
295 static inline hashval_t hash (const tree_node *);
296 static inline bool equal (const tree_node *, const tree_node *);
297 };
298
299 /* Hash a tree in a uid_decl_map. */
300
301 inline hashval_t
302 uid_decl_hasher::hash (const tree_node *item)
303 {
304 return item->decl_minimal.uid;
305 }
306
307 /* Return true if the DECL_UID in both trees are equal. */
308
309 inline bool
310 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
311 {
312 return (a->decl_minimal.uid == b->decl_minimal.uid);
313 }
314
315 /* Set of candidates. */
316 static bitmap candidate_bitmap;
317 static hash_table<uid_decl_hasher> *candidates;
318
319 /* For a candidate UID, return the candidate's decl. */
320
321 static inline tree
322 candidate (unsigned uid)
323 {
324 tree_node t;
325 t.decl_minimal.uid = uid;
326 return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
327 }
328
329 /* Bitmap of candidates which we should try to entirely scalarize away and
330 those which cannot be (because they are, and need to be, used as a whole). */
331 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
332
333 /* Bitmap of candidates in the constant pool, which cannot be scalarized
334 because this would produce non-constant expressions (e.g. Ada). */
335 static bitmap disqualified_constants;
336
337 /* Obstack for creation of fancy names. */
338 static struct obstack name_obstack;
339
340 /* Head of a linked list of accesses that need to have their subaccesses
341 propagated to their assignment counterparts. */
342 static struct access *rhs_work_queue_head, *lhs_work_queue_head;
343
344 /* Dump contents of ACCESS to file F in a human friendly way. If GRP is true,
345 representative fields are dumped, otherwise those which only describe the
346 individual access are. */
347
348 static struct
349 {
350 /* Number of processed aggregates is readily available in
351 analyze_all_variable_accesses and so is not stored here. */
352
353 /* Number of created scalar replacements. */
354 int replacements;
355
356 /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
357 expression. */
358 int exprs;
359
360 /* Number of statements created by generate_subtree_copies. */
361 int subtree_copies;
362
363 /* Number of statements created by load_assign_lhs_subreplacements. */
364 int subreplacements;
365
366 /* Number of times sra_modify_assign has deleted a statement. */
367 int deleted;
368
369 /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
370 RHS separately due to type conversions or nonexistent matching
371 references. */
372 int separate_lhs_rhs_handling;
373
374 /* Number of parameters that were removed because they were unused. */
375 int deleted_unused_parameters;
376
377 /* Number of scalars passed as parameters by reference that have been
378 converted to be passed by value. */
379 int scalar_by_ref_to_by_val;
380
381 /* Number of aggregate parameters that were replaced by one or more of their
382 components. */
383 int aggregate_params_reduced;
384
385 /* Number of components created when splitting aggregate parameters. */
386 int param_reductions_created;
387 } sra_stats;
388
389 static void
390 dump_access (FILE *f, struct access *access, bool grp)
391 {
392 fprintf (f, "access { ");
393 fprintf (f, "base = (%d)'", DECL_UID (access->base));
394 print_generic_expr (f, access->base);
395 fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
396 fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
397 fprintf (f, ", expr = ");
398 print_generic_expr (f, access->expr);
399 fprintf (f, ", type = ");
400 print_generic_expr (f, access->type);
401 fprintf (f, ", reverse = %d", access->reverse);
402 if (grp)
403 fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
404 "grp_assignment_write = %d, grp_scalar_read = %d, "
405 "grp_scalar_write = %d, grp_total_scalarization = %d, "
406 "grp_hint = %d, grp_covered = %d, "
407 "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
408 "grp_same_access_path = %d, grp_partial_lhs = %d, "
409 "grp_to_be_replaced = %d, grp_to_be_debug_replaced = %d}\n",
410 access->grp_read, access->grp_write, access->grp_assignment_read,
411 access->grp_assignment_write, access->grp_scalar_read,
412 access->grp_scalar_write, access->grp_total_scalarization,
413 access->grp_hint, access->grp_covered,
414 access->grp_unscalarizable_region, access->grp_unscalarized_data,
415 access->grp_same_access_path, access->grp_partial_lhs,
416 access->grp_to_be_replaced, access->grp_to_be_debug_replaced);
417 else
418 fprintf (f, ", write = %d, grp_total_scalarization = %d, "
419 "grp_partial_lhs = %d}\n",
420 access->write, access->grp_total_scalarization,
421 access->grp_partial_lhs);
422 }
423
424 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL. */
425
426 static void
427 dump_access_tree_1 (FILE *f, struct access *access, int level)
428 {
429 do
430 {
431 int i;
432
433 for (i = 0; i < level; i++)
434 fputs ("* ", f);
435
436 dump_access (f, access, true);
437
438 if (access->first_child)
439 dump_access_tree_1 (f, access->first_child, level + 1);
440
441 access = access->next_sibling;
442 }
443 while (access);
444 }
445
446 /* Dump all access trees for a variable, given the pointer to the first root in
447 ACCESS. */
448
449 static void
450 dump_access_tree (FILE *f, struct access *access)
451 {
452 for (; access; access = access->next_grp)
453 dump_access_tree_1 (f, access, 0);
454 }
455
456 /* Return true iff ACC is non-NULL and has subaccesses. */
457
458 static inline bool
459 access_has_children_p (struct access *acc)
460 {
461 return acc && acc->first_child;
462 }
463
464 /* Return true iff ACC is (partly) covered by at least one replacement. */
465
466 static bool
467 access_has_replacements_p (struct access *acc)
468 {
469 struct access *child;
470 if (acc->grp_to_be_replaced)
471 return true;
472 for (child = acc->first_child; child; child = child->next_sibling)
473 if (access_has_replacements_p (child))
474 return true;
475 return false;
476 }
477
478 /* Return a vector of pointers to accesses for the variable given in BASE or
479 NULL if there is none. */
480
481 static vec<access_p> *
482 get_base_access_vector (tree base)
483 {
484 return base_access_vec->get (base);
485 }
486
487 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
488 in ACCESS. Return NULL if it cannot be found. */
489
490 static struct access *
491 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
492 HOST_WIDE_INT size)
493 {
494 while (access && (access->offset != offset || access->size != size))
495 {
496 struct access *child = access->first_child;
497
498 while (child && (child->offset + child->size <= offset))
499 child = child->next_sibling;
500 access = child;
501 }
502
503 /* Total scalarization does not replace single field structures with their
504 single field but rather creates an access for them underneath. Look for
505 it. */
506 if (access)
507 while (access->first_child
508 && access->first_child->offset == offset
509 && access->first_child->size == size)
510 access = access->first_child;
511
512 return access;
513 }
514
515 /* Return the first group representative for DECL or NULL if none exists. */
516
517 static struct access *
518 get_first_repr_for_decl (tree base)
519 {
520 vec<access_p> *access_vec;
521
522 access_vec = get_base_access_vector (base);
523 if (!access_vec)
524 return NULL;
525
526 return (*access_vec)[0];
527 }
528
529 /* Find an access representative for the variable BASE and given OFFSET and
530 SIZE. Requires that access trees have already been built. Return NULL if
531 it cannot be found. */
532
533 static struct access *
534 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
535 HOST_WIDE_INT size)
536 {
537 struct access *access;
538
539 access = get_first_repr_for_decl (base);
540 while (access && (access->offset + access->size <= offset))
541 access = access->next_grp;
542 if (!access)
543 return NULL;
544
545 return find_access_in_subtree (access, offset, size);
546 }
547
548 /* Add LINK to the linked list of assign links of RACC. */
549
550 static void
551 add_link_to_rhs (struct access *racc, struct assign_link *link)
552 {
553 gcc_assert (link->racc == racc);
554
555 if (!racc->first_rhs_link)
556 {
557 gcc_assert (!racc->last_rhs_link);
558 racc->first_rhs_link = link;
559 }
560 else
561 racc->last_rhs_link->next_rhs = link;
562
563 racc->last_rhs_link = link;
564 link->next_rhs = NULL;
565 }
566
567 /* Add LINK to the linked list of lhs assign links of LACC. */
568
569 static void
570 add_link_to_lhs (struct access *lacc, struct assign_link *link)
571 {
572 gcc_assert (link->lacc == lacc);
573
574 if (!lacc->first_lhs_link)
575 {
576 gcc_assert (!lacc->last_lhs_link);
577 lacc->first_lhs_link = link;
578 }
579 else
580 lacc->last_lhs_link->next_lhs = link;
581
582 lacc->last_lhs_link = link;
583 link->next_lhs = NULL;
584 }
585
586 /* Move all link structures in their linked list in OLD_ACC to the linked list
587 in NEW_ACC. */
588 static void
589 relink_to_new_repr (struct access *new_acc, struct access *old_acc)
590 {
591 if (old_acc->first_rhs_link)
592 {
593
594 if (new_acc->first_rhs_link)
595 {
596 gcc_assert (!new_acc->last_rhs_link->next_rhs);
597 gcc_assert (!old_acc->last_rhs_link
598 || !old_acc->last_rhs_link->next_rhs);
599
600 new_acc->last_rhs_link->next_rhs = old_acc->first_rhs_link;
601 new_acc->last_rhs_link = old_acc->last_rhs_link;
602 }
603 else
604 {
605 gcc_assert (!new_acc->last_rhs_link);
606
607 new_acc->first_rhs_link = old_acc->first_rhs_link;
608 new_acc->last_rhs_link = old_acc->last_rhs_link;
609 }
610 old_acc->first_rhs_link = old_acc->last_rhs_link = NULL;
611 }
612 else
613 gcc_assert (!old_acc->last_rhs_link);
614
615 if (old_acc->first_lhs_link)
616 {
617
618 if (new_acc->first_lhs_link)
619 {
620 gcc_assert (!new_acc->last_lhs_link->next_lhs);
621 gcc_assert (!old_acc->last_lhs_link
622 || !old_acc->last_lhs_link->next_lhs);
623
624 new_acc->last_lhs_link->next_lhs = old_acc->first_lhs_link;
625 new_acc->last_lhs_link = old_acc->last_lhs_link;
626 }
627 else
628 {
629 gcc_assert (!new_acc->last_lhs_link);
630
631 new_acc->first_lhs_link = old_acc->first_lhs_link;
632 new_acc->last_lhs_link = old_acc->last_lhs_link;
633 }
634 old_acc->first_lhs_link = old_acc->last_lhs_link = NULL;
635 }
636 else
637 gcc_assert (!old_acc->last_lhs_link);
638
639 }
640
641 /* Add ACCESS to the work queue for propagation of subaccesses from RHS to
642 LHS (which is actually a stack). */
643
644 static void
645 add_access_to_rhs_work_queue (struct access *access)
646 {
647 if (access->first_rhs_link && !access->grp_rhs_queued)
648 {
649 gcc_assert (!access->next_rhs_queued);
650 access->next_rhs_queued = rhs_work_queue_head;
651 access->grp_rhs_queued = 1;
652 rhs_work_queue_head = access;
653 }
654 }
655
656 /* Add ACCESS to the work queue for propagation of subaccesses from LHS to
657 RHS (which is actually a stack). */
658
659 static void
660 add_access_to_lhs_work_queue (struct access *access)
661 {
662 if (access->first_lhs_link && !access->grp_lhs_queued)
663 {
664 gcc_assert (!access->next_lhs_queued);
665 access->next_lhs_queued = lhs_work_queue_head;
666 access->grp_lhs_queued = 1;
667 lhs_work_queue_head = access;
668 }
669 }
670
671 /* Pop an access from the work queue for propagating from RHS to LHS, and
672 return it, assuming there is one. */
673
674 static struct access *
675 pop_access_from_rhs_work_queue (void)
676 {
677 struct access *access = rhs_work_queue_head;
678
679 rhs_work_queue_head = access->next_rhs_queued;
680 access->next_rhs_queued = NULL;
681 access->grp_rhs_queued = 0;
682 return access;
683 }
684
685 /* Pop an access from the work queue for propagating from LHS to RHS, and
686 return it, assuming there is one. */
687
688 static struct access *
689 pop_access_from_lhs_work_queue (void)
690 {
691 struct access *access = lhs_work_queue_head;
692
693 lhs_work_queue_head = access->next_lhs_queued;
694 access->next_lhs_queued = NULL;
695 access->grp_lhs_queued = 0;
696 return access;
697 }
698
699 /* Allocate necessary structures. */
700
701 static void
702 sra_initialize (void)
703 {
704 candidate_bitmap = BITMAP_ALLOC (NULL);
705 candidates = new hash_table<uid_decl_hasher>
706 (vec_safe_length (cfun->local_decls) / 2);
707 should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
708 cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
709 disqualified_constants = BITMAP_ALLOC (NULL);
710 gcc_obstack_init (&name_obstack);
711 base_access_vec = new hash_map<tree, auto_vec<access_p> >;
712 memset (&sra_stats, 0, sizeof (sra_stats));
713 }
714
715 /* Deallocate all general structures. */
716
717 static void
718 sra_deinitialize (void)
719 {
720 BITMAP_FREE (candidate_bitmap);
721 delete candidates;
722 candidates = NULL;
723 BITMAP_FREE (should_scalarize_away_bitmap);
724 BITMAP_FREE (cannot_scalarize_away_bitmap);
725 BITMAP_FREE (disqualified_constants);
726 access_pool.release ();
727 assign_link_pool.release ();
728 obstack_free (&name_obstack, NULL);
729
730 delete base_access_vec;
731 }
732
733 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise. */
734
735 static bool constant_decl_p (tree decl)
736 {
737 return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
738 }
739
740 /* Remove DECL from candidates for SRA and write REASON to the dump file if
741 there is one. */
742
743 static void
744 disqualify_candidate (tree decl, const char *reason)
745 {
746 if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
747 candidates->remove_elt_with_hash (decl, DECL_UID (decl));
748 if (constant_decl_p (decl))
749 bitmap_set_bit (disqualified_constants, DECL_UID (decl));
750
751 if (dump_file && (dump_flags & TDF_DETAILS))
752 {
753 fprintf (dump_file, "! Disqualifying ");
754 print_generic_expr (dump_file, decl);
755 fprintf (dump_file, " - %s\n", reason);
756 }
757 }
758
759 /* Return true iff the type contains a field or an element which does not allow
760 scalarization. Use VISITED_TYPES to avoid re-checking already checked
761 (sub-)types. */
762
763 static bool
764 type_internals_preclude_sra_p_1 (tree type, const char **msg,
765 hash_set<tree> *visited_types)
766 {
767 tree fld;
768 tree et;
769
770 if (visited_types->contains (type))
771 return false;
772 visited_types->add (type);
773
774 switch (TREE_CODE (type))
775 {
776 case RECORD_TYPE:
777 case UNION_TYPE:
778 case QUAL_UNION_TYPE:
779 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
780 if (TREE_CODE (fld) == FIELD_DECL)
781 {
782 if (TREE_CODE (fld) == FUNCTION_DECL)
783 continue;
784 tree ft = TREE_TYPE (fld);
785
786 if (TREE_THIS_VOLATILE (fld))
787 {
788 *msg = "volatile structure field";
789 return true;
790 }
791 if (!DECL_FIELD_OFFSET (fld))
792 {
793 *msg = "no structure field offset";
794 return true;
795 }
796 if (!DECL_SIZE (fld))
797 {
798 *msg = "zero structure field size";
799 return true;
800 }
801 if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
802 {
803 *msg = "structure field offset not fixed";
804 return true;
805 }
806 if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
807 {
808 *msg = "structure field size not fixed";
809 return true;
810 }
811 if (!tree_fits_shwi_p (bit_position (fld)))
812 {
813 *msg = "structure field size too big";
814 return true;
815 }
816 if (AGGREGATE_TYPE_P (ft)
817 && int_bit_position (fld) % BITS_PER_UNIT != 0)
818 {
819 *msg = "structure field is bit field";
820 return true;
821 }
822
823 if (AGGREGATE_TYPE_P (ft)
824 && type_internals_preclude_sra_p_1 (ft, msg, visited_types))
825 return true;
826 }
827
828 return false;
829
830 case ARRAY_TYPE:
831 et = TREE_TYPE (type);
832
833 if (TYPE_VOLATILE (et))
834 {
835 *msg = "element type is volatile";
836 return true;
837 }
838
839 if (AGGREGATE_TYPE_P (et)
840 && type_internals_preclude_sra_p_1 (et, msg, visited_types))
841 return true;
842
843 return false;
844
845 default:
846 return false;
847 }
848 }
849
850 /* Return true iff the type contains a field or an element which does not allow
851 scalarization. */
852
853 bool
854 type_internals_preclude_sra_p (tree type, const char **msg)
855 {
856 hash_set<tree> visited_types;
857 return type_internals_preclude_sra_p_1 (type, msg, &visited_types);
858 }
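
/* An editorial example (the type is made up): a variable of type

     struct dev { volatile int status; int data; };

   is rejected by the check above with the "volatile structure field" reason
   and therefore never becomes an SRA candidate.  */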
859
860
861 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
862 the three fields. Also add it to the vector of accesses corresponding to
863 the base. Finally, return the new access. */
864
865 static struct access *
866 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
867 {
868 struct access *access = access_pool.allocate ();
869
870 memset (access, 0, sizeof (struct access));
871 access->base = base;
872 access->offset = offset;
873 access->size = size;
874
875 base_access_vec->get_or_insert (base).safe_push (access);
876
877 return access;
878 }
879
880 static bool maybe_add_sra_candidate (tree);
881
882 /* Create and insert access for EXPR. Return created access, or NULL if it is
883 not possible. Also scan for uses of constant pool as we go along and add
884 to candidates. */
885
886 static struct access *
887 create_access (tree expr, gimple *stmt, bool write)
888 {
889 struct access *access;
890 poly_int64 poffset, psize, pmax_size;
891 tree base = expr;
892 bool reverse, unscalarizable_region = false;
893
894 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
895 &reverse);
896
897 /* For constant-pool entries, check we can substitute the constant value. */
898 if (constant_decl_p (base))
899 {
900 gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
901 if (expr != base
902 && !is_gimple_reg_type (TREE_TYPE (expr))
903 && dump_file && (dump_flags & TDF_DETAILS))
904 {
905 /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
906 and elements of multidimensional arrays (which are
907 multi-element arrays in their own right). */
908 fprintf (dump_file, "Allowing non-reg-type load of part"
909 " of constant-pool entry: ");
910 print_generic_expr (dump_file, expr);
911 }
912 maybe_add_sra_candidate (base);
913 }
914
915 if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
916 return NULL;
917
918 HOST_WIDE_INT offset, size, max_size;
919 if (!poffset.is_constant (&offset)
920 || !psize.is_constant (&size)
921 || !pmax_size.is_constant (&max_size))
922 {
923 disqualify_candidate (base, "Encountered a polynomial-sized access.");
924 return NULL;
925 }
926
927 if (size != max_size)
928 {
929 size = max_size;
930 unscalarizable_region = true;
931 }
932 if (size == 0)
933 return NULL;
934 if (offset < 0)
935 {
936 disqualify_candidate (base, "Encountered a negative offset access.");
937 return NULL;
938 }
939 if (size < 0)
940 {
941 disqualify_candidate (base, "Encountered an unconstrained access.");
942 return NULL;
943 }
944 if (offset + size > tree_to_shwi (DECL_SIZE (base)))
945 {
946 disqualify_candidate (base, "Encountered an access beyond the base.");
947 return NULL;
948 }
949
950 access = create_access_1 (base, offset, size);
951 access->expr = expr;
952 access->type = TREE_TYPE (expr);
953 access->write = write;
954 access->grp_unscalarizable_region = unscalarizable_region;
955 access->stmt = stmt;
956 access->reverse = reverse;
957
958 return access;
959 }
960
961
962 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
963 ARRAY_TYPE with fields that are either of gimple register types (excluding
964 bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
965 we are considering a decl from constant pool. If it is false, char arrays
966 will be refused. */
967
968 static bool
969 scalarizable_type_p (tree type, bool const_decl)
970 {
971 if (is_gimple_reg_type (type))
972 return true;
973 if (type_contains_placeholder_p (type))
974 return false;
975
976 bool have_predecessor_field = false;
977 HOST_WIDE_INT prev_pos = 0;
978
979 switch (TREE_CODE (type))
980 {
981 case RECORD_TYPE:
982 for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
983 if (TREE_CODE (fld) == FIELD_DECL)
984 {
985 tree ft = TREE_TYPE (fld);
986
987 if (zerop (DECL_SIZE (fld)))
988 continue;
989
990 HOST_WIDE_INT pos = int_bit_position (fld);
991 if (have_predecessor_field
992 && pos <= prev_pos)
993 return false;
994
995 have_predecessor_field = true;
996 prev_pos = pos;
997
998 if (DECL_BIT_FIELD (fld))
999 return false;
1000
1001 if (!scalarizable_type_p (ft, const_decl))
1002 return false;
1003 }
1004
1005 return true;
1006
1007 case ARRAY_TYPE:
1008 {
1009 HOST_WIDE_INT min_elem_size;
1010 if (const_decl)
1011 min_elem_size = 0;
1012 else
1013 min_elem_size = BITS_PER_UNIT;
1014
1015 if (TYPE_DOMAIN (type) == NULL_TREE
1016 || !tree_fits_shwi_p (TYPE_SIZE (type))
1017 || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
1018 || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
1019 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
1020 return false;
1021 if (tree_to_shwi (TYPE_SIZE (type)) == 0
1022 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
1023 /* Zero-element array, should not prevent scalarization. */
1024 ;
1025 else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1026 || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1027 /* Variable-length array, do not allow scalarization. */
1028 return false;
1029
1030 tree elem = TREE_TYPE (type);
1031 if (!scalarizable_type_p (elem, const_decl))
1032 return false;
1033 return true;
1034 }
1035 default:
1036 return false;
1037 }
1038 }
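
/* Editorial examples of the rules above (the types are made up): a type such
   as

     struct rgb { unsigned char r; unsigned char g; unsigned char b; };

   is scalarizable because every field has a gimple register type, whereas

     struct flags { unsigned a : 3; unsigned b : 5; };

   is not, because its fields are bit-fields, and a variable-length array type
   is not either, because its size is not a compile-time constant.  */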
1039
1040 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1041
1042 static inline bool
1043 contains_view_convert_expr_p (const_tree ref)
1044 {
1045 while (handled_component_p (ref))
1046 {
1047 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1048 return true;
1049 ref = TREE_OPERAND (ref, 0);
1050 }
1051
1052 return false;
1053 }
1054
1055 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
1056 bit-field field declaration. If TYPE_CHANGING_P is non-NULL, set the bool
1057 it points to will be set if REF contains any of the above or a MEM_REF
1058 expression that effectively performs type conversion. */
1059
1060 static bool
1061 contains_vce_or_bfcref_p (const_tree ref, bool *type_changing_p = NULL)
1062 {
1063 while (handled_component_p (ref))
1064 {
1065 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
1066 || (TREE_CODE (ref) == COMPONENT_REF
1067 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
1068 {
1069 if (type_changing_p)
1070 *type_changing_p = true;
1071 return true;
1072 }
1073 ref = TREE_OPERAND (ref, 0);
1074 }
1075
1076 if (!type_changing_p
1077 || TREE_CODE (ref) != MEM_REF
1078 || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
1079 return false;
1080
1081 tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
1082 if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
1083 != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
1084 *type_changing_p = true;
1085
1086 return false;
1087 }
1088
1089 /* Search the given tree for a declaration by skipping handled components and
1090 exclude it from the candidates. */
1091
1092 static void
1093 disqualify_base_of_expr (tree t, const char *reason)
1094 {
1095 t = get_base_address (t);
1096 if (t && DECL_P (t))
1097 disqualify_candidate (t, reason);
1098 }
1099
1100 /* Scan expression EXPR and create access structures for all accesses to
1101 candidates for scalarization. Return the created access or NULL if none is
1102 created. */
1103
1104 static struct access *
1105 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1106 {
1107 struct access *ret = NULL;
1108 bool partial_ref;
1109
1110 if (TREE_CODE (expr) == BIT_FIELD_REF
1111 || TREE_CODE (expr) == IMAGPART_EXPR
1112 || TREE_CODE (expr) == REALPART_EXPR)
1113 {
1114 expr = TREE_OPERAND (expr, 0);
1115 partial_ref = true;
1116 }
1117 else
1118 partial_ref = false;
1119
1120 if (storage_order_barrier_p (expr))
1121 {
1122 disqualify_base_of_expr (expr, "storage order barrier.");
1123 return NULL;
1124 }
1125
1126 /* We need to dive through V_C_Es in order to get the size of their operand
1127 and not of the result type. Ada produces such statements. We are also
1128 capable of handling the topmost V_C_E but not any of those buried in other
1129 handled components. */
1130 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1131 expr = TREE_OPERAND (expr, 0);
1132
1133 if (contains_view_convert_expr_p (expr))
1134 {
1135 disqualify_base_of_expr (expr, "V_C_E under a different handled "
1136 "component.");
1137 return NULL;
1138 }
1139 if (TREE_THIS_VOLATILE (expr))
1140 {
1141 disqualify_base_of_expr (expr, "part of a volatile reference.");
1142 return NULL;
1143 }
1144
1145 switch (TREE_CODE (expr))
1146 {
1147 case MEM_REF:
1148 if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR)
1149 return NULL;
1150 /* fall through */
1151 case VAR_DECL:
1152 case PARM_DECL:
1153 case RESULT_DECL:
1154 case COMPONENT_REF:
1155 case ARRAY_REF:
1156 case ARRAY_RANGE_REF:
1157 ret = create_access (expr, stmt, write);
1158 break;
1159
1160 default:
1161 break;
1162 }
1163
1164 if (write && partial_ref && ret)
1165 ret->grp_partial_lhs = 1;
1166
1167 return ret;
1168 }
1169
1170 /* Scan expression EXPR and create access structures for all accesses to
1171 candidates for scalarization. Return true if any access has been inserted.
1172 STMT must be the statement from which the expression is taken, WRITE must be
1173 true if the expression is a store and false otherwise. */
1174
1175 static bool
1176 build_access_from_expr (tree expr, gimple *stmt, bool write)
1177 {
1178 struct access *access;
1179
1180 access = build_access_from_expr_1 (expr, stmt, write);
1181 if (access)
1182 {
1183 /* This means the aggregate is accessed as a whole in a way other than an
1184 assign statement and thus cannot be removed even if we had a scalar
1185 replacement for everything. */
1186 if (cannot_scalarize_away_bitmap)
1187 bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1188 return true;
1189 }
1190 return false;
1191 }
1192
1193 /* Return the single non-EH successor edge of BB or NULL if there is none or
1194 more than one. */
1195
1196 static edge
1197 single_non_eh_succ (basic_block bb)
1198 {
1199 edge e, res = NULL;
1200 edge_iterator ei;
1201
1202 FOR_EACH_EDGE (e, ei, bb->succs)
1203 if (!(e->flags & EDGE_EH))
1204 {
1205 if (res)
1206 return NULL;
1207 res = e;
1208 }
1209
1210 return res;
1211 }
1212
1213 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1214 there is no alternative spot to put the statements SRA might need to
1215 generate after it. The spot we are looking for is an edge leading to a
1216 single non-EH successor, if it exists and is indeed single. RHS may be
1217 NULL, in that case ignore it. */
1218
1219 static bool
1220 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1221 {
1222 if (stmt_ends_bb_p (stmt))
1223 {
1224 if (single_non_eh_succ (gimple_bb (stmt)))
1225 return false;
1226
1227 disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1228 if (rhs)
1229 disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1230 return true;
1231 }
1232 return false;
1233 }
1234
1235 /* Return true if the nature of BASE is such that it contains data even if
1236 there is no write to it in the function. */
1237
1238 static bool
1239 comes_initialized_p (tree base)
1240 {
1241 return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
1242 }
1243
1244 /* Scan expressions occurring in STMT, create access structures for all accesses
1245 to candidates for scalarization and remove those candidates which occur in
1246 statements or expressions that prevent them from being split apart. Return
1247 true if any access has been inserted. */
1248
1249 static bool
1250 build_accesses_from_assign (gimple *stmt)
1251 {
1252 tree lhs, rhs;
1253 struct access *lacc, *racc;
1254
1255 if (!gimple_assign_single_p (stmt)
1256 /* Scope clobbers don't influence scalarization. */
1257 || gimple_clobber_p (stmt))
1258 return false;
1259
1260 lhs = gimple_assign_lhs (stmt);
1261 rhs = gimple_assign_rhs1 (stmt);
1262
1263 if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1264 return false;
1265
1266 racc = build_access_from_expr_1 (rhs, stmt, false);
1267 lacc = build_access_from_expr_1 (lhs, stmt, true);
1268
1269 if (lacc)
1270 {
1271 lacc->grp_assignment_write = 1;
1272 if (storage_order_barrier_p (rhs))
1273 lacc->grp_unscalarizable_region = 1;
1274
1275 if (should_scalarize_away_bitmap && !is_gimple_reg_type (lacc->type))
1276 {
1277 bool type_changing_p = false;
1278 contains_vce_or_bfcref_p (lhs, &type_changing_p);
1279 if (type_changing_p)
1280 bitmap_set_bit (cannot_scalarize_away_bitmap,
1281 DECL_UID (lacc->base));
1282 }
1283 }
1284
1285 if (racc)
1286 {
1287 racc->grp_assignment_read = 1;
1288 if (should_scalarize_away_bitmap && !is_gimple_reg_type (racc->type))
1289 {
1290 bool type_changing_p = false;
1291 contains_vce_or_bfcref_p (rhs, &type_changing_p);
1292
1293 if (type_changing_p || gimple_has_volatile_ops (stmt))
1294 bitmap_set_bit (cannot_scalarize_away_bitmap,
1295 DECL_UID (racc->base));
1296 else
1297 bitmap_set_bit (should_scalarize_away_bitmap,
1298 DECL_UID (racc->base));
1299 }
1300 if (storage_order_barrier_p (lhs))
1301 racc->grp_unscalarizable_region = 1;
1302 }
1303
1304 if (lacc && racc
1305 && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1306 && !lacc->grp_unscalarizable_region
1307 && !racc->grp_unscalarizable_region
1308 && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1309 && lacc->size == racc->size
1310 && useless_type_conversion_p (lacc->type, racc->type))
1311 {
1312 struct assign_link *link;
1313
1314 link = assign_link_pool.allocate ();
1315 memset (link, 0, sizeof (struct assign_link));
1316
1317 link->lacc = lacc;
1318 link->racc = racc;
1319 add_link_to_rhs (racc, link);
1320 add_link_to_lhs (lacc, link);
1321 add_access_to_rhs_work_queue (racc);
1322 add_access_to_lhs_work_queue (lacc);
1323
1324 /* Let's delay marking the areas as written until propagation of accesses
1325 across link, unless the nature of rhs tells us that its data comes
1326 from elsewhere. */
1327 if (!comes_initialized_p (racc->base))
1328 lacc->write = false;
1329 }
1330
1331 return lacc || racc;
1332 }
1333
1334 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1335 GIMPLE_ASM operands with memory constraints which cannot be scalarized. */
1336
1337 static bool
1338 asm_visit_addr (gimple *, tree op, tree, void *)
1339 {
1340 op = get_base_address (op);
1341 if (op
1342 && DECL_P (op))
1343 disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1344
1345 return false;
1346 }
1347
1348 /* Scan function and look for interesting expressions and create access
1349 structures for them. Return true iff any access is created. */
1350
1351 static bool
1352 scan_function (void)
1353 {
1354 basic_block bb;
1355 bool ret = false;
1356
1357 FOR_EACH_BB_FN (bb, cfun)
1358 {
1359 gimple_stmt_iterator gsi;
1360 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1361 {
1362 gimple *stmt = gsi_stmt (gsi);
1363 tree t;
1364 unsigned i;
1365
1366 switch (gimple_code (stmt))
1367 {
1368 case GIMPLE_RETURN:
1369 t = gimple_return_retval (as_a <greturn *> (stmt));
1370 if (t != NULL_TREE)
1371 ret |= build_access_from_expr (t, stmt, false);
1372 break;
1373
1374 case GIMPLE_ASSIGN:
1375 ret |= build_accesses_from_assign (stmt);
1376 break;
1377
1378 case GIMPLE_CALL:
1379 for (i = 0; i < gimple_call_num_args (stmt); i++)
1380 ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1381 stmt, false);
1382
1383 t = gimple_call_lhs (stmt);
1384 if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1385 ret |= build_access_from_expr (t, stmt, true);
1386 break;
1387
1388 case GIMPLE_ASM:
1389 {
1390 gasm *asm_stmt = as_a <gasm *> (stmt);
1391 walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1392 asm_visit_addr);
1393 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1394 {
1395 t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1396 ret |= build_access_from_expr (t, asm_stmt, false);
1397 }
1398 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1399 {
1400 t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1401 ret |= build_access_from_expr (t, asm_stmt, true);
1402 }
1403 }
1404 break;
1405
1406 default:
1407 break;
1408 }
1409 }
1410 }
1411
1412 return ret;
1413 }
1414
1415 /* Helper of QSORT function. There are pointers to accesses in the array. An
1416 access is considered smaller than another if it has a smaller offset or if the
1417 offsets are the same but its size is bigger. */
1418
1419 static int
1420 compare_access_positions (const void *a, const void *b)
1421 {
1422 const access_p *fp1 = (const access_p *) a;
1423 const access_p *fp2 = (const access_p *) b;
1424 const access_p f1 = *fp1;
1425 const access_p f2 = *fp2;
1426
1427 if (f1->offset != f2->offset)
1428 return f1->offset < f2->offset ? -1 : 1;
1429
1430 if (f1->size == f2->size)
1431 {
1432 if (f1->type == f2->type)
1433 return 0;
1434 /* Put any non-aggregate type before any aggregate type. */
1435 else if (!is_gimple_reg_type (f1->type)
1436 && is_gimple_reg_type (f2->type))
1437 return 1;
1438 else if (is_gimple_reg_type (f1->type)
1439 && !is_gimple_reg_type (f2->type))
1440 return -1;
1441 /* Put any complex or vector type before any other scalar type. */
1442 else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1443 && TREE_CODE (f1->type) != VECTOR_TYPE
1444 && (TREE_CODE (f2->type) == COMPLEX_TYPE
1445 || TREE_CODE (f2->type) == VECTOR_TYPE))
1446 return 1;
1447 else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1448 || TREE_CODE (f1->type) == VECTOR_TYPE)
1449 && TREE_CODE (f2->type) != COMPLEX_TYPE
1450 && TREE_CODE (f2->type) != VECTOR_TYPE)
1451 return -1;
1452 /* Put any integral type before any non-integral type. When splicing, we
1453 make sure that those with insufficient precision and occupying the
1454 same space are not scalarized. */
1455 else if (INTEGRAL_TYPE_P (f1->type)
1456 && !INTEGRAL_TYPE_P (f2->type))
1457 return -1;
1458 else if (!INTEGRAL_TYPE_P (f1->type)
1459 && INTEGRAL_TYPE_P (f2->type))
1460 return 1;
1461 /* Put the integral type with the bigger precision first. */
1462 else if (INTEGRAL_TYPE_P (f1->type)
1463 && INTEGRAL_TYPE_P (f2->type)
1464 && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1465 return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1466 /* Stabilize the sort. */
1467 return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1468 }
1469
1470 /* We want the bigger accesses first, thus the opposite operator in the next
1471 line: */
1472 return f1->size > f2->size ? -1 : 1;
1473 }
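
/* An editorial example of the resulting order (offsets and sizes in bits,
   accesses made up): the accesses (0, 64, struct S), (0, 32, float),
   (0, 32, int) and (32, 32, int) are sorted as

     (0, 64, struct S), (0, 32, int), (0, 32, float), (32, 32, int)

   that is, smaller offsets come first, then bigger sizes, and among
   same-sized scalars the integral type precedes the non-integral one.  */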
1474
1475
1476 /* Append a name of the declaration to the name obstack. A helper function for
1477 make_fancy_name. */
1478
1479 static void
1480 make_fancy_decl_name (tree decl)
1481 {
1482 char buffer[32];
1483
1484 tree name = DECL_NAME (decl);
1485 if (name)
1486 obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1487 IDENTIFIER_LENGTH (name));
1488 else
1489 {
1490 sprintf (buffer, "D%u", DECL_UID (decl));
1491 obstack_grow (&name_obstack, buffer, strlen (buffer));
1492 }
1493 }
1494
1495 /* Helper for make_fancy_name. */
1496
1497 static void
1498 make_fancy_name_1 (tree expr)
1499 {
1500 char buffer[32];
1501 tree index;
1502
1503 if (DECL_P (expr))
1504 {
1505 make_fancy_decl_name (expr);
1506 return;
1507 }
1508
1509 switch (TREE_CODE (expr))
1510 {
1511 case COMPONENT_REF:
1512 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1513 obstack_1grow (&name_obstack, '$');
1514 make_fancy_decl_name (TREE_OPERAND (expr, 1));
1515 break;
1516
1517 case ARRAY_REF:
1518 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1519 obstack_1grow (&name_obstack, '$');
1520 /* Arrays with only one element may not have a constant as their
1521 index. */
1522 index = TREE_OPERAND (expr, 1);
1523 if (TREE_CODE (index) != INTEGER_CST)
1524 break;
1525 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1526 obstack_grow (&name_obstack, buffer, strlen (buffer));
1527 break;
1528
1529 case ADDR_EXPR:
1530 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1531 break;
1532
1533 case MEM_REF:
1534 make_fancy_name_1 (TREE_OPERAND (expr, 0));
1535 if (!integer_zerop (TREE_OPERAND (expr, 1)))
1536 {
1537 obstack_1grow (&name_obstack, '$');
1538 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1539 TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1540 obstack_grow (&name_obstack, buffer, strlen (buffer));
1541 }
1542 break;
1543
1544 case BIT_FIELD_REF:
1545 case REALPART_EXPR:
1546 case IMAGPART_EXPR:
1547 gcc_unreachable (); /* we treat these as scalars. */
1548 break;
1549 default:
1550 break;
1551 }
1552 }
1553
1554 /* Create a human readable name for a replacement variable based on EXPR. */
1555
1556 static char *
1557 make_fancy_name (tree expr)
1558 {
1559 make_fancy_name_1 (expr);
1560 obstack_1grow (&name_obstack, '\0');
1561 return XOBFINISH (&name_obstack, char *);
1562 }
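
/* An editorial example (names made up): for an access expression such as
   s.in.b, the functions above leave the string "s$in$b" on the obstack, and
   that string is typically used when naming the scalar replacement so that
   dumps remain readable.  */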
1563
1564 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1565 EXP_TYPE at the given OFFSET and with storage order REVERSE. If BASE is
1566 something for which get_addr_base_and_unit_offset returns NULL, gsi must
1567 be non-NULL and is used to insert new statements either before or below
1568 the current one as specified by INSERT_AFTER. This function is not capable
1569 of handling bitfields. */
1570
1571 tree
1572 build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
1573 bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1574 bool insert_after)
1575 {
1576 tree prev_base = base;
1577 tree off;
1578 tree mem_ref;
1579 poly_int64 base_offset;
1580 unsigned HOST_WIDE_INT misalign;
1581 unsigned int align;
1582
1583 /* Preserve address-space information. */
1584 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1585 if (as != TYPE_ADDR_SPACE (exp_type))
1586 exp_type = build_qualified_type (exp_type,
1587 TYPE_QUALS (exp_type)
1588 | ENCODE_QUAL_ADDR_SPACE (as));
1589
1590 poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
1591 get_object_alignment_1 (base, &align, &misalign);
1592 base = get_addr_base_and_unit_offset (base, &base_offset);
1593
1594 /* get_addr_base_and_unit_offset returns NULL for references with a variable
1595 offset such as array[var_index]. */
1596 if (!base)
1597 {
1598 gassign *stmt;
1599 tree tmp, addr;
1600
1601 gcc_checking_assert (gsi);
1602 tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1603 addr = build_fold_addr_expr (unshare_expr (prev_base));
1604 STRIP_USELESS_TYPE_CONVERSION (addr);
1605 stmt = gimple_build_assign (tmp, addr);
1606 gimple_set_location (stmt, loc);
1607 if (insert_after)
1608 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1609 else
1610 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1611
1612 off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
1613 base = tmp;
1614 }
1615 else if (TREE_CODE (base) == MEM_REF)
1616 {
1617 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1618 base_offset + byte_offset);
1619 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1620 base = unshare_expr (TREE_OPERAND (base, 0));
1621 }
1622 else
1623 {
1624 off = build_int_cst (reference_alias_ptr_type (prev_base),
1625 base_offset + byte_offset);
1626 base = build_fold_addr_expr (unshare_expr (base));
1627 }
1628
1629 unsigned int align_bound = known_alignment (misalign + offset);
1630 if (align_bound != 0)
1631 align = MIN (align, align_bound);
1632 if (align != TYPE_ALIGN (exp_type))
1633 exp_type = build_aligned_type (exp_type, align);
1634
1635 mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1636 REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1637 if (TREE_THIS_VOLATILE (prev_base))
1638 TREE_THIS_VOLATILE (mem_ref) = 1;
1639 if (TREE_SIDE_EFFECTS (prev_base))
1640 TREE_SIDE_EFFECTS (mem_ref) = 1;
1641 return mem_ref;
1642 }
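
/* A rough editorial usage sketch (the variable names and values are made up):
   for a declaration

     struct S { int a; int b; } s;

   a call along the lines of

     build_ref_for_offset (loc, s_decl, 32, false, integer_type_node,
                           &gsi, false);

   would typically yield a reference of the form MEM[(int *)&s + 4B]: a
   MEM_REF based on the address of S, with the bit offset converted to a
   4-byte offset, the requested type, and alignment and address-space
   information carried over from the original base.  */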
1643
1644 /* Construct and return a memory reference that is equal to a portion of
1645 MODEL->expr but is based on BASE. If this cannot be done, return NULL. */
1646
1647 static tree
1648 build_reconstructed_reference (location_t, tree base, struct access *model)
1649 {
1650 tree expr = model->expr, prev_expr = NULL;
1651 while (!types_compatible_p (TREE_TYPE (expr), TREE_TYPE (base)))
1652 {
1653 if (!handled_component_p (expr))
1654 return NULL_TREE;
1655 prev_expr = expr;
1656 expr = TREE_OPERAND (expr, 0);
1657 }
1658
1659 /* Guard against broken VIEW_CONVERT_EXPRs... */
1660 if (!prev_expr)
1661 return NULL_TREE;
1662
1663 TREE_OPERAND (prev_expr, 0) = base;
1664 tree ref = unshare_expr (model->expr);
1665 TREE_OPERAND (prev_expr, 0) = expr;
1666 return ref;
1667 }
1668
1669 /* Construct a memory reference to a part of an aggregate BASE at the given
1670 OFFSET and of the same type as MODEL. In case this is a reference to a
1671 bit-field, the function will replicate the last component_ref of model's
1672 expr to access it. GSI and INSERT_AFTER have the same meaning as in
1673 build_ref_for_offset. */
1674
1675 static tree
1676 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1677 struct access *model, gimple_stmt_iterator *gsi,
1678 bool insert_after)
1679 {
1680 gcc_assert (offset >= 0);
1681 if (TREE_CODE (model->expr) == COMPONENT_REF
1682 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1683 {
1684 /* This access represents a bit-field. */
1685 tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1686
1687 offset -= int_bit_position (fld);
1688 exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1689 t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1690 gsi, insert_after);
1691 /* The flag will be set on the record type. */
1692 REF_REVERSE_STORAGE_ORDER (t) = 0;
1693 return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1694 NULL_TREE);
1695 }
1696 else
1697 {
1698 tree res;
1699 if (model->grp_same_access_path
1700 && !TREE_THIS_VOLATILE (base)
1701 && (TYPE_ADDR_SPACE (TREE_TYPE (base))
1702 == TYPE_ADDR_SPACE (TREE_TYPE (model->expr)))
1703 && offset <= model->offset
1704 /* build_reconstructed_reference can still fail if we have already
1705 massaged BASE because of another type incompatibility. */
1706 && (res = build_reconstructed_reference (loc, base, model)))
1707 return res;
1708 else
1709 return build_ref_for_offset (loc, base, offset, model->reverse,
1710 model->type, gsi, insert_after);
1711 }
1712 }
1713
1714 /* Attempt to build a memory reference that we could put into a gimple
1715 debug_bind statement. Similar to build_ref_for_model but punts if it has to
1716 create statements and returns NULL instead. This function also ignores
1717 alignment issues and so its results should never end up in non-debug
1718 statements. */
1719
1720 static tree
1721 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1722 struct access *model)
1723 {
1724 poly_int64 base_offset;
1725 tree off;
1726
1727 if (TREE_CODE (model->expr) == COMPONENT_REF
1728 && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1729 return NULL_TREE;
1730
1731 base = get_addr_base_and_unit_offset (base, &base_offset);
1732 if (!base)
1733 return NULL_TREE;
1734 if (TREE_CODE (base) == MEM_REF)
1735 {
1736 off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1737 base_offset + offset / BITS_PER_UNIT);
1738 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1739 base = unshare_expr (TREE_OPERAND (base, 0));
1740 }
1741 else
1742 {
1743 off = build_int_cst (reference_alias_ptr_type (base),
1744 base_offset + offset / BITS_PER_UNIT);
1745 base = build_fold_addr_expr (unshare_expr (base));
1746 }
1747
1748 return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1749 }
1750
1751 /* Construct a memory reference consisting of component_refs and array_refs to
1752 a part of an aggregate *RES (which is of type TYPE). The requested part
1753 should have type EXP_TYPE and be at the given OFFSET. This function might not
1754 succeed; it returns true when it does, and only then does *RES point to something
1755 meaningful. This function should be used only to build expressions that we
1756 might need to present to the user (e.g. in warnings). In all other situations,
1757 build_ref_for_model or build_ref_for_offset should be used instead. */
1758
1759 static bool
1760 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1761 tree exp_type)
1762 {
1763 while (1)
1764 {
1765 tree fld;
1766 tree tr_size, index, minidx;
1767 HOST_WIDE_INT el_size;
1768
1769 if (offset == 0 && exp_type
1770 && types_compatible_p (exp_type, type))
1771 return true;
1772
1773 switch (TREE_CODE (type))
1774 {
1775 case UNION_TYPE:
1776 case QUAL_UNION_TYPE:
1777 case RECORD_TYPE:
1778 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1779 {
1780 HOST_WIDE_INT pos, size;
1781 tree tr_pos, expr, *expr_ptr;
1782
1783 if (TREE_CODE (fld) != FIELD_DECL)
1784 continue;
1785
1786 tr_pos = bit_position (fld);
1787 if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1788 continue;
1789 pos = tree_to_uhwi (tr_pos);
1790 gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1791 tr_size = DECL_SIZE (fld);
1792 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1793 continue;
1794 size = tree_to_uhwi (tr_size);
1795 if (size == 0)
1796 {
1797 if (pos != offset)
1798 continue;
1799 }
1800 else if (pos > offset || (pos + size) <= offset)
1801 continue;
1802
1803 expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1804 NULL_TREE);
1805 expr_ptr = &expr;
1806 if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1807 offset - pos, exp_type))
1808 {
1809 *res = expr;
1810 return true;
1811 }
1812 }
1813 return false;
1814
1815 case ARRAY_TYPE:
1816 tr_size = TYPE_SIZE (TREE_TYPE (type));
1817 if (!tr_size || !tree_fits_uhwi_p (tr_size))
1818 return false;
1819 el_size = tree_to_uhwi (tr_size);
1820
1821 minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1822 if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1823 return false;
1824 index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1825 if (!integer_zerop (minidx))
1826 index = int_const_binop (PLUS_EXPR, index, minidx);
1827 *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1828 NULL_TREE, NULL_TREE);
1829 offset = offset % el_size;
1830 type = TREE_TYPE (type);
1831 break;
1832
1833 default:
1834 if (offset != 0)
1835 return false;
1836
1837 if (exp_type)
1838 return false;
1839 else
1840 return true;
1841 }
1842 }
1843 }
1844
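/* An illustrative sketch of what build_user_friendly_ref_for_offset produces;
   the declarations below are hypothetical and not taken from any testcase:

     struct pair { short lo; short hi; };
     struct wrap { int tag; struct pair p; } w;

   Starting from *RES == w with OFFSET 48 (in bits) and an EXP_TYPE compatible
   with short, the loop descends through the field at bit position 32 and then
   the field at relative position 16, rewriting *RES to w.p.hi and returning
   true.  An offset landing in the middle of a field, or a type mismatch at
   the bottom, makes it return false, in which case *RES must not be used.  */
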
1845 /* Print message to dump file why a variable was rejected. */
1846
1847 static void
1848 reject (tree var, const char *msg)
1849 {
1850 if (dump_file && (dump_flags & TDF_DETAILS))
1851 {
1852 fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1853 print_generic_expr (dump_file, var);
1854 fprintf (dump_file, "\n");
1855 }
1856 }
1857
1858 /* Return true if VAR is a candidate for SRA. */
1859
1860 static bool
1861 maybe_add_sra_candidate (tree var)
1862 {
1863 tree type = TREE_TYPE (var);
1864 const char *msg;
1865 tree_node **slot;
1866
1867 if (!AGGREGATE_TYPE_P (type))
1868 {
1869 reject (var, "not aggregate");
1870 return false;
1871 }
1872 /* Allow constant-pool entries that "need to live in memory". */
1873 if (needs_to_live_in_memory (var) && !constant_decl_p (var))
1874 {
1875 reject (var, "needs to live in memory");
1876 return false;
1877 }
1878 if (TREE_THIS_VOLATILE (var))
1879 {
1880 reject (var, "is volatile");
1881 return false;
1882 }
1883 if (!COMPLETE_TYPE_P (type))
1884 {
1885 reject (var, "has incomplete type");
1886 return false;
1887 }
1888 if (!tree_fits_shwi_p (TYPE_SIZE (type)))
1889 {
1890 reject (var, "type size not fixed");
1891 return false;
1892 }
1893 if (tree_to_shwi (TYPE_SIZE (type)) == 0)
1894 {
1895 reject (var, "type size is zero");
1896 return false;
1897 }
1898 if (type_internals_preclude_sra_p (type, &msg))
1899 {
1900 reject (var, msg);
1901 return false;
1902 }
1903 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1904 we also want to schedule it rather late. Thus we ignore it in
1905 the early pass. */
1906 (sra_mode == SRA_MODE_EARLY_INTRA
1907 && is_va_list_type (type)))
1908 {
1909 reject (var, "is va_list");
1910 return false;
1911 }
1912
1913 bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1914 slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1915 *slot = var;
1916
1917 if (dump_file && (dump_flags & TDF_DETAILS))
1918 {
1919 fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1920 print_generic_expr (dump_file, var);
1921 fprintf (dump_file, "\n");
1922 }
1923
1924 return true;
1925 }
1926
1927 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1928 those declarations whose type is suitable for scalarization. */
1929
1930 static bool
1931 find_var_candidates (void)
1932 {
1933 tree var, parm;
1934 unsigned int i;
1935 bool ret = false;
1936
1937 for (parm = DECL_ARGUMENTS (current_function_decl);
1938 parm;
1939 parm = DECL_CHAIN (parm))
1940 ret |= maybe_add_sra_candidate (parm);
1941
1942 FOR_EACH_LOCAL_DECL (cfun, i, var)
1943 {
1944 if (!VAR_P (var))
1945 continue;
1946
1947 ret |= maybe_add_sra_candidate (var);
1948 }
1949
1950 return ret;
1951 }
1952
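/* For illustration only (a hypothetical function, not a testcase): in

     struct S { int i; float f; };
     int foo (struct S s)
     {
       struct S t = s;
       volatile struct S v = t;
       return t.i;
     }

   both s and t would typically be accepted by maybe_add_sra_candidate above,
   whereas v is rejected as volatile, and any aggregate whose address escapes
   fails the needs_to_live_in_memory check.  */
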
1953 /* Return true if EXPR is a reference chain of COMPONENT_REFs and ARRAY_REFs
1954 ending either with a DECL or a MEM_REF with zero offset. */
1955
1956 static bool
1957 path_comparable_for_same_access (tree expr)
1958 {
1959 while (handled_component_p (expr))
1960 {
1961 if (TREE_CODE (expr) == ARRAY_REF)
1962 {
1963 /* SSA name indices can occur here too when the array is of size one.
1964 But we cannot just re-use array_refs with SSA names elsewhere in
1965 the function, so disallow non-constant indices. TODO: Remove this
1966 limitation after teaching build_reconstructed_reference to replace
1967 the index with the index type lower bound. */
1968 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST)
1969 return false;
1970 }
1971 expr = TREE_OPERAND (expr, 0);
1972 }
1973
1974 if (TREE_CODE (expr) == MEM_REF)
1975 {
1976 if (!zerop (TREE_OPERAND (expr, 1)))
1977 return false;
1978 }
1979 else
1980 gcc_assert (DECL_P (expr));
1981
1982 return true;
1983 }
1984
1985 /* Assuming that EXP1 consists of only COMPONENT_REFs and ARRAY_REFs, return
1986 true if the chain of these handled components are exactly the same as EXP2
1987 and the expression under them is the same DECL or an equivalent MEM_REF.
1988 The reference picked by compare_access_positions must go to EXP1. */
1989
1990 static bool
1991 same_access_path_p (tree exp1, tree exp2)
1992 {
1993 if (TREE_CODE (exp1) != TREE_CODE (exp2))
1994 {
1995 /* Special case single-field structures loaded sometimes as the field
1996 and sometimes as the structure. If the field is of a scalar type,
1997 compare_access_positions will put it into exp1.
1998
1999 TODO: The gimple register type condition can be removed if we teach
2000 compare_access_positions to put inner types first. */
2001 if (is_gimple_reg_type (TREE_TYPE (exp1))
2002 && TREE_CODE (exp1) == COMPONENT_REF
2003 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (exp1, 0)))
2004 == TYPE_MAIN_VARIANT (TREE_TYPE (exp2))))
2005 exp1 = TREE_OPERAND (exp1, 0);
2006 else
2007 return false;
2008 }
2009
2010 if (!operand_equal_p (exp1, exp2, OEP_ADDRESS_OF))
2011 return false;
2012
2013 return true;
2014 }
2015
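/* A hedged example of the single-field special case handled above: for

     struct only { float f; } o;

   an access to the whole of o (for example on one side of an aggregate
   assignment) and an access to o.f have the same offset and size.  Their
   TREE_CODEs differ, but because o.f is of a scalar type and its inner
   operand has the type of o, exp1 is stripped down to o and the
   operand_equal_p check then succeeds, so the two still count as sharing an
   access path.  */
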
2016 /* Sort all accesses for the given variable, check for partial overlaps and
2017 return NULL if there are any. If there are none, pick a representative for
2018 each combination of offset and size and create a linked list out of them.
2019 Return the pointer to the first representative and make sure it is the first
2020 one in the vector of accesses. */
2021
2022 static struct access *
2023 sort_and_splice_var_accesses (tree var)
2024 {
2025 int i, j, access_count;
2026 struct access *res, **prev_acc_ptr = &res;
2027 vec<access_p> *access_vec;
2028 bool first = true;
2029 HOST_WIDE_INT low = -1, high = 0;
2030
2031 access_vec = get_base_access_vector (var);
2032 if (!access_vec)
2033 return NULL;
2034 access_count = access_vec->length ();
2035
2036 /* Sort by <OFFSET, SIZE>. */
2037 access_vec->qsort (compare_access_positions);
2038
2039 i = 0;
2040 while (i < access_count)
2041 {
2042 struct access *access = (*access_vec)[i];
2043 bool grp_write = access->write;
2044 bool grp_read = !access->write;
2045 bool grp_scalar_write = access->write
2046 && is_gimple_reg_type (access->type);
2047 bool grp_scalar_read = !access->write
2048 && is_gimple_reg_type (access->type);
2049 bool grp_assignment_read = access->grp_assignment_read;
2050 bool grp_assignment_write = access->grp_assignment_write;
2051 bool multiple_scalar_reads = false;
2052 bool grp_partial_lhs = access->grp_partial_lhs;
2053 bool first_scalar = is_gimple_reg_type (access->type);
2054 bool unscalarizable_region = access->grp_unscalarizable_region;
2055 bool grp_same_access_path = true;
2056 bool bf_non_full_precision
2057 = (INTEGRAL_TYPE_P (access->type)
2058 && TYPE_PRECISION (access->type) != access->size
2059 && TREE_CODE (access->expr) == COMPONENT_REF
2060 && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2061
2062 if (first || access->offset >= high)
2063 {
2064 first = false;
2065 low = access->offset;
2066 high = access->offset + access->size;
2067 }
2068 else if (access->offset > low && access->offset + access->size > high)
2069 return NULL;
2070 else
2071 gcc_assert (access->offset >= low
2072 && access->offset + access->size <= high);
2073
2074 grp_same_access_path = path_comparable_for_same_access (access->expr);
2075
2076 j = i + 1;
2077 while (j < access_count)
2078 {
2079 struct access *ac2 = (*access_vec)[j];
2080 if (ac2->offset != access->offset || ac2->size != access->size)
2081 break;
2082 if (ac2->write)
2083 {
2084 grp_write = true;
2085 grp_scalar_write = (grp_scalar_write
2086 || is_gimple_reg_type (ac2->type));
2087 }
2088 else
2089 {
2090 grp_read = true;
2091 if (is_gimple_reg_type (ac2->type))
2092 {
2093 if (grp_scalar_read)
2094 multiple_scalar_reads = true;
2095 else
2096 grp_scalar_read = true;
2097 }
2098 }
2099 grp_assignment_read |= ac2->grp_assignment_read;
2100 grp_assignment_write |= ac2->grp_assignment_write;
2101 grp_partial_lhs |= ac2->grp_partial_lhs;
2102 unscalarizable_region |= ac2->grp_unscalarizable_region;
2103 relink_to_new_repr (access, ac2);
2104
2105 /* If there are both aggregate-type and scalar-type accesses with
2106 this combination of size and offset, the comparison function
2107 should have put the scalars first. */
2108 gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2109 /* It also prefers integral types to non-integral. However, when the
2110 precision of the selected type does not span the entire area and
2111 should also be used for a non-integer (i.e. float), we must not
2112 let that happen. Normally analyze_access_subtree expands the type
2113 to cover the entire area but for bit-fields it doesn't. */
2114 if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2115 {
2116 if (dump_file && (dump_flags & TDF_DETAILS))
2117 {
2118 fprintf (dump_file, "Cannot scalarize the following access "
2119 "because insufficient precision integer type was "
2120 "selected.\n ");
2121 dump_access (dump_file, access, false);
2122 }
2123 unscalarizable_region = true;
2124 }
2125
2126 if (grp_same_access_path
2127 && !same_access_path_p (access->expr, ac2->expr))
2128 grp_same_access_path = false;
2129
2130 ac2->group_representative = access;
2131 j++;
2132 }
2133
2134 i = j;
2135
2136 access->group_representative = access;
2137 access->grp_write = grp_write;
2138 access->grp_read = grp_read;
2139 access->grp_scalar_read = grp_scalar_read;
2140 access->grp_scalar_write = grp_scalar_write;
2141 access->grp_assignment_read = grp_assignment_read;
2142 access->grp_assignment_write = grp_assignment_write;
2143 access->grp_hint = multiple_scalar_reads && !constant_decl_p (var);
2144 access->grp_partial_lhs = grp_partial_lhs;
2145 access->grp_unscalarizable_region = unscalarizable_region;
2146 access->grp_same_access_path = grp_same_access_path;
2147
2148 *prev_acc_ptr = access;
2149 prev_acc_ptr = &access->next_grp;
2150 }
2151
2152 gcc_assert (res == (*access_vec)[0]);
2153 return res;
2154 }
2155
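/* A sketch of the partial-overlap rule enforced above: if one access to a
   candidate covers bits [0, 32) and another covers bits [16, 48), neither
   contains the other, so the function returns NULL and the variable is later
   disqualified.  Accesses covering [0, 32) and [0, 16), on the other hand,
   nest cleanly and simply end up as parent and child once the access trees
   are built below.  */
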
2156 /* Create a variable for the given ACCESS which determines the type, name and a
2157 few other properties.  Return the variable declaration; the caller is expected
2158 to store it in ACCESS->replacement_decl.  REG_TYPE is used when creating a
2159 declaration to base a default-definition SSA name on in order to facilitate an
2160 uninitialized warning.  It is used instead of the actual ACCESS type if that
2161 is not of a gimple register type. */
2162
2163 static tree
2164 create_access_replacement (struct access *access, tree reg_type = NULL_TREE)
2165 {
2166 tree repl;
2167
2168 tree type = access->type;
2169 if (reg_type && !is_gimple_reg_type (type))
2170 type = reg_type;
2171
2172 if (access->grp_to_be_debug_replaced)
2173 {
2174 repl = create_tmp_var_raw (access->type);
2175 DECL_CONTEXT (repl) = current_function_decl;
2176 }
2177 else
2178 /* Drop any special alignment on the type if it's not on the main
2179 variant. This avoids issues with weirdo ABIs like AAPCS. */
2180 repl = create_tmp_var (build_qualified_type (TYPE_MAIN_VARIANT (type),
2181 TYPE_QUALS (type)), "SR");
2182 if (access->grp_partial_lhs
2183 && is_gimple_reg_type (type))
2184 DECL_NOT_GIMPLE_REG_P (repl) = 1;
2185
2186 DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2187 DECL_ARTIFICIAL (repl) = 1;
2188 DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2189
2190 if (DECL_NAME (access->base)
2191 && !DECL_IGNORED_P (access->base)
2192 && !DECL_ARTIFICIAL (access->base))
2193 {
2194 char *pretty_name = make_fancy_name (access->expr);
2195 tree debug_expr = unshare_expr_without_location (access->expr), d;
2196 bool fail = false;
2197
2198 DECL_NAME (repl) = get_identifier (pretty_name);
2199 DECL_NAMELESS (repl) = 1;
2200 obstack_free (&name_obstack, pretty_name);
2201
2202 /* Get rid of any SSA_NAMEs embedded in debug_expr,
2203 as DECL_DEBUG_EXPR isn't considered when looking for still
2204 used SSA_NAMEs and thus they could be freed.  All debug info
2205 generation cares about is whether something is constant or variable
2206 and that get_ref_base_and_extent works properly on the
2207 expression. It cannot handle accesses at a non-constant offset
2208 though, so just give up in those cases. */
2209 for (d = debug_expr;
2210 !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2211 d = TREE_OPERAND (d, 0))
2212 switch (TREE_CODE (d))
2213 {
2214 case ARRAY_REF:
2215 case ARRAY_RANGE_REF:
2216 if (TREE_OPERAND (d, 1)
2217 && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2218 fail = true;
2219 if (TREE_OPERAND (d, 3)
2220 && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2221 fail = true;
2222 /* FALLTHRU */
2223 case COMPONENT_REF:
2224 if (TREE_OPERAND (d, 2)
2225 && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2226 fail = true;
2227 break;
2228 case MEM_REF:
2229 if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2230 fail = true;
2231 else
2232 d = TREE_OPERAND (d, 0);
2233 break;
2234 default:
2235 break;
2236 }
2237 if (!fail)
2238 {
2239 SET_DECL_DEBUG_EXPR (repl, debug_expr);
2240 DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2241 }
2242 if (access->grp_no_warning)
2243 TREE_NO_WARNING (repl) = 1;
2244 else
2245 TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2246 }
2247 else
2248 TREE_NO_WARNING (repl) = 1;
2249
2250 if (dump_file)
2251 {
2252 if (access->grp_to_be_debug_replaced)
2253 {
2254 fprintf (dump_file, "Created a debug-only replacement for ");
2255 print_generic_expr (dump_file, access->base);
2256 fprintf (dump_file, " offset: %u, size: %u\n",
2257 (unsigned) access->offset, (unsigned) access->size);
2258 }
2259 else
2260 {
2261 fprintf (dump_file, "Created a replacement for ");
2262 print_generic_expr (dump_file, access->base);
2263 fprintf (dump_file, " offset: %u, size: %u: ",
2264 (unsigned) access->offset, (unsigned) access->size);
2265 print_generic_expr (dump_file, repl, TDF_UID);
2266 fprintf (dump_file, "\n");
2267 }
2268 }
2269 sra_stats.replacements++;
2270
2271 return repl;
2272 }
2273
2274 /* Return ACCESS's scalar replacement, which must exist. */
2275
2276 static inline tree
2277 get_access_replacement (struct access *access)
2278 {
2279 gcc_checking_assert (access->replacement_decl);
2280 return access->replacement_decl;
2281 }
2282
2283
2284 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2285 linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2286 to by it is not "within" the root.  Return false iff some accesses partially
2287 overlap. */
2288
2289 static bool
2290 build_access_subtree (struct access **access)
2291 {
2292 struct access *root = *access, *last_child = NULL;
2293 HOST_WIDE_INT limit = root->offset + root->size;
2294
2295 *access = (*access)->next_grp;
2296 while (*access && (*access)->offset + (*access)->size <= limit)
2297 {
2298 if (!last_child)
2299 root->first_child = *access;
2300 else
2301 last_child->next_sibling = *access;
2302 last_child = *access;
2303 (*access)->parent = root;
2304 (*access)->grp_write |= root->grp_write;
2305
2306 if (!build_access_subtree (access))
2307 return false;
2308 }
2309
2310 if (*access && (*access)->offset < limit)
2311 return false;
2312
2313 return true;
2314 }
2315
2316 /* Build a tree of access representatives, ACCESS is the pointer to the first
2317 one, others are linked in a list by the next_grp field. Return false iff
2318 some accesses partially overlap. */
2319
2320 static bool
2321 build_access_trees (struct access *access)
2322 {
2323 while (access)
2324 {
2325 struct access *root = access;
2326
2327 if (!build_access_subtree (&access))
2328 return false;
2329 root->next_grp = access;
2330 }
2331 return true;
2332 }
2333
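/* For illustration, assume a candidate

     struct S { struct { int a; int b; } in; int c; } s;

   with group representatives for s (bits [0, 96)), s.in (bits [0, 64)),
   s.in.a (bits [0, 32)) and s.c (bits [64, 96)).  The loop above makes the
   access for s a root whose children are the accesses for s.in and s.c, with
   s.in.a becoming a child of s.in, mirroring the containment of offsets and
   sizes.  */
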
2334 /* Traverse the access forest where ROOT is the first root and verify that
2335 various important invariants hold true. */
2336
2337 DEBUG_FUNCTION void
2338 verify_sra_access_forest (struct access *root)
2339 {
2340 struct access *access = root;
2341 tree first_base = root->base;
2342 gcc_assert (DECL_P (first_base));
2343 do
2344 {
2345 gcc_assert (access->base == first_base);
2346 if (access->parent)
2347 gcc_assert (access->offset >= access->parent->offset
2348 && access->size <= access->parent->size);
2349 if (access->next_sibling)
2350 gcc_assert (access->next_sibling->offset
2351 >= access->offset + access->size);
2352
2353 poly_int64 poffset, psize, pmax_size;
2354 bool reverse;
2355 tree base = get_ref_base_and_extent (access->expr, &poffset, &psize,
2356 &pmax_size, &reverse);
2357 HOST_WIDE_INT offset, size, max_size;
2358 if (!poffset.is_constant (&offset)
2359 || !psize.is_constant (&size)
2360 || !pmax_size.is_constant (&max_size))
2361 gcc_unreachable ();
2362 gcc_assert (base == first_base);
2363 gcc_assert (offset == access->offset);
2364 gcc_assert (access->grp_unscalarizable_region
2365 || access->grp_total_scalarization
2366 || size == max_size);
2367 gcc_assert (access->grp_unscalarizable_region
2368 || !is_gimple_reg_type (access->type)
2369 || size == access->size);
2370 gcc_assert (reverse == access->reverse);
2371
2372 if (access->first_child)
2373 {
2374 gcc_assert (access->first_child->parent == access);
2375 access = access->first_child;
2376 }
2377 else if (access->next_sibling)
2378 {
2379 gcc_assert (access->next_sibling->parent == access->parent);
2380 access = access->next_sibling;
2381 }
2382 else
2383 {
2384 while (access->parent && !access->next_sibling)
2385 access = access->parent;
2386 if (access->next_sibling)
2387 access = access->next_sibling;
2388 else
2389 {
2390 gcc_assert (access == root);
2391 root = root->next_grp;
2392 access = root;
2393 }
2394 }
2395 }
2396 while (access);
2397 }
2398
2399 /* Verify access forests of all candidates with accesses by calling
2400 verify_sra_access_forest on each of them. */
2401
2402 DEBUG_FUNCTION void
2403 verify_all_sra_access_forests (void)
2404 {
2405 bitmap_iterator bi;
2406 unsigned i;
2407 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2408 {
2409 tree var = candidate (i);
2410 struct access *access = get_first_repr_for_decl (var);
2411 if (access)
2412 {
2413 gcc_assert (access->base == var);
2414 verify_sra_access_forest (access);
2415 }
2416 }
2417 }
2418
2419 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
2420 array. */
2421
2422 static bool
2423 expr_with_var_bounded_array_refs_p (tree expr)
2424 {
2425 while (handled_component_p (expr))
2426 {
2427 if (TREE_CODE (expr) == ARRAY_REF
2428 && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2429 return true;
2430 expr = TREE_OPERAND (expr, 0);
2431 }
2432 return false;
2433 }
2434
2435 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2436 both seeming beneficial and when ALLOW_REPLACEMENTS allows it. If TOTALLY
2437 is set, we are totally scalarizing the aggregate. Also set all sorts of
2438 access flags appropriately along the way, notably propagating grp_read,
2439 grp_assignment_read, grp_write and grp_assignment_write from PARENT down to
2440 ROOT.
2441
2442 Creating a replacement for a scalar access is considered beneficial if its
2443 grp_hint or TOTALLY is set (this means either that there is more than one
2444 direct read access or that we are attempting total scalarization) or
2445 according to the following table:
2446
2447 Access written to through a scalar type (once or more times)
2448 |
2449 | Written to in an assignment statement
2450 | |
2451 | | Access read as scalar _once_
2452 | | |
2453 | | | Read in an assignment statement
2454 | | | |
2455 | | | | Scalarize Comment
2456 -----------------------------------------------------------------------------
2457 0 0 0 0 No access for the scalar
2458 0 0 0 1 No access for the scalar
2459 0 0 1 0 No Single read - won't help
2460 0 0 1 1 No The same case
2461 0 1 0 0 No access for the scalar
2462 0 1 0 1 No access for the scalar
2463 0 1 1 0 Yes s = *g; return s.i;
2464 0 1 1 1 Yes The same case as above
2465 1 0 0 0 No Won't help
2466 1 0 0 1 Yes s.i = 1; *g = s;
2467 1 0 1 0 Yes s.i = 5; g = s.i;
2468 1 0 1 1 Yes The same case as above
2469 1 1 0 0 No Won't help.
2470 1 1 0 1 Yes s.i = 1; *g = s;
2471 1 1 1 0 Yes s = *g; return s.i;
2472 1 1 1 1 Yes Any of the above yeses */
2473
2474 static bool
2475 analyze_access_subtree (struct access *root, struct access *parent,
2476 bool allow_replacements, bool totally)
2477 {
2478 struct access *child;
2479 HOST_WIDE_INT limit = root->offset + root->size;
2480 HOST_WIDE_INT covered_to = root->offset;
2481 bool scalar = is_gimple_reg_type (root->type);
2482 bool hole = false, sth_created = false;
2483
2484 if (parent)
2485 {
2486 if (parent->grp_read)
2487 root->grp_read = 1;
2488 if (parent->grp_assignment_read)
2489 root->grp_assignment_read = 1;
2490 if (parent->grp_write)
2491 root->grp_write = 1;
2492 if (parent->grp_assignment_write)
2493 root->grp_assignment_write = 1;
2494 if (!parent->grp_same_access_path)
2495 root->grp_same_access_path = 0;
2496 }
2497
2498 if (root->grp_unscalarizable_region)
2499 allow_replacements = false;
2500
2501 if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2502 allow_replacements = false;
2503
2504 for (child = root->first_child; child; child = child->next_sibling)
2505 {
2506 hole |= covered_to < child->offset;
2507 sth_created |= analyze_access_subtree (child, root,
2508 allow_replacements && !scalar,
2509 totally);
2510
2511 root->grp_unscalarized_data |= child->grp_unscalarized_data;
2512 if (child->grp_covered)
2513 covered_to += child->size;
2514 else
2515 hole = true;
2516 }
2517
2518 if (allow_replacements && scalar && !root->first_child
2519 && (totally || !root->grp_total_scalarization)
2520 && (totally
2521 || root->grp_hint
2522 || ((root->grp_scalar_read || root->grp_assignment_read)
2523 && (root->grp_scalar_write || root->grp_assignment_write))))
2524 {
2525 /* Always create access replacements that cover the whole access.
2526 For integral types this means the precision has to match.
2527 Avoid assumptions based on the integral type kind, too. */
2528 if (INTEGRAL_TYPE_P (root->type)
2529 && (TREE_CODE (root->type) != INTEGER_TYPE
2530 || TYPE_PRECISION (root->type) != root->size)
2531 /* But leave bitfield accesses alone. */
2532 && (TREE_CODE (root->expr) != COMPONENT_REF
2533 || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2534 {
2535 tree rt = root->type;
2536 gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2537 && (root->size % BITS_PER_UNIT) == 0);
2538 root->type = build_nonstandard_integer_type (root->size,
2539 TYPE_UNSIGNED (rt));
2540 root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2541 root->offset, root->reverse,
2542 root->type, NULL, false);
2543
2544 if (dump_file && (dump_flags & TDF_DETAILS))
2545 {
2546 fprintf (dump_file, "Changing the type of a replacement for ");
2547 print_generic_expr (dump_file, root->base);
2548 fprintf (dump_file, " offset: %u, size: %u ",
2549 (unsigned) root->offset, (unsigned) root->size);
2550 fprintf (dump_file, " to an integer.\n");
2551 }
2552 }
2553
2554 root->grp_to_be_replaced = 1;
2555 root->replacement_decl = create_access_replacement (root);
2556 sth_created = true;
2557 hole = false;
2558 }
2559 else
2560 {
2561 if (allow_replacements
2562 && scalar && !root->first_child
2563 && !root->grp_total_scalarization
2564 && (root->grp_scalar_write || root->grp_assignment_write)
2565 && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2566 DECL_UID (root->base)))
2567 {
2568 gcc_checking_assert (!root->grp_scalar_read
2569 && !root->grp_assignment_read);
2570 sth_created = true;
2571 if (MAY_HAVE_DEBUG_BIND_STMTS)
2572 {
2573 root->grp_to_be_debug_replaced = 1;
2574 root->replacement_decl = create_access_replacement (root);
2575 }
2576 }
2577
2578 if (covered_to < limit)
2579 hole = true;
2580 if (scalar || !allow_replacements)
2581 root->grp_total_scalarization = 0;
2582 }
2583
2584 if (!hole || totally)
2585 root->grp_covered = 1;
2586 else if (root->grp_write || comes_initialized_p (root->base))
2587 root->grp_unscalarized_data = 1; /* not covered and written to */
2588 return sth_created;
2589 }
2590
2591 /* Analyze all access trees linked by next_grp by the means of
2592 analyze_access_subtree. */
2593 static bool
2594 analyze_access_trees (struct access *access)
2595 {
2596 bool ret = false;
2597
2598 while (access)
2599 {
2600 if (analyze_access_subtree (access, NULL, true,
2601 access->grp_total_scalarization))
2602 ret = true;
2603 access = access->next_grp;
2604 }
2605
2606 return ret;
2607 }
2608
2609 /* Return true iff a potential new child of ACC at offset NORM_OFFSET and with
2610 SIZE would conflict with an already existing one. If exactly such a child
2611 already exists in ACC, store a pointer to it in EXACT_MATCH. */
2612
2613 static bool
2614 child_would_conflict_in_acc (struct access *acc, HOST_WIDE_INT norm_offset,
2615 HOST_WIDE_INT size, struct access **exact_match)
2616 {
2617 struct access *child;
2618
2619 for (child = acc->first_child; child; child = child->next_sibling)
2620 {
2621 if (child->offset == norm_offset && child->size == size)
2622 {
2623 *exact_match = child;
2624 return true;
2625 }
2626
2627 if (child->offset < norm_offset + size
2628 && child->offset + child->size > norm_offset)
2629 return true;
2630 }
2631
2632 return false;
2633 }
2634
2635 /* Create a new child access of PARENT, with all properties just like MODEL
2636 except for its offset.  Return the new access or NULL if it cannot be
2637 created.  Note that this access is created long after all splicing and
2638 sorting, it's not located in any access vector and is automatically a
2639 representative of its group.  Set the grp_read and grp_write flags of the
2640 new access according to SET_GRP_READ and SET_GRP_WRITE. */
2641
2642 static struct access *
2643 create_artificial_child_access (struct access *parent, struct access *model,
2644 HOST_WIDE_INT new_offset,
2645 bool set_grp_read, bool set_grp_write)
2646 {
2647 struct access **child;
2648 tree expr = parent->base;
2649
2650 gcc_assert (!model->grp_unscalarizable_region);
2651
2652 struct access *access = access_pool.allocate ();
2653 memset (access, 0, sizeof (struct access));
2654 if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2655 model->type))
2656 {
2657 access->grp_no_warning = true;
2658 expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2659 new_offset, model, NULL, false);
2660 }
2661
2662 access->base = parent->base;
2663 access->expr = expr;
2664 access->offset = new_offset;
2665 access->size = model->size;
2666 access->type = model->type;
2667 access->parent = parent;
2668 access->grp_read = set_grp_read;
2669 access->grp_write = set_grp_write;
2670 access->reverse = model->reverse;
2671
2672 child = &parent->first_child;
2673 while (*child && (*child)->offset < new_offset)
2674 child = &(*child)->next_sibling;
2675
2676 access->next_sibling = *child;
2677 *child = access;
2678
2679 return access;
2680 }
2681
2682
2683 /* Beginning with ACCESS, traverse its whole access subtree and mark all
2684 sub-trees as written to. If any of them has not been marked so previously
2685 and has assignment links leading from it, re-enqueue it. */
2686
2687 static void
2688 subtree_mark_written_and_rhs_enqueue (struct access *access)
2689 {
2690 if (access->grp_write)
2691 return;
2692 access->grp_write = true;
2693 add_access_to_rhs_work_queue (access);
2694
2695 struct access *child;
2696 for (child = access->first_child; child; child = child->next_sibling)
2697 subtree_mark_written_and_rhs_enqueue (child);
2698 }
2699
2700 /* If there is still budget to create a propagation access for DECL, return
2701 true and decrement the budget. Otherwise return false. */
2702
2703 static bool
2704 budget_for_propagation_access (tree decl)
2705 {
2706 unsigned b, *p = propagation_budget->get (decl);
2707 if (p)
2708 b = *p;
2709 else
2710 b = param_sra_max_propagations;
2711
2712 if (b == 0)
2713 return false;
2714 b--;
2715
2716 if (b == 0 && dump_file && (dump_flags & TDF_DETAILS))
2717 {
2718 fprintf (dump_file, "The propagation budget of ");
2719 print_generic_expr (dump_file, decl);
2720 fprintf (dump_file, " (UID: %u) has been exhausted.\n", DECL_UID (decl));
2721 }
2722 propagation_budget->put (decl, b);
2723 return true;
2724 }
2725
2726 /* Propagate subaccesses and grp_write flags of RACC across an assignment link
2727 to LACC. Enqueue sub-accesses as necessary so that the write flag is
2728 propagated transitively. Return true if anything changed. Additionally, if
2729 RACC is a scalar access but LACC is not, change the type of the latter, if
2730 possible. */
2731
2732 static bool
2733 propagate_subaccesses_from_rhs (struct access *lacc, struct access *racc)
2734 {
2735 struct access *rchild;
2736 HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2737 bool ret = false;
2738
2739 /* If the LHS is still not marked as being written to, we only need to do so
2740 if the RHS at this level actually was. */
2741 if (!lacc->grp_write)
2742 {
2743 gcc_checking_assert (!comes_initialized_p (racc->base));
2744 if (racc->grp_write)
2745 {
2746 subtree_mark_written_and_rhs_enqueue (lacc);
2747 ret = true;
2748 }
2749 }
2750
2751 if (is_gimple_reg_type (lacc->type)
2752 || lacc->grp_unscalarizable_region
2753 || racc->grp_unscalarizable_region)
2754 {
2755 if (!lacc->grp_write)
2756 {
2757 ret = true;
2758 subtree_mark_written_and_rhs_enqueue (lacc);
2759 }
2760 return ret;
2761 }
2762
2763 if (is_gimple_reg_type (racc->type))
2764 {
2765 if (!lacc->grp_write)
2766 {
2767 ret = true;
2768 subtree_mark_written_and_rhs_enqueue (lacc);
2769 }
2770 if (!lacc->first_child && !racc->first_child)
2771 {
2772 /* We are about to change the access type from aggregate to scalar,
2773 so we need to put the reverse flag onto the access, if any. */
2774 const bool reverse = TYPE_REVERSE_STORAGE_ORDER (lacc->type);
2775 tree t = lacc->base;
2776
2777 lacc->type = racc->type;
2778 if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2779 lacc->offset, racc->type))
2780 {
2781 lacc->expr = t;
2782 lacc->grp_same_access_path = true;
2783 }
2784 else
2785 {
2786 lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2787 lacc->base, lacc->offset,
2788 racc, NULL, false);
2789 if (TREE_CODE (lacc->expr) == MEM_REF)
2790 REF_REVERSE_STORAGE_ORDER (lacc->expr) = reverse;
2791 lacc->grp_no_warning = true;
2792 lacc->grp_same_access_path = false;
2793 }
2794 lacc->reverse = reverse;
2795 }
2796 return ret;
2797 }
2798
2799 for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2800 {
2801 struct access *new_acc = NULL;
2802 HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2803
2804 if (child_would_conflict_in_acc (lacc, norm_offset, rchild->size,
2805 &new_acc))
2806 {
2807 if (new_acc)
2808 {
2809 if (!new_acc->grp_write && rchild->grp_write)
2810 {
2811 gcc_assert (!lacc->grp_write);
2812 subtree_mark_written_and_rhs_enqueue (new_acc);
2813 ret = true;
2814 }
2815
2816 rchild->grp_hint = 1;
2817 new_acc->grp_hint |= new_acc->grp_read;
2818 if (rchild->first_child
2819 && propagate_subaccesses_from_rhs (new_acc, rchild))
2820 {
2821 ret = 1;
2822 add_access_to_rhs_work_queue (new_acc);
2823 }
2824 }
2825 else
2826 {
2827 if (!lacc->grp_write)
2828 {
2829 ret = true;
2830 subtree_mark_written_and_rhs_enqueue (lacc);
2831 }
2832 }
2833 continue;
2834 }
2835
2836 if (rchild->grp_unscalarizable_region
2837 || !budget_for_propagation_access (lacc->base))
2838 {
2839 if (rchild->grp_write && !lacc->grp_write)
2840 {
2841 ret = true;
2842 subtree_mark_written_and_rhs_enqueue (lacc);
2843 }
2844 continue;
2845 }
2846
2847 rchild->grp_hint = 1;
2848 /* Because get_ref_base_and_extent always includes padding in size for
2849 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2850 type, we might actually be attempting here to create a child of the
2851 same type as the parent. */
2852 if (!types_compatible_p (lacc->type, rchild->type))
2853 new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
2854 false,
2855 (lacc->grp_write
2856 || rchild->grp_write));
2857 else
2858 new_acc = lacc;
2859 gcc_checking_assert (new_acc);
2860 if (racc->first_child)
2861 propagate_subaccesses_from_rhs (new_acc, rchild);
2862
2863 add_access_to_rhs_work_queue (lacc);
2864 ret = true;
2865 }
2866
2867 return ret;
2868 }
2869
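/* A hedged example of the propagation above: for candidate aggregates used as

     dst = src;
     ... = src.field;

   the access representing src.field is a child of the RHS access of the
   assignment, so it is propagated across the assign link and dst receives a
   corresponding artificial child access.  This allows the aggregate copy to
   later be rewritten in terms of the individual scalar replacements, and the
   grp_write flags are propagated transitively through the work queue.  */
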
2870 /* Propagate subaccesses of LACC across an assignment link to RACC if they
2871 should inhibit total scalarization of the corresponding area. No flags are
2872 being propagated in the process. Return true if anything changed. */
2873
2874 static bool
2875 propagate_subaccesses_from_lhs (struct access *lacc, struct access *racc)
2876 {
2877 if (is_gimple_reg_type (racc->type)
2878 || lacc->grp_unscalarizable_region
2879 || racc->grp_unscalarizable_region)
2880 return false;
2881
2882 /* TODO: Do we want to set some new racc flag to stop potential total
2883 scalarization if lacc is a scalar access (and neither of the two has
2884 children)? */
2885
2886 bool ret = false;
2887 HOST_WIDE_INT norm_delta = racc->offset - lacc->offset;
2888 for (struct access *lchild = lacc->first_child;
2889 lchild;
2890 lchild = lchild->next_sibling)
2891 {
2892 struct access *matching_acc = NULL;
2893 HOST_WIDE_INT norm_offset = lchild->offset + norm_delta;
2894
2895 if (lchild->grp_unscalarizable_region
2896 || child_would_conflict_in_acc (racc, norm_offset, lchild->size,
2897 &matching_acc)
2898 || !budget_for_propagation_access (racc->base))
2899 {
2900 if (matching_acc
2901 && propagate_subaccesses_from_lhs (lchild, matching_acc))
2902 add_access_to_lhs_work_queue (matching_acc);
2903 continue;
2904 }
2905
2906 /* Because get_ref_base_and_extent always includes padding in size for
2907 accesses to DECLs but not necessarily for COMPONENT_REFs of the same
2908 type, we might be actually attempting to here to create a child of the
2909 same type as the parent. */
2910 if (!types_compatible_p (racc->type, lchild->type))
2911 {
2912 struct access *new_acc
2913 = create_artificial_child_access (racc, lchild, norm_offset,
2914 true, false);
2915 propagate_subaccesses_from_lhs (lchild, new_acc);
2916 }
2917 else
2918 propagate_subaccesses_from_lhs (lchild, racc);
2919 ret = true;
2920 }
2921 return ret;
2922 }
2923
2924 /* Propagate all subaccesses across assignment links. */
2925
2926 static void
2927 propagate_all_subaccesses (void)
2928 {
2929 propagation_budget = new hash_map<tree, unsigned>;
2930 while (rhs_work_queue_head)
2931 {
2932 struct access *racc = pop_access_from_rhs_work_queue ();
2933 struct assign_link *link;
2934
2935 if (racc->group_representative)
2936 racc = racc->group_representative;
2937 gcc_assert (racc->first_rhs_link);
2938
2939 for (link = racc->first_rhs_link; link; link = link->next_rhs)
2940 {
2941 struct access *lacc = link->lacc;
2942
2943 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2944 continue;
2945 lacc = lacc->group_representative;
2946
2947 bool reque_parents = false;
2948 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2949 {
2950 if (!lacc->grp_write)
2951 {
2952 subtree_mark_written_and_rhs_enqueue (lacc);
2953 reque_parents = true;
2954 }
2955 }
2956 else if (propagate_subaccesses_from_rhs (lacc, racc))
2957 reque_parents = true;
2958
2959 if (reque_parents)
2960 do
2961 {
2962 add_access_to_rhs_work_queue (lacc);
2963 lacc = lacc->parent;
2964 }
2965 while (lacc);
2966 }
2967 }
2968
2969 while (lhs_work_queue_head)
2970 {
2971 struct access *lacc = pop_access_from_lhs_work_queue ();
2972 struct assign_link *link;
2973
2974 if (lacc->group_representative)
2975 lacc = lacc->group_representative;
2976 gcc_assert (lacc->first_lhs_link);
2977
2978 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2979 continue;
2980
2981 for (link = lacc->first_lhs_link; link; link = link->next_lhs)
2982 {
2983 struct access *racc = link->racc;
2984
2985 if (racc->group_representative)
2986 racc = racc->group_representative;
2987 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
2988 continue;
2989 if (propagate_subaccesses_from_lhs (lacc, racc))
2990 add_access_to_lhs_work_queue (racc);
2991 }
2992 }
2993 delete propagation_budget;
2994 }
2995
2996 /* Return true if the forest beginning with ROOT does not contain
2997 unscalarizable regions or non-byte aligned accesses. */
2998
2999 static bool
3000 can_totally_scalarize_forest_p (struct access *root)
3001 {
3002 struct access *access = root;
3003 do
3004 {
3005 if (access->grp_unscalarizable_region
3006 || (access->offset % BITS_PER_UNIT) != 0
3007 || (access->size % BITS_PER_UNIT) != 0
3008 || (is_gimple_reg_type (access->type)
3009 && access->first_child))
3010 return false;
3011
3012 if (access->first_child)
3013 access = access->first_child;
3014 else if (access->next_sibling)
3015 access = access->next_sibling;
3016 else
3017 {
3018 while (access->parent && !access->next_sibling)
3019 access = access->parent;
3020 if (access->next_sibling)
3021 access = access->next_sibling;
3022 else
3023 {
3024 gcc_assert (access == root);
3025 root = root->next_grp;
3026 access = root;
3027 }
3028 }
3029 }
3030 while (access);
3031 return true;
3032 }
3033
3034 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3035 reference EXPR for total scalarization purposes and mark it as such. Within
3036 the children of PARENT, link it in at *PTR, with NEXT_SIBLING following it. */
3037
3038 static struct access *
3039 create_total_scalarization_access (struct access *parent, HOST_WIDE_INT pos,
3040 HOST_WIDE_INT size, tree type, tree expr,
3041 struct access **ptr,
3042 struct access *next_sibling)
3043 {
3044 struct access *access = access_pool.allocate ();
3045 memset (access, 0, sizeof (struct access));
3046 access->base = parent->base;
3047 access->offset = pos;
3048 access->size = size;
3049 access->expr = expr;
3050 access->type = type;
3051 access->parent = parent;
3052 access->grp_write = parent->grp_write;
3053 access->grp_total_scalarization = 1;
3054 access->grp_hint = 1;
3055 access->grp_same_access_path = path_comparable_for_same_access (expr);
3056 access->reverse = reverse_storage_order_for_component_p (expr);
3057
3058 access->next_sibling = next_sibling;
3059 *ptr = access;
3060 return access;
3061 }
3062
3063 /* Create and return an ACCESS in PARENT spanning from POS with SIZE, TYPE and
3064 reference EXPR for total scalarization purposes and mark it as such, link it
3065 at *PTR and reshape the tree so that those elements at *PTR and their
3066 siblings which fall within the part described by POS and SIZE are moved to
3067 be children of the new access. If a partial overlap is detected, return
3068 NULL. */
3069
3070 static struct access *
3071 create_total_access_and_reshape (struct access *parent, HOST_WIDE_INT pos,
3072 HOST_WIDE_INT size, tree type, tree expr,
3073 struct access **ptr)
3074 {
3075 struct access **p = ptr;
3076
3077 while (*p && (*p)->offset < pos + size)
3078 {
3079 if ((*p)->offset + (*p)->size > pos + size)
3080 return NULL;
3081 p = &(*p)->next_sibling;
3082 }
3083
3084 struct access *next_child = *ptr;
3085 struct access *new_acc
3086 = create_total_scalarization_access (parent, pos, size, type, expr,
3087 ptr, *p);
3088 if (p != ptr)
3089 {
3090 new_acc->first_child = next_child;
3091 *p = NULL;
3092 for (struct access *a = next_child; a; a = a->next_sibling)
3093 a->parent = new_acc;
3094 }
3095 return new_acc;
3096 }
3097
3098 static bool totally_scalarize_subtree (struct access *root);
3099
3100 /* Return true if INNER is either the same type as OUTER or if it is the type
3101 of a record field in OUTER at offset zero, possibly in nested
3102 sub-records. */
3103
3104 static bool
3105 access_and_field_type_match_p (tree outer, tree inner)
3106 {
3107 if (TYPE_MAIN_VARIANT (outer) == TYPE_MAIN_VARIANT (inner))
3108 return true;
3109 if (TREE_CODE (outer) != RECORD_TYPE)
3110 return false;
3111 tree fld = TYPE_FIELDS (outer);
3112 while (fld)
3113 {
3114 if (TREE_CODE (fld) == FIELD_DECL)
3115 {
3116 if (!zerop (DECL_FIELD_OFFSET (fld)))
3117 return false;
3118 if (TYPE_MAIN_VARIANT (TREE_TYPE (fld)) == inner)
3119 return true;
3120 if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE)
3121 fld = TYPE_FIELDS (TREE_TYPE (fld));
3122 else
3123 return false;
3124 }
3125 else
3126 fld = DECL_CHAIN (fld);
3127 }
3128 return false;
3129 }
3130
3131 /* Return type of total_should_skip_creating_access indicating whether a total
3132 scalarization access for a field/element should be created, whether it
3133 already exists or whether the entire total scalarization has to fail. */
3134
3135 enum total_sra_field_state {TOTAL_FLD_CREATE, TOTAL_FLD_DONE, TOTAL_FLD_FAILED};
3136
3137 /* Do all the necessary steps in total scalarization when a part of the given
3138 aggregate with type TYPE at POS and with the given SIZE should be put into
3139 PARENT and when we have processed all its siblings with smaller offsets up
3140 until and including LAST_SEEN_SIBLING (which can be NULL).
3141
3142 If some further siblings are to be skipped, set *LAST_SEEN_SIBLING as
3143 appropriate.  Return TOTAL_FLD_CREATE if the caller should carry on with
3144 creating a new access, TOTAL_FLD_DONE if access or accesses capable of
3145 representing the described part of the aggregate for the purposes of total
3146 scalarization already exist or TOTAL_FLD_FAILED if there is a problem which
3147 prevents total scalarization from happening at all. */
3148
3149 static enum total_sra_field_state
3150 total_should_skip_creating_access (struct access *parent,
3151 struct access **last_seen_sibling,
3152 tree type, HOST_WIDE_INT pos,
3153 HOST_WIDE_INT size)
3154 {
3155 struct access *next_child;
3156 if (!*last_seen_sibling)
3157 next_child = parent->first_child;
3158 else
3159 next_child = (*last_seen_sibling)->next_sibling;
3160
3161 /* First, traverse the chain of siblings until it points to an access with
3162 offset at least equal to POS. Check all skipped accesses whether they
3163 span the POS boundary and if so, return with a failure. */
3164 while (next_child && next_child->offset < pos)
3165 {
3166 if (next_child->offset + next_child->size > pos)
3167 return TOTAL_FLD_FAILED;
3168 *last_seen_sibling = next_child;
3169 next_child = next_child->next_sibling;
3170 }
3171
3172 /* Now check whether next_child has exactly the right POS and SIZE and if so,
3173 whether it can represent what we need and can be totally scalarized
3174 itself. */
3175 if (next_child && next_child->offset == pos
3176 && next_child->size == size)
3177 {
3178 if (!is_gimple_reg_type (next_child->type)
3179 && (!access_and_field_type_match_p (type, next_child->type)
3180 || !totally_scalarize_subtree (next_child)))
3181 return TOTAL_FLD_FAILED;
3182
3183 *last_seen_sibling = next_child;
3184 return TOTAL_FLD_DONE;
3185 }
3186
3187 /* If the child we're looking at would partially overlap, we just cannot
3188 totally scalarize. */
3189 if (next_child
3190 && next_child->offset < pos + size
3191 && next_child->offset + next_child->size > pos + size)
3192 return TOTAL_FLD_FAILED;
3193
3194 if (is_gimple_reg_type (type))
3195 {
3196 /* We don't scalarize accesses that are children of other scalar type
3197 accesses, so if we go on and create an access for a register type,
3198 there should not be any pre-existing children. There are rare cases
3199 where the requested type is a vector but we already have register
3200 accesses for all its elements which is equally good. Detect that
3201 situation or whether we need to bail out. */
3202
3203 HOST_WIDE_INT covered = pos;
3204 bool skipping = false;
3205 while (next_child
3206 && next_child->offset + next_child->size <= pos + size)
3207 {
3208 if (next_child->offset != covered
3209 || !is_gimple_reg_type (next_child->type))
3210 return TOTAL_FLD_FAILED;
3211
3212 covered += next_child->size;
3213 *last_seen_sibling = next_child;
3214 next_child = next_child->next_sibling;
3215 skipping = true;
3216 }
3217
3218 if (skipping)
3219 {
3220 if (covered != pos + size)
3221 return TOTAL_FLD_FAILED;
3222 else
3223 return TOTAL_FLD_DONE;
3224 }
3225 }
3226
3227 return TOTAL_FLD_CREATE;
3228 }
3229
3230 /* Go over the sub-tree rooted in ROOT and attempt to create scalar accesses
3231 spanning all parts of the area covered by ROOT that are not covered yet,
3232 return false if the attempt failed.  All created accesses will have
3233 grp_total_scalarization set (and should be ignored if the function returns
3234 false). */
3234
3235 static bool
3236 totally_scalarize_subtree (struct access *root)
3237 {
3238 gcc_checking_assert (!root->grp_unscalarizable_region);
3239 gcc_checking_assert (!is_gimple_reg_type (root->type));
3240
3241 struct access *last_seen_sibling = NULL;
3242
3243 switch (TREE_CODE (root->type))
3244 {
3245 case RECORD_TYPE:
3246 for (tree fld = TYPE_FIELDS (root->type); fld; fld = DECL_CHAIN (fld))
3247 if (TREE_CODE (fld) == FIELD_DECL)
3248 {
3249 tree ft = TREE_TYPE (fld);
3250 HOST_WIDE_INT fsize = tree_to_uhwi (DECL_SIZE (fld));
3251 if (!fsize)
3252 continue;
3253
3254 HOST_WIDE_INT pos = root->offset + int_bit_position (fld);
3255 enum total_sra_field_state
3256 state = total_should_skip_creating_access (root,
3257 &last_seen_sibling,
3258 ft, pos, fsize);
3259 switch (state)
3260 {
3261 case TOTAL_FLD_FAILED:
3262 return false;
3263 case TOTAL_FLD_DONE:
3264 continue;
3265 case TOTAL_FLD_CREATE:
3266 break;
3267 default:
3268 gcc_unreachable ();
3269 }
3270
3271 struct access **p = (last_seen_sibling
3272 ? &last_seen_sibling->next_sibling
3273 : &root->first_child);
3274 tree nref = build3 (COMPONENT_REF, ft, root->expr, fld, NULL_TREE);
3275 struct access *new_child
3276 = create_total_access_and_reshape (root, pos, fsize, ft, nref, p);
3277 if (!new_child)
3278 return false;
3279
3280 if (!is_gimple_reg_type (ft)
3281 && !totally_scalarize_subtree (new_child))
3282 return false;
3283 last_seen_sibling = new_child;
3284 }
3285 break;
3286 case ARRAY_TYPE:
3287 {
3288 tree elemtype = TREE_TYPE (root->type);
3289 tree elem_size = TYPE_SIZE (elemtype);
3290 gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
3291 HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
3292 gcc_assert (el_size > 0);
3293
3294 tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (root->type));
3295 gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
3296 tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (root->type));
3297 /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1. */
3298 if (!maxidx)
3299 goto out;
3300 gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
3301 tree domain = TYPE_DOMAIN (root->type);
3302 /* MINIDX and MAXIDX are inclusive, and must be interpreted in
3303 DOMAIN (e.g. signed int, whereas min/max may be size_int). */
3304 offset_int idx = wi::to_offset (minidx);
3305 offset_int max = wi::to_offset (maxidx);
3306 if (!TYPE_UNSIGNED (domain))
3307 {
3308 idx = wi::sext (idx, TYPE_PRECISION (domain));
3309 max = wi::sext (max, TYPE_PRECISION (domain));
3310 }
3311 for (HOST_WIDE_INT pos = root->offset;
3312 idx <= max;
3313 pos += el_size, ++idx)
3314 {
3315 enum total_sra_field_state
3316 state = total_should_skip_creating_access (root,
3317 &last_seen_sibling,
3318 elemtype, pos,
3319 el_size);
3320 switch (state)
3321 {
3322 case TOTAL_FLD_FAILED:
3323 return false;
3324 case TOTAL_FLD_DONE:
3325 continue;
3326 case TOTAL_FLD_CREATE:
3327 break;
3328 default:
3329 gcc_unreachable ();
3330 }
3331
3332 struct access **p = (last_seen_sibling
3333 ? &last_seen_sibling->next_sibling
3334 : &root->first_child);
3335 tree nref = build4 (ARRAY_REF, elemtype, root->expr,
3336 wide_int_to_tree (domain, idx),
3337 NULL_TREE, NULL_TREE);
3338 struct access *new_child
3339 = create_total_access_and_reshape (root, pos, el_size, elemtype,
3340 nref, p);
3341 if (!new_child)
3342 return false;
3343
3344 if (!is_gimple_reg_type (elemtype)
3345 && !totally_scalarize_subtree (new_child))
3346 return false;
3347 last_seen_sibling = new_child;
3348 }
3349 }
3350 break;
3351 default:
3352 gcc_unreachable ();
3353 }
3354
3355 out:
3356 return true;
3357 }
3358
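/* For illustration only: when total scalarization is attempted for

     struct S { int i; float f; } s;

   and the only recorded access is an aggregate one covering the whole of s,
   the RECORD_TYPE case above synthesizes child accesses for s.i and s.f
   through newly built COMPONENT_REFs, and each of them can then receive its
   own scalar replacement during the subsequent analysis.  */
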
3359 /* Go through all accesses collected throughout the (intraprocedural) analysis
3360 stage, exclude overlapping ones, identify representatives and build trees
3361 out of them, making decisions about scalarization on the way. Return true
3362 iff there are any to-be-scalarized variables after this stage. */
3363
3364 static bool
3365 analyze_all_variable_accesses (void)
3366 {
3367 int res = 0;
3368 bitmap tmp = BITMAP_ALLOC (NULL);
3369 bitmap_iterator bi;
3370 unsigned i;
3371
3372 bitmap_copy (tmp, candidate_bitmap);
3373 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3374 {
3375 tree var = candidate (i);
3376 struct access *access;
3377
3378 access = sort_and_splice_var_accesses (var);
3379 if (!access || !build_access_trees (access))
3380 disqualify_candidate (var,
3381 "No or inhibitingly overlapping accesses.");
3382 }
3383
3384 propagate_all_subaccesses ();
3385
3386 bool optimize_speed_p = !optimize_function_for_size_p (cfun);
3387 /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
3388 fall back to a target default. */
3389 unsigned HOST_WIDE_INT max_scalarization_size
3390 = get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
3391
3392 if (optimize_speed_p)
3393 {
3394 if (global_options_set.x_param_sra_max_scalarization_size_speed)
3395 max_scalarization_size = param_sra_max_scalarization_size_speed;
3396 }
3397 else
3398 {
3399 if (global_options_set.x_param_sra_max_scalarization_size_size)
3400 max_scalarization_size = param_sra_max_scalarization_size_size;
3401 }
3402 max_scalarization_size *= BITS_PER_UNIT;
3403
3404 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3405 if (bitmap_bit_p (should_scalarize_away_bitmap, i)
3406 && !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
3407 {
3408 tree var = candidate (i);
3409 if (!VAR_P (var))
3410 continue;
3411
3412 if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))) > max_scalarization_size)
3413 {
3414 if (dump_file && (dump_flags & TDF_DETAILS))
3415 {
3416 fprintf (dump_file, "Too big to totally scalarize: ");
3417 print_generic_expr (dump_file, var);
3418 fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
3419 }
3420 continue;
3421 }
3422
3423 bool all_types_ok = true;
3424 for (struct access *access = get_first_repr_for_decl (var);
3425 access;
3426 access = access->next_grp)
3427 if (!can_totally_scalarize_forest_p (access)
3428 || !scalarizable_type_p (access->type, constant_decl_p (var)))
3429 {
3430 all_types_ok = false;
3431 break;
3432 }
3433 if (!all_types_ok)
3434 continue;
3435
3436 if (dump_file && (dump_flags & TDF_DETAILS))
3437 {
3438 fprintf (dump_file, "Will attempt to totally scalarize ");
3439 print_generic_expr (dump_file, var);
3440 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3441 }
3442 bool scalarized = true;
3443 for (struct access *access = get_first_repr_for_decl (var);
3444 access;
3445 access = access->next_grp)
3446 if (!is_gimple_reg_type (access->type)
3447 && !totally_scalarize_subtree (access))
3448 {
3449 scalarized = false;
3450 break;
3451 }
3452
3453 if (scalarized)
3454 for (struct access *access = get_first_repr_for_decl (var);
3455 access;
3456 access = access->next_grp)
3457 access->grp_total_scalarization = true;
3458 }
3459
3460 if (flag_checking)
3461 verify_all_sra_access_forests ();
3462
3463 bitmap_copy (tmp, candidate_bitmap);
3464 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
3465 {
3466 tree var = candidate (i);
3467 struct access *access = get_first_repr_for_decl (var);
3468
3469 if (analyze_access_trees (access))
3470 {
3471 res++;
3472 if (dump_file && (dump_flags & TDF_DETAILS))
3473 {
3474 fprintf (dump_file, "\nAccess trees for ");
3475 print_generic_expr (dump_file, var);
3476 fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
3477 dump_access_tree (dump_file, access);
3478 fprintf (dump_file, "\n");
3479 }
3480 }
3481 else
3482 disqualify_candidate (var, "No scalar replacements to be created.");
3483 }
3484
3485 BITMAP_FREE (tmp);
3486
3487 if (res)
3488 {
3489 statistics_counter_event (cfun, "Scalarized aggregates", res);
3490 return true;
3491 }
3492 else
3493 return false;
3494 }
3495
3496 /* Generate statements copying scalar replacements of accesses within a subtree
3497 into or out of AGG. ACCESS, all its children, siblings and their children
3498 are to be processed. AGG is an aggregate type expression (can be a
3499 declaration but does not have to be, it can for example also be a mem_ref or
3500 a series of handled components). TOP_OFFSET is the offset of the processed
3501 subtree which has to be subtracted from offsets of individual accesses to
3502 get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
3503 replacements in the interval <start_offset, start_offset + chunk_size>,
3504 otherwise copy all. GSI is a statement iterator used to place the new
3505 statements. WRITE should be true when the statements should write from AGG
3506 to the replacement and false if vice versa.  If INSERT_AFTER is true, new
3507 statements will be added after the current statement in GSI, they will be
3508 added before the statement otherwise. */
3509
3510 static void
3511 generate_subtree_copies (struct access *access, tree agg,
3512 HOST_WIDE_INT top_offset,
3513 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
3514 gimple_stmt_iterator *gsi, bool write,
3515 bool insert_after, location_t loc)
3516 {
3517 /* Never write anything into constant pool decls. See PR70602. */
3518 if (!write && constant_decl_p (agg))
3519 return;
3520 do
3521 {
3522 if (chunk_size && access->offset >= start_offset + chunk_size)
3523 return;
3524
3525 if (access->grp_to_be_replaced
3526 && (chunk_size == 0
3527 || access->offset + access->size > start_offset))
3528 {
3529 tree expr, repl = get_access_replacement (access);
3530 gassign *stmt;
3531
3532 expr = build_ref_for_model (loc, agg, access->offset - top_offset,
3533 access, gsi, insert_after);
3534
3535 if (write)
3536 {
3537 if (access->grp_partial_lhs)
3538 expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
3539 !insert_after,
3540 insert_after ? GSI_NEW_STMT
3541 : GSI_SAME_STMT);
3542 stmt = gimple_build_assign (repl, expr);
3543 }
3544 else
3545 {
3546 TREE_NO_WARNING (repl) = 1;
3547 if (access->grp_partial_lhs)
3548 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3549 !insert_after,
3550 insert_after ? GSI_NEW_STMT
3551 : GSI_SAME_STMT);
3552 stmt = gimple_build_assign (expr, repl);
3553 }
3554 gimple_set_location (stmt, loc);
3555
3556 if (insert_after)
3557 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3558 else
3559 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3560 update_stmt (stmt);
3561 sra_stats.subtree_copies++;
3562 }
3563 else if (write
3564 && access->grp_to_be_debug_replaced
3565 && (chunk_size == 0
3566 || access->offset + access->size > start_offset))
3567 {
3568 gdebug *ds;
3569 tree drhs = build_debug_ref_for_model (loc, agg,
3570 access->offset - top_offset,
3571 access);
3572 ds = gimple_build_debug_bind (get_access_replacement (access),
3573 drhs, gsi_stmt (*gsi));
3574 if (insert_after)
3575 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3576 else
3577 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3578 }
3579
3580 if (access->first_child)
3581 generate_subtree_copies (access->first_child, agg, top_offset,
3582 start_offset, chunk_size, gsi,
3583 write, insert_after, loc);
3584
3585 access = access->next_sibling;
3586 }
3587 while (access);
3588 }
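
/* Illustrative sketch, not part of the pass: for a candidate such as

     struct S { int i; float f; } s;

   with scalar replacements SR.1 for s.i and SR.2 for s.f, a call with WRITE
   set to true conceptually emits at GSI

     SR.1 = s.i;
     SR.2 = s.f;

   while a call with WRITE set to false emits the mirror image

     s.i = SR.1;
     s.f = SR.2;

   The SR.* names are hypothetical; the actual replacement declarations are
   produced by create_access_replacement.  */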
3589
3590 /* Assign zero to all scalar replacements in an access subtree. ACCESS is the
3591 root of the subtree to be processed. GSI is the statement iterator used
3592 for inserting statements which are added after the current statement if
3593 INSERT_AFTER is true or before it otherwise. */
3594
3595 static void
3596 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
3597 bool insert_after, location_t loc)
3598
3599 {
3600 struct access *child;
3601
3602 if (access->grp_to_be_replaced)
3603 {
3604 gassign *stmt;
3605
3606 stmt = gimple_build_assign (get_access_replacement (access),
3607 build_zero_cst (access->type));
3608 if (insert_after)
3609 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3610 else
3611 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3612 update_stmt (stmt);
3613 gimple_set_location (stmt, loc);
3614 }
3615 else if (access->grp_to_be_debug_replaced)
3616 {
3617 gdebug *ds
3618 = gimple_build_debug_bind (get_access_replacement (access),
3619 build_zero_cst (access->type),
3620 gsi_stmt (*gsi));
3621 if (insert_after)
3622 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3623 else
3624 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3625 }
3626
3627 for (child = access->first_child; child; child = child->next_sibling)
3628 init_subtree_with_zero (child, gsi, insert_after, loc);
3629 }
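
/* Illustrative sketch, not part of the pass: reusing the hypothetical SR.1
   and SR.2 replacements from the example above, init_subtree_with_zero emits

     SR.1 = 0;
     SR.2 = 0.0;

   i.e. one zero assignment per to-be-replaced access in the subtree.  */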
3630
3631 /* Clobber all scalar replacements in an access subtree. ACCESS is the
3632 root of the subtree to be processed. GSI is the statement iterator used
3633 for inserting statements which are added after the current statement if
3634 INSERT_AFTER is true or before it otherwise. */
3635
3636 static void
3637 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
3638 bool insert_after, location_t loc)
3639
3640 {
3641 struct access *child;
3642
3643 if (access->grp_to_be_replaced)
3644 {
3645 tree rep = get_access_replacement (access);
3646 tree clobber = build_clobber (access->type);
3647 gimple *stmt = gimple_build_assign (rep, clobber);
3648
3649 if (insert_after)
3650 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3651 else
3652 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3653 update_stmt (stmt);
3654 gimple_set_location (stmt, loc);
3655 }
3656
3657 for (child = access->first_child; child; child = child->next_sibling)
3658 clobber_subtree (child, gsi, insert_after, loc);
3659 }
3660
3661 /* Search for an access representative for the given expression EXPR and
3662 return it or NULL if it cannot be found. */
3663
3664 static struct access *
3665 get_access_for_expr (tree expr)
3666 {
3667 poly_int64 poffset, psize, pmax_size;
3668 HOST_WIDE_INT offset, max_size;
3669 tree base;
3670 bool reverse;
3671
3672 /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
3673 a different size than the size of its argument and we need the latter
3674 one. */
3675 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3676 expr = TREE_OPERAND (expr, 0);
3677
3678 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
3679 &reverse);
3680 if (!known_size_p (pmax_size)
3681 || !pmax_size.is_constant (&max_size)
3682 || !poffset.is_constant (&offset)
3683 || !DECL_P (base))
3684 return NULL;
3685
3686 if (tree basesize = DECL_SIZE (base))
3687 {
3688 poly_int64 sz;
3689 if (offset < 0
3690 || !poly_int_tree_p (basesize, &sz)
3691 || known_le (sz, offset))
3692 return NULL;
3693 }
3694
3695 if (max_size == 0
3696 || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
3697 return NULL;
3698
3699 return get_var_base_offset_size_access (base, offset, max_size);
3700 }
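
/* Illustrative sketch, not part of the pass: given

     struct S { int i; float f; } s;

   the expression s.f has base s and, on a target where int and float are
   both 32 bits wide, bit offset 32 and size 32; get_access_for_expr returns
   the access representative recorded for that <base, offset, size> triple
   during scanning, or NULL if s is no longer in candidate_bitmap.  The exact
   numbers depend on the target ABI.  */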
3701
3702 /* Replace the expression EXPR with a scalar replacement if there is one and
3703 generate other statements to do type conversion or subtree copying if
3704 necessary. GSI is used to place newly created statements, WRITE is true if
3705    the expression is being written to (it is on an LHS of a statement or an output
3706 in an assembly statement). */
3707
3708 static bool
3709 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
3710 {
3711 location_t loc;
3712 struct access *access;
3713 tree type, bfr, orig_expr;
3714 bool partial_cplx_access = false;
3715
3716 if (TREE_CODE (*expr) == BIT_FIELD_REF)
3717 {
3718 bfr = *expr;
3719 expr = &TREE_OPERAND (*expr, 0);
3720 }
3721 else
3722 bfr = NULL_TREE;
3723
3724 if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
3725 {
3726 expr = &TREE_OPERAND (*expr, 0);
3727 partial_cplx_access = true;
3728 }
3729 access = get_access_for_expr (*expr);
3730 if (!access)
3731 return false;
3732 type = TREE_TYPE (*expr);
3733 orig_expr = *expr;
3734
3735 loc = gimple_location (gsi_stmt (*gsi));
3736 gimple_stmt_iterator alt_gsi = gsi_none ();
3737 if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
3738 {
3739 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3740 gsi = &alt_gsi;
3741 }
3742
3743 if (access->grp_to_be_replaced)
3744 {
3745 tree repl = get_access_replacement (access);
3746 /* If we replace a non-register typed access simply use the original
3747 access expression to extract the scalar component afterwards.
3748 This happens if scalarizing a function return value or parameter
3749 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3750 gcc.c-torture/compile/20011217-1.c.
3751
3752 We also want to use this when accessing a complex or vector which can
3753 be accessed as a different type too, potentially creating a need for
3754 type conversion (see PR42196) and when scalarized unions are involved
3755 in assembler statements (see PR42398). */
3756 if (!bfr && !useless_type_conversion_p (type, access->type))
3757 {
3758 tree ref;
3759
3760 ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3761
3762 if (partial_cplx_access)
3763 {
3764 /* VIEW_CONVERT_EXPRs in partial complex access are always fine in
3765            the case of a write because in such a case the replacement cannot
3766            be a gimple register.  In the case of a load, we have to
3767            differentiate between a register and a non-register
3768            replacement.  */
3769 tree t = build1 (VIEW_CONVERT_EXPR, type, repl);
3770 gcc_checking_assert (!write || access->grp_partial_lhs);
3771 if (!access->grp_partial_lhs)
3772 {
3773 tree tmp = make_ssa_name (type);
3774 gassign *stmt = gimple_build_assign (tmp, t);
3775 /* This is always a read. */
3776 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3777 t = tmp;
3778 }
3779 *expr = t;
3780 }
3781 else if (write)
3782 {
3783 gassign *stmt;
3784
3785 if (access->grp_partial_lhs)
3786 ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3787 false, GSI_NEW_STMT);
3788 stmt = gimple_build_assign (repl, ref);
3789 gimple_set_location (stmt, loc);
3790 gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3791 }
3792 else
3793 {
3794 gassign *stmt;
3795
3796 if (access->grp_partial_lhs)
3797 repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3798 true, GSI_SAME_STMT);
3799 stmt = gimple_build_assign (ref, repl);
3800 gimple_set_location (stmt, loc);
3801 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3802 }
3803 }
3804 else
3805 *expr = repl;
3806 sra_stats.exprs++;
3807 }
3808 else if (write && access->grp_to_be_debug_replaced)
3809 {
3810 gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3811 NULL_TREE,
3812 gsi_stmt (*gsi));
3813 gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3814 }
3815
3816 if (access->first_child)
3817 {
3818 HOST_WIDE_INT start_offset, chunk_size;
3819 if (bfr
3820 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3821 && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3822 {
3823 chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3824 start_offset = access->offset
3825 + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3826 }
3827 else
3828 start_offset = chunk_size = 0;
3829
3830 generate_subtree_copies (access->first_child, orig_expr, access->offset,
3831 start_offset, chunk_size, gsi, write, write,
3832 loc);
3833 }
3834 return true;
3835 }
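
/* Illustrative sketch, not part of the pass: if a _Complex float field s.c
   has the scalar replacement SR.3, then a statement such as

     D.1234 = __real s.c;

   has its right hand side rewritten by sra_modify_expr into

     D.1234 = __real SR.3;

   When the types do not match, or when a BIT_FIELD_REF is involved, the
   original reference is kept and conversion or refresh statements are
   emitted around the statement instead.  D.1234 and SR.3 are hypothetical
   names.  */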
3836
3837 /* Where scalar replacements of the RHS have been written to when a replacement
3838    of an LHS of an assignment cannot be directly loaded from a replacement of
3839    the RHS.  */
3840 enum unscalarized_data_handling { SRA_UDH_NONE, /* Nothing done so far. */
3841 SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3842 SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3843
3844 struct subreplacement_assignment_data
3845 {
3846 /* Offset of the access representing the lhs of the assignment. */
3847 HOST_WIDE_INT left_offset;
3848
3849 /* LHS and RHS of the original assignment. */
3850 tree assignment_lhs, assignment_rhs;
3851
3852 /* Access representing the rhs of the whole assignment. */
3853 struct access *top_racc;
3854
3855 /* Stmt iterator used for statement insertions after the original assignment.
3856 It points to the main GSI used to traverse a BB during function body
3857 modification. */
3858 gimple_stmt_iterator *new_gsi;
3859
3860 /* Stmt iterator used for statement insertions before the original
3861 assignment. Keeps on pointing to the original statement. */
3862 gimple_stmt_iterator old_gsi;
3863
3864 /* Location of the assignment. */
3865 location_t loc;
3866
3867   /* Records whether we have needed to refresh replacements of the LHS and from
3868      which side of the assignment this takes place.  */
3869 enum unscalarized_data_handling refreshed;
3870 };
3871
3872 /* Store all replacements in the access tree rooted in SAD->top_racc either to
3873    their base aggregate if there are unscalarized data or directly to the LHS of
3874    the original assignment otherwise.  */
3875
3876 static void
3877 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3878 {
3879 tree src;
3880 if (sad->top_racc->grp_unscalarized_data)
3881 {
3882 src = sad->assignment_rhs;
3883 sad->refreshed = SRA_UDH_RIGHT;
3884 }
3885 else
3886 {
3887 src = sad->assignment_lhs;
3888 sad->refreshed = SRA_UDH_LEFT;
3889 }
3890 generate_subtree_copies (sad->top_racc->first_child, src,
3891 sad->top_racc->offset, 0, 0,
3892 &sad->old_gsi, false, false, sad->loc);
3893 }
3894
3895 /* Try to generate statements to load all sub-replacements in an access subtree
3896 formed by children of LACC from scalar replacements in the SAD->top_racc
3897 subtree. If that is not possible, refresh the SAD->top_racc base aggregate
3898 and load the accesses from it. */
3899
3900 static void
3901 load_assign_lhs_subreplacements (struct access *lacc,
3902 struct subreplacement_assignment_data *sad)
3903 {
3904 for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3905 {
3906 HOST_WIDE_INT offset;
3907 offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3908
3909 if (lacc->grp_to_be_replaced)
3910 {
3911 struct access *racc;
3912 gassign *stmt;
3913 tree rhs;
3914
3915 racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3916 if (racc && racc->grp_to_be_replaced)
3917 {
3918 rhs = get_access_replacement (racc);
3919 if (!useless_type_conversion_p (lacc->type, racc->type))
3920 rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3921 lacc->type, rhs);
3922
3923 if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3924 rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3925 NULL_TREE, true, GSI_SAME_STMT);
3926 }
3927 else
3928 {
3929 /* No suitable access on the right hand side, need to load from
3930 the aggregate. See if we have to update it first... */
3931 if (sad->refreshed == SRA_UDH_NONE)
3932 handle_unscalarized_data_in_subtree (sad);
3933
3934 if (sad->refreshed == SRA_UDH_LEFT)
3935 rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3936 lacc->offset - sad->left_offset,
3937 lacc, sad->new_gsi, true);
3938 else
3939 rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3940 lacc->offset - sad->left_offset,
3941 lacc, sad->new_gsi, true);
3942 if (lacc->grp_partial_lhs)
3943 rhs = force_gimple_operand_gsi (sad->new_gsi,
3944 rhs, true, NULL_TREE,
3945 false, GSI_NEW_STMT);
3946 }
3947
3948 stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3949 gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3950 gimple_set_location (stmt, sad->loc);
3951 update_stmt (stmt);
3952 sra_stats.subreplacements++;
3953 }
3954 else
3955 {
3956 if (sad->refreshed == SRA_UDH_NONE
3957 && lacc->grp_read && !lacc->grp_covered)
3958 handle_unscalarized_data_in_subtree (sad);
3959
3960 if (lacc && lacc->grp_to_be_debug_replaced)
3961 {
3962 gdebug *ds;
3963 tree drhs;
3964 struct access *racc = find_access_in_subtree (sad->top_racc,
3965 offset,
3966 lacc->size);
3967
3968 if (racc && racc->grp_to_be_replaced)
3969 {
3970 if (racc->grp_write || constant_decl_p (racc->base))
3971 drhs = get_access_replacement (racc);
3972 else
3973 drhs = NULL;
3974 }
3975 else if (sad->refreshed == SRA_UDH_LEFT)
3976 drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3977 lacc->offset, lacc);
3978 else if (sad->refreshed == SRA_UDH_RIGHT)
3979 drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3980 offset, lacc);
3981 else
3982 drhs = NULL_TREE;
3983 if (drhs
3984 && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3985 drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3986 lacc->type, drhs);
3987 ds = gimple_build_debug_bind (get_access_replacement (lacc),
3988 drhs, gsi_stmt (sad->old_gsi));
3989 gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3990 }
3991 }
3992
3993 if (lacc->first_child)
3994 load_assign_lhs_subreplacements (lacc, sad);
3995 }
3996 }
3997
3998 /* Result code for SRA assignment modification. */
3999 enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
4000 SRA_AM_MODIFIED, /* stmt changed but not
4001 removed */
4002 SRA_AM_REMOVED }; /* stmt eliminated */
4003
4004 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT is the assignment
4005    statement and GSI is the statement iterator pointing at it.  Returns
4006 the same values as sra_modify_assign. */
4007
4008 static enum assignment_mod_result
4009 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4010 {
4011 tree lhs = gimple_assign_lhs (stmt);
4012 struct access *acc = get_access_for_expr (lhs);
4013 if (!acc)
4014 return SRA_AM_NONE;
4015 location_t loc = gimple_location (stmt);
4016
4017 if (gimple_clobber_p (stmt))
4018 {
4019 /* Clobber the replacement variable. */
4020 clobber_subtree (acc, gsi, !acc->grp_covered, loc);
4021 /* Remove clobbers of fully scalarized variables, they are dead. */
4022 if (acc->grp_covered)
4023 {
4024 unlink_stmt_vdef (stmt);
4025 gsi_remove (gsi, true);
4026 release_defs (stmt);
4027 return SRA_AM_REMOVED;
4028 }
4029 else
4030 return SRA_AM_MODIFIED;
4031 }
4032
4033 if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
4034 {
4035 /* I have never seen this code path trigger but if it can happen the
4036 following should handle it gracefully. */
4037 if (access_has_children_p (acc))
4038 generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
4039 true, true, loc);
4040 return SRA_AM_MODIFIED;
4041 }
4042
4043 if (acc->grp_covered)
4044 {
4045 init_subtree_with_zero (acc, gsi, false, loc);
4046 unlink_stmt_vdef (stmt);
4047 gsi_remove (gsi, true);
4048 release_defs (stmt);
4049 return SRA_AM_REMOVED;
4050 }
4051 else
4052 {
4053 init_subtree_with_zero (acc, gsi, true, loc);
4054 return SRA_AM_MODIFIED;
4055 }
4056 }
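
/* Illustrative sketch, not part of the pass: for a fully covered candidate s
   with replacements SR.1 and SR.2 (names illustrative), an assignment of an
   empty CONSTRUCTOR (the gimplified form of zero-initializing s) becomes

     SR.1 = 0;
     SR.2 = 0.0;

   and the original store is removed; a clobber of s is similarly turned into
   clobbers of SR.1 and SR.2 and, when s is fully covered, dropped.  */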
4057
4058 /* Create and return a new suitable default definition SSA_NAME for RACC which
4059 is an access describing an uninitialized part of an aggregate that is being
4060    loaded.  REG_TYPE is used instead of the actual RACC type if that is not of
4061 a gimple register type. */
4062
4063 static tree
4064 get_repl_default_def_ssa_name (struct access *racc, tree reg_type)
4065 {
4066 gcc_checking_assert (!racc->grp_to_be_replaced
4067 && !racc->grp_to_be_debug_replaced);
4068 if (!racc->replacement_decl)
4069 racc->replacement_decl = create_access_replacement (racc, reg_type);
4070 return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
4071 }
4072
4073 /* Examine both sides of the assignment statement pointed to by STMT, replace
4074    them with a scalar replacement if there is one and generate copying of
4075 replacements if scalarized aggregates have been used in the assignment. GSI
4076 is used to hold generated statements for type conversions and subtree
4077 copying. */
4078
4079 static enum assignment_mod_result
4080 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
4081 {
4082 struct access *lacc, *racc;
4083 tree lhs, rhs;
4084 bool modify_this_stmt = false;
4085 bool force_gimple_rhs = false;
4086 location_t loc;
4087 gimple_stmt_iterator orig_gsi = *gsi;
4088
4089 if (!gimple_assign_single_p (stmt))
4090 return SRA_AM_NONE;
4091 lhs = gimple_assign_lhs (stmt);
4092 rhs = gimple_assign_rhs1 (stmt);
4093
4094 if (TREE_CODE (rhs) == CONSTRUCTOR)
4095 return sra_modify_constructor_assign (stmt, gsi);
4096
4097 if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
4098 || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
4099 || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
4100 {
4101 modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
4102 gsi, false);
4103 modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
4104 gsi, true);
4105 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4106 }
4107
4108 lacc = get_access_for_expr (lhs);
4109 racc = get_access_for_expr (rhs);
4110 if (!lacc && !racc)
4111 return SRA_AM_NONE;
4112 /* Avoid modifying initializations of constant-pool replacements. */
4113 if (racc && (racc->replacement_decl == lhs))
4114 return SRA_AM_NONE;
4115
4116 loc = gimple_location (stmt);
4117 if (lacc && lacc->grp_to_be_replaced)
4118 {
4119 lhs = get_access_replacement (lacc);
4120 gimple_assign_set_lhs (stmt, lhs);
4121 modify_this_stmt = true;
4122 if (lacc->grp_partial_lhs)
4123 force_gimple_rhs = true;
4124 sra_stats.exprs++;
4125 }
4126
4127 if (racc && racc->grp_to_be_replaced)
4128 {
4129 rhs = get_access_replacement (racc);
4130 modify_this_stmt = true;
4131 if (racc->grp_partial_lhs)
4132 force_gimple_rhs = true;
4133 sra_stats.exprs++;
4134 }
4135 else if (racc
4136 && !racc->grp_unscalarized_data
4137 && !racc->grp_unscalarizable_region
4138 && TREE_CODE (lhs) == SSA_NAME
4139 && !access_has_replacements_p (racc))
4140 {
4141 rhs = get_repl_default_def_ssa_name (racc, TREE_TYPE (lhs));
4142 modify_this_stmt = true;
4143 sra_stats.exprs++;
4144 }
4145
4146 if (modify_this_stmt)
4147 {
4148 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4149 {
4150 /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
4151 ??? This should move to fold_stmt which we simply should
4152 call after building a VIEW_CONVERT_EXPR here. */
4153 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
4154 && !contains_bitfld_component_ref_p (lhs))
4155 {
4156 lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
4157 gimple_assign_set_lhs (stmt, lhs);
4158 }
4159 else if (lacc
4160 && AGGREGATE_TYPE_P (TREE_TYPE (rhs))
4161 && !contains_vce_or_bfcref_p (rhs))
4162 rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
4163
4164 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
4165 {
4166 rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
4167 rhs);
4168 if (is_gimple_reg_type (TREE_TYPE (lhs))
4169 && TREE_CODE (lhs) != SSA_NAME)
4170 force_gimple_rhs = true;
4171 }
4172 }
4173 }
4174
4175 if (lacc && lacc->grp_to_be_debug_replaced)
4176 {
4177 tree dlhs = get_access_replacement (lacc);
4178 tree drhs = unshare_expr (rhs);
4179 if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
4180 {
4181 if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
4182 && !contains_vce_or_bfcref_p (drhs))
4183 drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
4184 if (drhs
4185 && !useless_type_conversion_p (TREE_TYPE (dlhs),
4186 TREE_TYPE (drhs)))
4187 drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
4188 TREE_TYPE (dlhs), drhs);
4189 }
4190 gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
4191 gsi_insert_before (gsi, ds, GSI_SAME_STMT);
4192 }
4193
4194 /* From this point on, the function deals with assignments in between
4195 aggregates when at least one has scalar reductions of some of its
4196 components. There are three possible scenarios: Both the LHS and RHS have
4197    components.  There are three possible scenarios: 1) both the LHS and RHS have
4198    to-be-scalarized components, 2) only the RHS does, or 3) only the LHS does.
4199 In the first case, we would like to load the LHS components from RHS
4200 components whenever possible. If that is not possible, we would like to
4201    read them directly from the RHS (after updating it by storing in it its own
4202 components). If there are some necessary unscalarized data in the LHS,
4203 those will be loaded by the original assignment too. If neither of these
4204 cases happen, the original statement can be removed. Most of this is done
4205 by load_assign_lhs_subreplacements.
4206
4207 In the second case, we would like to store all RHS scalarized components
4208 directly into LHS and if they cover the aggregate completely, remove the
4209 statement too. In the third case, we want the LHS components to be loaded
4210 directly from the RHS (DSE will remove the original statement if it
4211 becomes redundant).
4212
4213 This is a bit complex but manageable when types match and when unions do
4214 not cause confusion in a way that we cannot really load a component of LHS
4215 from the RHS or vice versa (the access representing this level can have
4216 subaccesses that are accessible only through a different union field at a
4217 higher level - different from the one used in the examined expression).
4218 Unions are fun.
4219
4220 Therefore, I specially handle a fourth case, happening when there is a
4221 specific type cast or it is impossible to locate a scalarized subaccess on
4222 the other side of the expression. If that happens, I simply "refresh" the
4223    RHS by storing its scalarized components into it, leave the original statement
4224    there to do the copying and then load the scalar replacements of the LHS.
4225 This is what the first branch does. */
4226
4227 if (modify_this_stmt
4228 || gimple_has_volatile_ops (stmt)
4229 || contains_vce_or_bfcref_p (rhs)
4230 || contains_vce_or_bfcref_p (lhs)
4231 || stmt_ends_bb_p (stmt))
4232 {
4233       /* No need to copy into a constant-pool decl; it comes pre-initialized.  */
4234 if (access_has_children_p (racc) && !constant_decl_p (racc->base))
4235 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4236 gsi, false, false, loc);
4237 if (access_has_children_p (lacc))
4238 {
4239 gimple_stmt_iterator alt_gsi = gsi_none ();
4240 if (stmt_ends_bb_p (stmt))
4241 {
4242 alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
4243 gsi = &alt_gsi;
4244 }
4245 generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
4246 gsi, true, true, loc);
4247 }
4248 sra_stats.separate_lhs_rhs_handling++;
4249
4250 /* This gimplification must be done after generate_subtree_copies,
4251 lest we insert the subtree copies in the middle of the gimplified
4252 sequence. */
4253 if (force_gimple_rhs)
4254 rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
4255 true, GSI_SAME_STMT);
4256 if (gimple_assign_rhs1 (stmt) != rhs)
4257 {
4258 modify_this_stmt = true;
4259 gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
4260 gcc_assert (stmt == gsi_stmt (orig_gsi));
4261 }
4262
4263 return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
4264 }
4265 else
4266 {
4267 if (access_has_children_p (lacc)
4268 && access_has_children_p (racc)
4269 /* When an access represents an unscalarizable region, it usually
4270 represents accesses with variable offset and thus must not be used
4271 to generate new memory accesses. */
4272 && !lacc->grp_unscalarizable_region
4273 && !racc->grp_unscalarizable_region)
4274 {
4275 struct subreplacement_assignment_data sad;
4276
4277 sad.left_offset = lacc->offset;
4278 sad.assignment_lhs = lhs;
4279 sad.assignment_rhs = rhs;
4280 sad.top_racc = racc;
4281 sad.old_gsi = *gsi;
4282 sad.new_gsi = gsi;
4283 sad.loc = gimple_location (stmt);
4284 sad.refreshed = SRA_UDH_NONE;
4285
4286 if (lacc->grp_read && !lacc->grp_covered)
4287 handle_unscalarized_data_in_subtree (&sad);
4288
4289 load_assign_lhs_subreplacements (lacc, &sad);
4290 if (sad.refreshed != SRA_UDH_RIGHT)
4291 {
4292 gsi_next (gsi);
4293 unlink_stmt_vdef (stmt);
4294 gsi_remove (&sad.old_gsi, true);
4295 release_defs (stmt);
4296 sra_stats.deleted++;
4297 return SRA_AM_REMOVED;
4298 }
4299 }
4300 else
4301 {
4302 if (access_has_children_p (racc)
4303 && !racc->grp_unscalarized_data
4304 && TREE_CODE (lhs) != SSA_NAME)
4305 {
4306 if (dump_file)
4307 {
4308 fprintf (dump_file, "Removing load: ");
4309 print_gimple_stmt (dump_file, stmt, 0);
4310 }
4311 generate_subtree_copies (racc->first_child, lhs,
4312 racc->offset, 0, 0, gsi,
4313 false, false, loc);
4314 gcc_assert (stmt == gsi_stmt (*gsi));
4315 unlink_stmt_vdef (stmt);
4316 gsi_remove (gsi, true);
4317 release_defs (stmt);
4318 sra_stats.deleted++;
4319 return SRA_AM_REMOVED;
4320 }
4321 /* Restore the aggregate RHS from its components so the
4322 prevailing aggregate copy does the right thing. */
4323 if (access_has_children_p (racc))
4324 generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
4325 gsi, false, false, loc);
4326 /* Re-load the components of the aggregate copy destination.
4327 But use the RHS aggregate to load from to expose more
4328 optimization opportunities. */
4329 if (access_has_children_p (lacc))
4330 generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
4331 0, 0, gsi, true, true, loc);
4332 }
4333
4334 return SRA_AM_NONE;
4335 }
4336 }
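
/* Illustrative sketch, not part of the pass: for two candidates of the same
   type

     struct S { int i; float f; } a, b;

   with replacements a$i, a$f, b$i and b$f, an aggregate copy

     a = b;

   whose sides are fully covered by replacements is rewritten into

     a$i = b$i;
     a$f = b$f;

   and the original statement is removed (SRA_AM_REMOVED).  If only one side
   has replacements, or if unscalarized data remain, the aggregate copy is
   kept and the relevant replacements are flushed to or re-loaded from memory
   around it.  The a$i spelling mimics the pass dumps; the exact identifiers
   are internal.  */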
4337
4338 /* Set any scalar replacements of values in the constant pool to the initial
4339 value of the constant. (Constant-pool decls like *.LC0 have effectively
4340    been initialized before the program starts; we must do the same for their
4341 replacements.) Thus, we output statements like 'SR.1 = *.LC0[0];' into
4342 the function's entry block. */
4343
4344 static void
4345 initialize_constant_pool_replacements (void)
4346 {
4347 gimple_seq seq = NULL;
4348 gimple_stmt_iterator gsi = gsi_start (seq);
4349 bitmap_iterator bi;
4350 unsigned i;
4351
4352 EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
4353 {
4354 tree var = candidate (i);
4355 if (!constant_decl_p (var))
4356 continue;
4357
4358 struct access *access = get_first_repr_for_decl (var);
4359
4360 while (access)
4361 {
4362 if (access->replacement_decl)
4363 {
4364 gassign *stmt
4365 = gimple_build_assign (get_access_replacement (access),
4366 unshare_expr (access->expr));
4367 if (dump_file && (dump_flags & TDF_DETAILS))
4368 {
4369 fprintf (dump_file, "Generating constant initializer: ");
4370 print_gimple_stmt (dump_file, stmt, 0);
4371 fprintf (dump_file, "\n");
4372 }
4373 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4374 update_stmt (stmt);
4375 }
4376
4377 if (access->first_child)
4378 access = access->first_child;
4379 else if (access->next_sibling)
4380 access = access->next_sibling;
4381 else
4382 {
4383 while (access->parent && !access->next_sibling)
4384 access = access->parent;
4385 if (access->next_sibling)
4386 access = access->next_sibling;
4387 else
4388 access = access->next_grp;
4389 }
4390 }
4391 }
4392
4393 seq = gsi_seq (gsi);
4394 if (seq)
4395 gsi_insert_seq_on_edge_immediate (
4396 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4397 }
4398
4399 /* Traverse the function body and carry out all modifications as decided in
4400 analyze_all_variable_accesses. Return true iff the CFG has been
4401 changed. */
4402
4403 static bool
4404 sra_modify_function_body (void)
4405 {
4406 bool cfg_changed = false;
4407 basic_block bb;
4408
4409 initialize_constant_pool_replacements ();
4410
4411 FOR_EACH_BB_FN (bb, cfun)
4412 {
4413 gimple_stmt_iterator gsi = gsi_start_bb (bb);
4414 while (!gsi_end_p (gsi))
4415 {
4416 gimple *stmt = gsi_stmt (gsi);
4417 enum assignment_mod_result assign_result;
4418 bool modified = false, deleted = false;
4419 tree *t;
4420 unsigned i;
4421
4422 switch (gimple_code (stmt))
4423 {
4424 case GIMPLE_RETURN:
4425 t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4426 if (*t != NULL_TREE)
4427 modified |= sra_modify_expr (t, &gsi, false);
4428 break;
4429
4430 case GIMPLE_ASSIGN:
4431 assign_result = sra_modify_assign (stmt, &gsi);
4432 modified |= assign_result == SRA_AM_MODIFIED;
4433 deleted = assign_result == SRA_AM_REMOVED;
4434 break;
4435
4436 case GIMPLE_CALL:
4437 /* Operands must be processed before the lhs. */
4438 for (i = 0; i < gimple_call_num_args (stmt); i++)
4439 {
4440 t = gimple_call_arg_ptr (stmt, i);
4441 modified |= sra_modify_expr (t, &gsi, false);
4442 }
4443
4444 if (gimple_call_lhs (stmt))
4445 {
4446 t = gimple_call_lhs_ptr (stmt);
4447 modified |= sra_modify_expr (t, &gsi, true);
4448 }
4449 break;
4450
4451 case GIMPLE_ASM:
4452 {
4453 gasm *asm_stmt = as_a <gasm *> (stmt);
4454 for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4455 {
4456 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4457 modified |= sra_modify_expr (t, &gsi, false);
4458 }
4459 for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4460 {
4461 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4462 modified |= sra_modify_expr (t, &gsi, true);
4463 }
4464 }
4465 break;
4466
4467 default:
4468 break;
4469 }
4470
4471 if (modified)
4472 {
4473 update_stmt (stmt);
4474 if (maybe_clean_eh_stmt (stmt)
4475 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4476 cfg_changed = true;
4477 }
4478 if (!deleted)
4479 gsi_next (&gsi);
4480 }
4481 }
4482
4483 gsi_commit_edge_inserts ();
4484 return cfg_changed;
4485 }
4486
4487 /* Generate statements initializing scalar replacements of parts of function
4488 parameters. */
4489
4490 static void
4491 initialize_parameter_reductions (void)
4492 {
4493 gimple_stmt_iterator gsi;
4494 gimple_seq seq = NULL;
4495 tree parm;
4496
4497 gsi = gsi_start (seq);
4498 for (parm = DECL_ARGUMENTS (current_function_decl);
4499 parm;
4500 parm = DECL_CHAIN (parm))
4501 {
4502 vec<access_p> *access_vec;
4503 struct access *access;
4504
4505 if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4506 continue;
4507 access_vec = get_base_access_vector (parm);
4508 if (!access_vec)
4509 continue;
4510
4511 for (access = (*access_vec)[0];
4512 access;
4513 access = access->next_grp)
4514 generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
4515 EXPR_LOCATION (parm));
4516 }
4517
4518 seq = gsi_seq (gsi);
4519 if (seq)
4520 gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
4521 }
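
/* Illustrative sketch, not part of the pass: for

     int foo (struct S s) { return s.i + (int) s.f; }

   with replacements s$i and s$f, initialize_parameter_reductions inserts

     s$i = s.i;
     s$f = s.f;

   on the edge leaving the entry block, so that the already modified body can
   use the scalars.  The s$i spelling mimics the pass dumps; the exact
   identifiers are internal.  */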
4522
4523 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
4524 it reveals there are components of some aggregates to be scalarized, it runs
4525 the required transformations. */
4526 static unsigned int
4527 perform_intra_sra (void)
4528 {
4529 int ret = 0;
4530 sra_initialize ();
4531
4532 if (!find_var_candidates ())
4533 goto out;
4534
4535 if (!scan_function ())
4536 goto out;
4537
4538 if (!analyze_all_variable_accesses ())
4539 goto out;
4540
4541 if (sra_modify_function_body ())
4542 ret = TODO_update_ssa | TODO_cleanup_cfg;
4543 else
4544 ret = TODO_update_ssa;
4545 initialize_parameter_reductions ();
4546
4547 statistics_counter_event (cfun, "Scalar replacements created",
4548 sra_stats.replacements);
4549 statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
4550 statistics_counter_event (cfun, "Subtree copy stmts",
4551 sra_stats.subtree_copies);
4552 statistics_counter_event (cfun, "Subreplacement stmts",
4553 sra_stats.subreplacements);
4554 statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
4555 statistics_counter_event (cfun, "Separate LHS and RHS handling",
4556 sra_stats.separate_lhs_rhs_handling);
4557
4558 out:
4559 sra_deinitialize ();
4560 return ret;
4561 }
4562
4563 /* Perform early intraprocedural SRA. */
4564 static unsigned int
4565 early_intra_sra (void)
4566 {
4567 sra_mode = SRA_MODE_EARLY_INTRA;
4568 return perform_intra_sra ();
4569 }
4570
4571 /* Perform "late" intraprocedural SRA. */
4572 static unsigned int
4573 late_intra_sra (void)
4574 {
4575 sra_mode = SRA_MODE_INTRA;
4576 return perform_intra_sra ();
4577 }
4578
4579
4580 static bool
4581 gate_intra_sra (void)
4582 {
4583 return flag_tree_sra != 0 && dbg_cnt (tree_sra);
4584 }
4585
4586
4587 namespace {
4588
4589 const pass_data pass_data_sra_early =
4590 {
4591 GIMPLE_PASS, /* type */
4592 "esra", /* name */
4593 OPTGROUP_NONE, /* optinfo_flags */
4594 TV_TREE_SRA, /* tv_id */
4595 ( PROP_cfg | PROP_ssa ), /* properties_required */
4596 0, /* properties_provided */
4597 0, /* properties_destroyed */
4598 0, /* todo_flags_start */
4599 TODO_update_ssa, /* todo_flags_finish */
4600 };
4601
4602 class pass_sra_early : public gimple_opt_pass
4603 {
4604 public:
4605 pass_sra_early (gcc::context *ctxt)
4606 : gimple_opt_pass (pass_data_sra_early, ctxt)
4607 {}
4608
4609 /* opt_pass methods: */
4610 virtual bool gate (function *) { return gate_intra_sra (); }
4611 virtual unsigned int execute (function *) { return early_intra_sra (); }
4612
4613 }; // class pass_sra_early
4614
4615 } // anon namespace
4616
4617 gimple_opt_pass *
4618 make_pass_sra_early (gcc::context *ctxt)
4619 {
4620 return new pass_sra_early (ctxt);
4621 }
4622
4623 namespace {
4624
4625 const pass_data pass_data_sra =
4626 {
4627 GIMPLE_PASS, /* type */
4628 "sra", /* name */
4629 OPTGROUP_NONE, /* optinfo_flags */
4630 TV_TREE_SRA, /* tv_id */
4631 ( PROP_cfg | PROP_ssa ), /* properties_required */
4632 0, /* properties_provided */
4633 0, /* properties_destroyed */
4634 TODO_update_address_taken, /* todo_flags_start */
4635 TODO_update_ssa, /* todo_flags_finish */
4636 };
4637
4638 class pass_sra : public gimple_opt_pass
4639 {
4640 public:
4641 pass_sra (gcc::context *ctxt)
4642 : gimple_opt_pass (pass_data_sra, ctxt)
4643 {}
4644
4645 /* opt_pass methods: */
4646 virtual bool gate (function *) { return gate_intra_sra (); }
4647 virtual unsigned int execute (function *) { return late_intra_sra (); }
4648
4649 }; // class pass_sra
4650
4651 } // anon namespace
4652
4653 gimple_opt_pass *
4654 make_pass_sra (gcc::context *ctxt)
4655 {
4656 return new pass_sra (ctxt);
4657 }