1 /* Tree based points-to analysis
2 Copyright (C) 2005-2021 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
40 #include "gimple-walk.h"
42 #include "stringpool.h"
47 /* The idea behind this analyzer is to generate set constraints from the
48 program, then solve the resulting constraints in order to generate the
51 Set constraints are a way of modeling program analysis problems that
52 involve sets. They consist of an inclusion constraint language,
53 describing the variables (each variable is a set) and operations that
54 are involved on the variables, and a set of rules that derive facts
55 from these operations. To solve a system of set constraints, you derive
56 all possible facts under the rules, which gives you the correct sets
59 See "Efficient Field-sensitive pointer analysis for C" by "David
60 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
61 http://citeseer.ist.psu.edu/pearce04efficient.html
63 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
64 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
65 http://citeseer.ist.psu.edu/heintze01ultrafast.html
67 There are three types of real constraint expressions, DEREF,
68 ADDRESSOF, and SCALAR. Each constraint expression consists
69 of a constraint type, a variable, and an offset.
71 SCALAR is a constraint expression type used to represent x, whether
72 it appears on the LHS or the RHS of a statement.
73 DEREF is a constraint expression type used to represent *x, whether
74 it appears on the LHS or the RHS of a statement.
75 ADDRESSOF is a constraint expression used to represent &x, whether
76 it appears on the LHS or the RHS of a statement.
78 Each pointer variable in the program is assigned an integer id, and
79 each field of a structure variable is assigned an integer id as well.
81 Structure variables are linked to their list of fields through a "next
82 field" in each variable that points to the next field in offset
84 Each variable for a structure field has
86 1. "size", that tells the size in bits of that field.
87 2. "fullsize", that tells the size in bits of the entire structure.
88 3. "offset", that tells the offset in bits from the beginning of the
89 structure to this field.
101 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
102 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
103 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
106 In order to solve the system of set constraints, the following is
109 1. Each constraint variable x has a solution set associated with it,
112 2. Constraints are separated into direct, copy, and complex.
113 Direct constraints are ADDRESSOF constraints that require no extra
114 processing, such as P = &Q
115 Copy constraints are those of the form P = Q.
116 Complex constraints are all the constraints involving dereferences
117 and offsets (including offsetted copies).
119 3. All direct constraints of the form P = &Q are processed, such
120 that Q is added to Sol(P)
122 4. All complex constraints for a given constraint variable are stored in a
123 linked list attached to that variable's node.
125 5. A directed graph is built out of the copy constraints. Each
126 constraint variable is a node in the graph, and an edge from
127 Q to P is added for each copy constraint of the form P = Q
129 6. The graph is then walked, and solution sets are
130 propagated along the copy edges, such that an edge from Q to P
131 causes Sol(P) <- Sol(P) union Sol(Q).
133 7. As we visit each node, all complex constraints associated with
134 that node are processed by adding appropriate copy edges to the graph, or the
135 appropriate variables to the solution set.
137 8. The process of walking the graph is iterated until no solution
140 Prior to walking the graph in steps 6 and 7, We perform static
141 cycle elimination on the constraint graph, as well
142 as off-line variable substitution.
144 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
145 on and turned into anything), but isn't. You can just see what offset
146 inside the pointed-to struct it's going to access.
148 TODO: Constant bounded arrays can be handled as if they were structs of the
149 same number of elements.
151 TODO: Modeling heap and incoming pointers becomes much better if we
152 add fields to them as we discover them, which we could do.
154 TODO: We could handle unions, but to be honest, it's probably not
155 worth the pain or slowdown. */
157 /* IPA-PTA optimizations possible.
159 When the indirect function called is ANYTHING we can add disambiguation
160 based on the function signatures (or simply the parameter count which
161 is the varinfo size). We also do not need to consider functions that
162 do not have their address taken.
164 The is_global_var bit which marks escape points is overly conservative
165 in IPA mode. Split it to is_escape_point and is_global_var - only
166 externally visible globals are escape points in IPA mode.
167 There is now is_ipa_escape_point but this is only used in a few
170 The way we introduce DECL_PT_UID to avoid fixing up all points-to
171 sets in the translation unit when we copy a DECL during inlining
172 pessimizes precision. The advantage is that the DECL_PT_UID keeps
173 compile-time and memory usage overhead low - the points-to sets
174 do not grow or get unshared as they would during a fixup phase.
175 An alternative solution is to delay IPA PTA until after all
176 inlining transformations have been applied.
178 The way we propagate clobber/use information isn't optimized.
179 It should use a new complex constraint that properly filters
180 out local variables of the callee (though that would make
181 the sets invalid after inlining). OTOH we might as well
182 admit defeat to WHOPR and simply do all the clobber/use analysis
183 and propagation after PTA finished but before we threw away
184 points-to information for memory variables. WHOPR and PTA
185 do not play along well anyway - the whole constraint solving
186 would need to be done in WPA phase and it will be very interesting
187 to apply the results to local SSA names during LTRANS phase.
189 We probably should compute a per-function unit-ESCAPE solution
190 propagating it simply like the clobber / uses solutions. The
191 solution can go alongside the non-IPA escaped solution and be
192 used to query which vars escape the unit through a function.
193 This is also required to make the escaped-HEAP trick work in IPA mode.
195 We never put function decls in points-to sets so we do not
196 keep the set of called functions for indirect calls.
198 And probably more. */
200 static bool use_field_sensitive
= true;
201 static int in_ipa_mode
= 0;
203 /* Used for predecessor bitmaps. */
204 static bitmap_obstack predbitmap_obstack
;
206 /* Used for points-to sets. */
207 static bitmap_obstack pta_obstack
;
209 /* Used for oldsolution members of variables. */
210 static bitmap_obstack oldpta_obstack
;
212 /* Used for per-solver-iteration bitmaps. */
213 static bitmap_obstack iteration_obstack
;
215 static unsigned int create_variable_info_for (tree
, const char *, bool);
216 typedef struct constraint_graph
*constraint_graph_t
;
217 static void unify_nodes (constraint_graph_t
, unsigned int, unsigned int, bool);
220 typedef struct constraint
*constraint_t
;
/* Like EXECUTE_IF_SET_IN_BITMAP but guards against a NULL bitmap A,
   doing nothing in that case.  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
/* Statistics collected while building and solving the constraint
   system; referenced throughout as "stats".  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
241 /* ID of this variable */
244 /* True if this is a variable created by the constraint analysis, such as
245 heap variables and constraints we had to break up. */
246 unsigned int is_artificial_var
: 1;
248 /* True if this is a special variable whose solution set should not be
250 unsigned int is_special_var
: 1;
252 /* True for variables whose size is not known or variable. */
253 unsigned int is_unknown_size_var
: 1;
255 /* True for (sub-)fields that represent a whole variable. */
256 unsigned int is_full_var
: 1;
258 /* True if this is a heap variable. */
259 unsigned int is_heap_var
: 1;
261 /* True if this is a register variable. */
262 unsigned int is_reg_var
: 1;
264 /* True if this field may contain pointers. */
265 unsigned int may_have_pointers
: 1;
267 /* True if this field has only restrict qualified pointers. */
268 unsigned int only_restrict_pointers
: 1;
270 /* True if this represents a heap var created for a restrict qualified
272 unsigned int is_restrict_var
: 1;
274 /* True if this represents a global variable. */
275 unsigned int is_global_var
: 1;
277 /* True if this represents a module escape point for IPA analysis. */
278 unsigned int is_ipa_escape_point
: 1;
280 /* True if this represents a IPA function info. */
281 unsigned int is_fn_info
: 1;
283 /* ??? Store somewhere better. */
286 /* The ID of the variable for the next field in this structure
287 or zero for the last field in this structure. */
290 /* The ID of the variable for the first field in this structure. */
293 /* Offset of this variable, in bits, from the base variable */
294 unsigned HOST_WIDE_INT offset
;
296 /* Size of the variable, in bits. */
297 unsigned HOST_WIDE_INT size
;
299 /* Full size of the base variable, in bits. */
300 unsigned HOST_WIDE_INT fullsize
;
302 /* In IPA mode the shadow UID in case the variable needs to be duplicated in
303 the final points-to solution because it reaches its containing
304 function recursively. Zero if none is needed. */
305 unsigned int shadow_var_uid
;
307 /* Name of this variable */
310 /* Tree that this variable is associated with. */
313 /* Points-to set for this variable. */
316 /* Old points-to set for this variable. */
319 typedef struct variable_info
*varinfo_t
;
321 static varinfo_t
first_vi_for_offset (varinfo_t
, unsigned HOST_WIDE_INT
);
322 static varinfo_t
first_or_preceding_vi_for_offset (varinfo_t
,
323 unsigned HOST_WIDE_INT
);
324 static varinfo_t
lookup_vi_for_tree (tree
);
325 static inline bool type_can_have_subvars (const_tree
);
326 static void make_param_constraints (varinfo_t
);
328 /* Pool of variable info structures. */
329 static object_allocator
<variable_info
> variable_info_pool
330 ("Variable info pool");
332 /* Map varinfo to final pt_solution. */
333 static hash_map
<varinfo_t
, pt_solution
*> *final_solutions
;
334 struct obstack final_solutions_obstack
;
336 /* Table of variable info structures for constraint variables.
337 Indexed directly by variable info id. */
338 static vec
<varinfo_t
> varmap
;
340 /* Return the varmap element N */
342 static inline varinfo_t
343 get_varinfo (unsigned int n
)
348 /* Return the next variable in the list of sub-variables of VI
349 or NULL if VI is the last sub-variable. */
351 static inline varinfo_t
352 vi_next (varinfo_t vi
)
354 return get_varinfo (vi
->next
);
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
363 /* Return a new variable info structure consisting for a variable
364 named NAME, and using constraint graph node NODE. Append it
365 to the vector of variable info structures. */
368 new_var_info (tree t
, const char *name
, bool add_id
)
370 unsigned index
= varmap
.length ();
371 varinfo_t ret
= variable_info_pool
.allocate ();
373 if (dump_file
&& add_id
)
375 char *tempname
= xasprintf ("%s(%d)", name
, index
);
376 name
= ggc_strdup (tempname
);
383 /* Vars without decl are artificial and do not have sub-variables. */
384 ret
->is_artificial_var
= (t
== NULL_TREE
);
385 ret
->is_special_var
= false;
386 ret
->is_unknown_size_var
= false;
387 ret
->is_full_var
= (t
== NULL_TREE
);
388 ret
->is_heap_var
= false;
389 ret
->may_have_pointers
= true;
390 ret
->only_restrict_pointers
= false;
391 ret
->is_restrict_var
= false;
393 ret
->is_global_var
= (t
== NULL_TREE
);
394 ret
->is_ipa_escape_point
= false;
395 ret
->is_fn_info
= false;
397 ret
->is_global_var
= (is_global_var (t
)
398 /* We have to treat even local register variables
400 || (VAR_P (t
) && DECL_HARD_REGISTER (t
)));
401 ret
->is_reg_var
= (t
&& TREE_CODE (t
) == SSA_NAME
);
402 ret
->solution
= BITMAP_ALLOC (&pta_obstack
);
403 ret
->oldsolution
= NULL
;
405 ret
->shadow_var_uid
= 0;
410 varmap
.safe_push (ret
);
415 /* A map mapping call statements to per-stmt variables for uses
416 and clobbers specific to the call. */
417 static hash_map
<gimple
*, varinfo_t
> *call_stmt_vars
;
419 /* Lookup or create the variable for the call statement CALL. */
422 get_call_vi (gcall
*call
)
427 varinfo_t
*slot_p
= &call_stmt_vars
->get_or_insert (call
, &existed
);
431 vi
= new_var_info (NULL_TREE
, "CALLUSED", true);
435 vi
->is_full_var
= true;
436 vi
->is_reg_var
= true;
438 vi2
= new_var_info (NULL_TREE
, "CALLCLOBBERED", true);
442 vi2
->is_full_var
= true;
443 vi2
->is_reg_var
= true;
451 /* Lookup the variable for the call statement CALL representing
452 the uses. Returns NULL if there is nothing special about this call. */
455 lookup_call_use_vi (gcall
*call
)
457 varinfo_t
*slot_p
= call_stmt_vars
->get (call
);
464 /* Lookup the variable for the call statement CALL representing
465 the clobbers. Returns NULL if there is nothing special about this call. */
468 lookup_call_clobber_vi (gcall
*call
)
470 varinfo_t uses
= lookup_call_use_vi (call
);
474 return vi_next (uses
);
477 /* Lookup or create the variable for the call statement CALL representing
481 get_call_use_vi (gcall
*call
)
483 return get_call_vi (call
);
486 /* Lookup or create the variable for the call statement CALL representing
489 static varinfo_t ATTRIBUTE_UNUSED
490 get_call_clobber_vi (gcall
*call
)
492 return vi_next (get_call_vi (call
));
/* The three kinds of constraint expressions: x, *x and &x.  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};
498 /* An expression that appears in a constraint. */
500 struct constraint_expr
502 /* Constraint type. */
503 constraint_expr_type type
;
505 /* Variable we are referring to in the constraint. */
508 /* Offset, in bits, of this constraint from the beginning of
509 variables it ends up referring to.
511 IOW, in a deref constraint, we would deref, get the result set,
512 then add OFFSET to each member. */
513 HOST_WIDE_INT offset
;
516 /* Use 0x8000... as special unknown offset. */
517 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
519 typedef struct constraint_expr ce_s
;
520 static void get_constraint_for_1 (tree
, vec
<ce_s
> *, bool, bool);
521 static void get_constraint_for (tree
, vec
<ce_s
> *);
522 static void get_constraint_for_rhs (tree
, vec
<ce_s
> *);
523 static void do_deref (vec
<ce_s
> *);
525 /* Our set constraints are made up of two constraint expressions, one
528 As described in the introduction, our set constraints each represent an
529 operation between set valued variables.
533 struct constraint_expr lhs
;
534 struct constraint_expr rhs
;
537 /* List of constraints that we use to build the constraint graph from. */
539 static vec
<constraint_t
> constraints
;
540 static object_allocator
<constraint
> constraint_pool ("Constraint pool");
542 /* The constraint graph is represented as an array of bitmaps
543 containing successor nodes. */
545 struct constraint_graph
547 /* Size of this graph, which may be different than the number of
548 nodes in the variable map. */
551 /* Explicit successors of each node. */
554 /* Implicit predecessors of each node (Used for variable
556 bitmap
*implicit_preds
;
558 /* Explicit predecessors of each node (Used for variable substitution). */
561 /* Indirect cycle representatives, or -1 if the node has no indirect
563 int *indirect_cycles
;
565 /* Representative node for a node. rep[a] == a unless the node has
569 /* Equivalence class representative for a label. This is used for
570 variable substitution. */
573 /* Pointer equivalence label for a node. All nodes with the same
574 pointer equivalence label can be unified together at some point
575 (either during constraint optimization or after the constraint
579 /* Pointer equivalence representative for a label. This is used to
580 handle nodes that are pointer equivalent but not location
581 equivalent. We can unite these once the addressof constraints
582 are transformed into initial points-to sets. */
585 /* Pointer equivalence label for each node, used during variable
587 unsigned int *pointer_label
;
589 /* Location equivalence label for each node, used during location
590 equivalence finding. */
591 unsigned int *loc_label
;
593 /* Pointed-by set for each node, used during location equivalence
594 finding. This is pointed-by rather than pointed-to, because it
595 is constructed using the predecessor graph. */
598 /* Points to sets for pointer equivalence. This is *not* the actual
599 points-to sets for nodes. */
602 /* Bitmap of nodes where the bit is set if the node is a direct
603 node. Used for variable substitution. */
604 sbitmap direct_nodes
;
606 /* Bitmap of nodes where the bit is set if the node is address
607 taken. Used for variable substitution. */
608 bitmap address_taken
;
610 /* Vector of complex constraints for each graph node. Complex
611 constraints are those involving dereferences or offsets that are
613 vec
<constraint_t
> *complex;
616 static constraint_graph_t graph
;
/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
625 /* Return the representative node for NODE, if NODE has been unioned
627 This function performs path compression along the way to finding
628 the representative. */
631 find (unsigned int node
)
633 gcc_checking_assert (node
< graph
->size
);
634 if (graph
->rep
[node
] != node
)
635 return graph
->rep
[node
] = find (graph
->rep
[node
]);
639 /* Union the TO and FROM nodes to the TO nodes.
640 Note that at some point in the future, we may want to do
641 union-by-rank, in which case we are going to have to return the
642 node we unified to. */
645 unite (unsigned int to
, unsigned int from
)
647 gcc_checking_assert (to
< graph
->size
&& from
< graph
->size
);
648 if (to
!= from
&& graph
->rep
[from
] != to
)
650 graph
->rep
[from
] = to
;
656 /* Create a new constraint consisting of LHS and RHS expressions. */
659 new_constraint (const struct constraint_expr lhs
,
660 const struct constraint_expr rhs
)
662 constraint_t ret
= constraint_pool
.allocate ();
668 /* Print out constraint C to FILE. */
671 dump_constraint (FILE *file
, constraint_t c
)
673 if (c
->lhs
.type
== ADDRESSOF
)
675 else if (c
->lhs
.type
== DEREF
)
677 fprintf (file
, "%s", get_varinfo (c
->lhs
.var
)->name
);
678 if (c
->lhs
.offset
== UNKNOWN_OFFSET
)
679 fprintf (file
, " + UNKNOWN");
680 else if (c
->lhs
.offset
!= 0)
681 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->lhs
.offset
);
682 fprintf (file
, " = ");
683 if (c
->rhs
.type
== ADDRESSOF
)
685 else if (c
->rhs
.type
== DEREF
)
687 fprintf (file
, "%s", get_varinfo (c
->rhs
.var
)->name
);
688 if (c
->rhs
.offset
== UNKNOWN_OFFSET
)
689 fprintf (file
, " + UNKNOWN");
690 else if (c
->rhs
.offset
!= 0)
691 fprintf (file
, " + " HOST_WIDE_INT_PRINT_DEC
, c
->rhs
.offset
);
695 void debug_constraint (constraint_t
);
696 void debug_constraints (void);
697 void debug_constraint_graph (void);
698 void debug_solution_for_var (unsigned int);
699 void debug_sa_points_to_info (void);
700 void debug_varinfo (varinfo_t
);
701 void debug_varmap (void);
703 /* Print out constraint C to stderr. */
706 debug_constraint (constraint_t c
)
708 dump_constraint (stderr
, c
);
709 fprintf (stderr
, "\n");
712 /* Print out all constraints to FILE */
715 dump_constraints (FILE *file
, int from
)
719 for (i
= from
; constraints
.iterate (i
, &c
); i
++)
722 dump_constraint (file
, c
);
723 fprintf (file
, "\n");
727 /* Print out all constraints to stderr. */
730 debug_constraints (void)
732 dump_constraints (stderr
, 0);
735 /* Print the constraint graph in dot format. */
738 dump_constraint_graph (FILE *file
)
742 /* Only print the graph if it has already been initialized: */
746 /* Prints the header of the dot file: */
747 fprintf (file
, "strict digraph {\n");
748 fprintf (file
, " node [\n shape = box\n ]\n");
749 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
750 fprintf (file
, "\n // List of nodes and complex constraints in "
751 "the constraint graph:\n");
753 /* The next lines print the nodes in the graph together with the
754 complex constraints attached to them. */
755 for (i
= 1; i
< graph
->size
; i
++)
757 if (i
== FIRST_REF_NODE
)
761 if (i
< FIRST_REF_NODE
)
762 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
764 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
765 if (graph
->complex[i
].exists ())
769 fprintf (file
, " [label=\"\\N\\n");
770 for (j
= 0; graph
->complex[i
].iterate (j
, &c
); ++j
)
772 dump_constraint (file
, c
);
773 fprintf (file
, "\\l");
775 fprintf (file
, "\"]");
777 fprintf (file
, ";\n");
780 /* Go over the edges. */
781 fprintf (file
, "\n // Edges in the constraint graph:\n");
782 for (i
= 1; i
< graph
->size
; i
++)
788 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
], 0, j
, bi
)
790 unsigned to
= find (j
);
793 if (i
< FIRST_REF_NODE
)
794 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
796 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
797 fprintf (file
, " -> ");
798 if (to
< FIRST_REF_NODE
)
799 fprintf (file
, "\"%s\"", get_varinfo (to
)->name
);
801 fprintf (file
, "\"*%s\"", get_varinfo (to
- FIRST_REF_NODE
)->name
);
802 fprintf (file
, ";\n");
806 /* Prints the tail of the dot file. */
807 fprintf (file
, "}\n");
810 /* Print out the constraint graph to stderr. */
813 debug_constraint_graph (void)
815 dump_constraint_graph (stderr
);
820 The solver is a simple worklist solver, that works on the following
823 sbitmap changed_nodes = all zeroes;
825 For each node that is not already collapsed:
827 set bit in changed nodes
829 while (changed_count > 0)
831 compute topological ordering for constraint graph
833 find and collapse cycles in the constraint graph (updating
834 changed if necessary)
836 for each node (n) in the graph in topological order:
839 Process each complex constraint associated with the node,
840 updating changed if necessary.
842 For each outgoing edge from n, propagate the solution from n to
843 the destination of the edge, updating changed as necessary.
847 /* Return true if two constraint expressions A and B are equal. */
850 constraint_expr_equal (struct constraint_expr a
, struct constraint_expr b
)
852 return a
.type
== b
.type
&& a
.var
== b
.var
&& a
.offset
== b
.offset
;
855 /* Return true if constraint expression A is less than constraint expression
856 B. This is just arbitrary, but consistent, in order to give them an
860 constraint_expr_less (struct constraint_expr a
, struct constraint_expr b
)
862 if (a
.type
== b
.type
)
865 return a
.offset
< b
.offset
;
867 return a
.var
< b
.var
;
870 return a
.type
< b
.type
;
873 /* Return true if constraint A is less than constraint B. This is just
874 arbitrary, but consistent, in order to give them an ordering. */
877 constraint_less (const constraint_t
&a
, const constraint_t
&b
)
879 if (constraint_expr_less (a
->lhs
, b
->lhs
))
881 else if (constraint_expr_less (b
->lhs
, a
->lhs
))
884 return constraint_expr_less (a
->rhs
, b
->rhs
);
887 /* Return true if two constraints A and B are equal. */
890 constraint_equal (struct constraint a
, struct constraint b
)
892 return constraint_expr_equal (a
.lhs
, b
.lhs
)
893 && constraint_expr_equal (a
.rhs
, b
.rhs
);
897 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
900 constraint_vec_find (vec
<constraint_t
> vec
,
901 struct constraint lookfor
)
909 place
= vec
.lower_bound (&lookfor
, constraint_less
);
910 if (place
>= vec
.length ())
913 if (!constraint_equal (*found
, lookfor
))
918 /* Union two constraint vectors, TO and FROM. Put the result in TO.
919 Returns true of TO set is changed. */
922 constraint_set_union (vec
<constraint_t
> *to
,
923 vec
<constraint_t
> *from
)
927 bool any_change
= false;
929 FOR_EACH_VEC_ELT (*from
, i
, c
)
931 if (constraint_vec_find (*to
, *c
) == NULL
)
933 unsigned int place
= to
->lower_bound (c
, constraint_less
);
934 to
->safe_insert (place
, c
);
941 /* Expands the solution in SET to all sub-fields of variables included. */
944 solution_set_expand (bitmap set
, bitmap
*expanded
)
952 *expanded
= BITMAP_ALLOC (&iteration_obstack
);
954 /* In a first pass expand to the head of the variables we need to
955 add all sub-fields off. This avoids quadratic behavior. */
956 EXECUTE_IF_SET_IN_BITMAP (set
, 0, j
, bi
)
958 varinfo_t v
= get_varinfo (j
);
959 if (v
->is_artificial_var
962 bitmap_set_bit (*expanded
, v
->head
);
965 /* In the second pass now expand all head variables with subfields. */
966 EXECUTE_IF_SET_IN_BITMAP (*expanded
, 0, j
, bi
)
968 varinfo_t v
= get_varinfo (j
);
971 for (v
= vi_next (v
); v
!= NULL
; v
= vi_next (v
))
972 bitmap_set_bit (*expanded
, v
->id
);
975 /* And finally set the rest of the bits from SET. */
976 bitmap_ior_into (*expanded
, set
);
981 /* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
985 set_union_with_increment (bitmap to
, bitmap delta
, HOST_WIDE_INT inc
,
986 bitmap
*expanded_delta
)
988 bool changed
= false;
992 /* If the solution of DELTA contains anything it is good enough to transfer
994 if (bitmap_bit_p (delta
, anything_id
))
995 return bitmap_set_bit (to
, anything_id
);
997 /* If the offset is unknown we have to expand the solution to
999 if (inc
== UNKNOWN_OFFSET
)
1001 delta
= solution_set_expand (delta
, expanded_delta
);
1002 changed
|= bitmap_ior_into (to
, delta
);
1006 /* For non-zero offset union the offsetted solution into the destination. */
1007 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, i
, bi
)
1009 varinfo_t vi
= get_varinfo (i
);
1011 /* If this is a variable with just one field just set its bit
1013 if (vi
->is_artificial_var
1014 || vi
->is_unknown_size_var
1016 changed
|= bitmap_set_bit (to
, i
);
1019 HOST_WIDE_INT fieldoffset
= vi
->offset
+ inc
;
1020 unsigned HOST_WIDE_INT size
= vi
->size
;
1022 /* If the offset makes the pointer point to before the
1023 variable use offset zero for the field lookup. */
1024 if (fieldoffset
< 0)
1025 vi
= get_varinfo (vi
->head
);
1027 vi
= first_or_preceding_vi_for_offset (vi
, fieldoffset
);
1031 changed
|= bitmap_set_bit (to
, vi
->id
);
1036 /* We have to include all fields that overlap the current field
1040 while (vi
->offset
< fieldoffset
+ size
);
1047 /* Insert constraint C into the list of complex constraints for graph
1051 insert_into_complex (constraint_graph_t graph
,
1052 unsigned int var
, constraint_t c
)
1054 vec
<constraint_t
> complex = graph
->complex[var
];
1055 unsigned int place
= complex.lower_bound (c
, constraint_less
);
1057 /* Only insert constraints that do not already exist. */
1058 if (place
>= complex.length ()
1059 || !constraint_equal (*c
, *complex[place
]))
1060 graph
->complex[var
].safe_insert (place
, c
);
1064 /* Condense two variable nodes into a single variable node, by moving
1065 all associated info from FROM to TO. Returns true if TO node's
1066 constraint set changes after the merge. */
1069 merge_node_constraints (constraint_graph_t graph
, unsigned int to
,
1074 bool any_change
= false;
1076 gcc_checking_assert (find (from
) == to
);
1078 /* Move all complex constraints from src node into to node */
1079 FOR_EACH_VEC_ELT (graph
->complex[from
], i
, c
)
1081 /* In complex constraints for node FROM, we may have either
1082 a = *FROM, and *FROM = a, or an offseted constraint which are
1083 always added to the rhs node's constraints. */
1085 if (c
->rhs
.type
== DEREF
)
1087 else if (c
->lhs
.type
== DEREF
)
1093 any_change
= constraint_set_union (&graph
->complex[to
],
1094 &graph
->complex[from
]);
1095 graph
->complex[from
].release ();
1100 /* Remove edges involving NODE from GRAPH. */
1103 clear_edges_for_node (constraint_graph_t graph
, unsigned int node
)
1105 if (graph
->succs
[node
])
1106 BITMAP_FREE (graph
->succs
[node
]);
1109 /* Merge GRAPH nodes FROM and TO into node TO. */
1112 merge_graph_nodes (constraint_graph_t graph
, unsigned int to
,
1115 if (graph
->indirect_cycles
[from
] != -1)
1117 /* If we have indirect cycles with the from node, and we have
1118 none on the to node, the to node has indirect cycles from the
1119 from node now that they are unified.
1120 If indirect cycles exist on both, unify the nodes that they
1121 are in a cycle with, since we know they are in a cycle with
1123 if (graph
->indirect_cycles
[to
] == -1)
1124 graph
->indirect_cycles
[to
] = graph
->indirect_cycles
[from
];
1127 /* Merge all the successor edges. */
1128 if (graph
->succs
[from
])
1130 if (!graph
->succs
[to
])
1131 graph
->succs
[to
] = BITMAP_ALLOC (&pta_obstack
);
1132 bitmap_ior_into (graph
->succs
[to
],
1133 graph
->succs
[from
]);
1136 clear_edges_for_node (graph
, from
);
1140 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1141 it doesn't exist in the graph already. */
1144 add_implicit_graph_edge (constraint_graph_t graph
, unsigned int to
,
1150 if (!graph
->implicit_preds
[to
])
1151 graph
->implicit_preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1153 if (bitmap_set_bit (graph
->implicit_preds
[to
], from
))
1154 stats
.num_implicit_edges
++;
1157 /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1158 it doesn't exist in the graph already.
1159 Return false if the edge already existed, true otherwise. */
1162 add_pred_graph_edge (constraint_graph_t graph
, unsigned int to
,
1165 if (!graph
->preds
[to
])
1166 graph
->preds
[to
] = BITMAP_ALLOC (&predbitmap_obstack
);
1167 bitmap_set_bit (graph
->preds
[to
], from
);
1170 /* Add a graph edge to GRAPH, going from FROM to TO if
1171 it doesn't exist in the graph already.
1172 Return false if the edge already existed, true otherwise. */
1175 add_graph_edge (constraint_graph_t graph
, unsigned int to
,
1186 if (!graph
->succs
[from
])
1187 graph
->succs
[from
] = BITMAP_ALLOC (&pta_obstack
);
1188 if (bitmap_set_bit (graph
->succs
[from
], to
))
1191 if (to
< FIRST_REF_NODE
&& from
< FIRST_REF_NODE
)
1199 /* Initialize the constraint graph structure to contain SIZE nodes. */
1202 init_graph (unsigned int size
)
1206 graph
= XCNEW (struct constraint_graph
);
1208 graph
->succs
= XCNEWVEC (bitmap
, graph
->size
);
1209 graph
->indirect_cycles
= XNEWVEC (int, graph
->size
);
1210 graph
->rep
= XNEWVEC (unsigned int, graph
->size
);
1211 /* ??? Macros do not support template types with multiple arguments,
1212 so we use a typedef to work around it. */
1213 typedef vec
<constraint_t
> vec_constraint_t_heap
;
1214 graph
->complex = XCNEWVEC (vec_constraint_t_heap
, size
);
1215 graph
->pe
= XCNEWVEC (unsigned int, graph
->size
);
1216 graph
->pe_rep
= XNEWVEC (int, graph
->size
);
1218 for (j
= 0; j
< graph
->size
; j
++)
1221 graph
->pe_rep
[j
] = -1;
1222 graph
->indirect_cycles
[j
] = -1;
1226 /* Build the constraint graph, adding only predecessor edges right now. */
1229 build_pred_graph (void)
1235 graph
->implicit_preds
= XCNEWVEC (bitmap
, graph
->size
);
1236 graph
->preds
= XCNEWVEC (bitmap
, graph
->size
);
1237 graph
->pointer_label
= XCNEWVEC (unsigned int, graph
->size
);
1238 graph
->loc_label
= XCNEWVEC (unsigned int, graph
->size
);
1239 graph
->pointed_by
= XCNEWVEC (bitmap
, graph
->size
);
1240 graph
->points_to
= XCNEWVEC (bitmap
, graph
->size
);
1241 graph
->eq_rep
= XNEWVEC (int, graph
->size
);
1242 graph
->direct_nodes
= sbitmap_alloc (graph
->size
);
1243 graph
->address_taken
= BITMAP_ALLOC (&predbitmap_obstack
);
1244 bitmap_clear (graph
->direct_nodes
);
1246 for (j
= 1; j
< FIRST_REF_NODE
; j
++)
1248 if (!get_varinfo (j
)->is_special_var
)
1249 bitmap_set_bit (graph
->direct_nodes
, j
);
1252 for (j
= 0; j
< graph
->size
; j
++)
1253 graph
->eq_rep
[j
] = -1;
1255 for (j
= 0; j
< varmap
.length (); j
++)
1256 graph
->indirect_cycles
[j
] = -1;
1258 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1260 struct constraint_expr lhs
= c
->lhs
;
1261 struct constraint_expr rhs
= c
->rhs
;
1262 unsigned int lhsvar
= lhs
.var
;
1263 unsigned int rhsvar
= rhs
.var
;
1265 if (lhs
.type
== DEREF
)
1268 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1269 add_pred_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1271 else if (rhs
.type
== DEREF
)
1274 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1275 add_pred_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1277 bitmap_clear_bit (graph
->direct_nodes
, lhsvar
);
1279 else if (rhs
.type
== ADDRESSOF
)
1284 if (graph
->points_to
[lhsvar
] == NULL
)
1285 graph
->points_to
[lhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1286 bitmap_set_bit (graph
->points_to
[lhsvar
], rhsvar
);
1288 if (graph
->pointed_by
[rhsvar
] == NULL
)
1289 graph
->pointed_by
[rhsvar
] = BITMAP_ALLOC (&predbitmap_obstack
);
1290 bitmap_set_bit (graph
->pointed_by
[rhsvar
], lhsvar
);
1292 /* Implicitly, *x = y */
1293 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1295 /* All related variables are no longer direct nodes. */
1296 bitmap_clear_bit (graph
->direct_nodes
, rhsvar
);
1297 v
= get_varinfo (rhsvar
);
1298 if (!v
->is_full_var
)
1300 v
= get_varinfo (v
->head
);
1303 bitmap_clear_bit (graph
->direct_nodes
, v
->id
);
1308 bitmap_set_bit (graph
->address_taken
, rhsvar
);
1310 else if (lhsvar
> anything_id
1311 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1314 add_pred_graph_edge (graph
, lhsvar
, rhsvar
);
1315 /* Implicitly, *x = *y */
1316 add_implicit_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
,
1317 FIRST_REF_NODE
+ rhsvar
);
1319 else if (lhs
.offset
!= 0 || rhs
.offset
!= 0)
1321 if (rhs
.offset
!= 0)
1322 bitmap_clear_bit (graph
->direct_nodes
, lhs
.var
);
1323 else if (lhs
.offset
!= 0)
1324 bitmap_clear_bit (graph
->direct_nodes
, rhs
.var
);
1329 /* Build the constraint graph, adding successor edges. */
1332 build_succ_graph (void)
1337 FOR_EACH_VEC_ELT (constraints
, i
, c
)
1339 struct constraint_expr lhs
;
1340 struct constraint_expr rhs
;
1341 unsigned int lhsvar
;
1342 unsigned int rhsvar
;
1349 lhsvar
= find (lhs
.var
);
1350 rhsvar
= find (rhs
.var
);
1352 if (lhs
.type
== DEREF
)
1354 if (rhs
.offset
== 0 && lhs
.offset
== 0 && rhs
.type
== SCALAR
)
1355 add_graph_edge (graph
, FIRST_REF_NODE
+ lhsvar
, rhsvar
);
1357 else if (rhs
.type
== DEREF
)
1359 if (rhs
.offset
== 0 && lhs
.offset
== 0 && lhs
.type
== SCALAR
)
1360 add_graph_edge (graph
, lhsvar
, FIRST_REF_NODE
+ rhsvar
);
1362 else if (rhs
.type
== ADDRESSOF
)
1365 gcc_checking_assert (find (rhs
.var
) == rhs
.var
);
1366 bitmap_set_bit (get_varinfo (lhsvar
)->solution
, rhsvar
);
1368 else if (lhsvar
> anything_id
1369 && lhsvar
!= rhsvar
&& lhs
.offset
== 0 && rhs
.offset
== 0)
1371 add_graph_edge (graph
, lhsvar
, rhsvar
);
1375 /* Add edges from STOREDANYTHING to all non-direct nodes that can
1376 receive pointers. */
1377 t
= find (storedanything_id
);
1378 for (i
= integer_id
+ 1; i
< FIRST_REF_NODE
; ++i
)
1380 if (!bitmap_bit_p (graph
->direct_nodes
, i
)
1381 && get_varinfo (i
)->may_have_pointers
)
1382 add_graph_edge (graph
, find (i
), t
);
1385 /* Everything stored to ANYTHING also potentially escapes. */
1386 add_graph_edge (graph
, find (escaped_id
), t
);
1390 /* Changed variables on the last iteration. */
1391 static bitmap changed
;
1393 /* Strongly Connected Component visitation info. */
1398 scc_info (size_t size
);
1401 auto_sbitmap visited
;
1402 auto_sbitmap deleted
;
1404 unsigned int *node_mapping
;
1406 auto_vec
<unsigned> scc_stack
;
1410 /* Recursive routine to find strongly connected components in GRAPH.
1411 SI is the SCC info to store the information in, and N is the id of current
1412 graph node we are processing.
1414 This is Tarjan's strongly connected component finding algorithm, as
1415 modified by Nuutila to keep only non-root nodes on the stack.
1416 The algorithm can be found in "On finding the strongly connected
1417 connected components in a directed graph" by Esko Nuutila and Eljas
1418 Soisalon-Soininen, in Information Processing Letters volume 49,
1419 number 1, pages 9-14. */
1422 scc_visit (constraint_graph_t graph
, class scc_info
*si
, unsigned int n
)
1426 unsigned int my_dfs
;
1428 bitmap_set_bit (si
->visited
, n
);
1429 si
->dfs
[n
] = si
->current_index
++;
1430 my_dfs
= si
->dfs
[n
];
1432 /* Visit all the successors. */
1433 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[n
], 0, i
, bi
)
1437 if (i
> LAST_REF_NODE
)
1441 if (bitmap_bit_p (si
->deleted
, w
))
1444 if (!bitmap_bit_p (si
->visited
, w
))
1445 scc_visit (graph
, si
, w
);
1447 unsigned int t
= find (w
);
1448 gcc_checking_assert (find (n
) == n
);
1449 if (si
->dfs
[t
] < si
->dfs
[n
])
1450 si
->dfs
[n
] = si
->dfs
[t
];
1453 /* See if any components have been identified. */
1454 if (si
->dfs
[n
] == my_dfs
)
1456 if (si
->scc_stack
.length () > 0
1457 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1459 bitmap scc
= BITMAP_ALLOC (NULL
);
1460 unsigned int lowest_node
;
1463 bitmap_set_bit (scc
, n
);
1465 while (si
->scc_stack
.length () != 0
1466 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
1468 unsigned int w
= si
->scc_stack
.pop ();
1470 bitmap_set_bit (scc
, w
);
1473 lowest_node
= bitmap_first_set_bit (scc
);
1474 gcc_assert (lowest_node
< FIRST_REF_NODE
);
1476 /* Collapse the SCC nodes into a single node, and mark the
1478 EXECUTE_IF_SET_IN_BITMAP (scc
, 0, i
, bi
)
1480 if (i
< FIRST_REF_NODE
)
1482 if (unite (lowest_node
, i
))
1483 unify_nodes (graph
, lowest_node
, i
, false);
1487 unite (lowest_node
, i
);
1488 graph
->indirect_cycles
[i
- FIRST_REF_NODE
] = lowest_node
;
1492 bitmap_set_bit (si
->deleted
, n
);
1495 si
->scc_stack
.safe_push (n
);
1498 /* Unify node FROM into node TO, updating the changed count if
1499 necessary when UPDATE_CHANGED is true. */
1502 unify_nodes (constraint_graph_t graph
, unsigned int to
, unsigned int from
,
1503 bool update_changed
)
1505 gcc_checking_assert (to
!= from
&& find (to
) == to
);
1507 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1508 fprintf (dump_file
, "Unifying %s to %s\n",
1509 get_varinfo (from
)->name
,
1510 get_varinfo (to
)->name
);
1513 stats
.unified_vars_dynamic
++;
1515 stats
.unified_vars_static
++;
1517 merge_graph_nodes (graph
, to
, from
);
1518 if (merge_node_constraints (graph
, to
, from
))
1521 bitmap_set_bit (changed
, to
);
1524 /* Mark TO as changed if FROM was changed. If TO was already marked
1525 as changed, decrease the changed count. */
1528 && bitmap_clear_bit (changed
, from
))
1529 bitmap_set_bit (changed
, to
);
1530 varinfo_t fromvi
= get_varinfo (from
);
1531 if (fromvi
->solution
)
1533 /* If the solution changes because of the merging, we need to mark
1534 the variable as changed. */
1535 varinfo_t tovi
= get_varinfo (to
);
1536 if (bitmap_ior_into (tovi
->solution
, fromvi
->solution
))
1539 bitmap_set_bit (changed
, to
);
1542 BITMAP_FREE (fromvi
->solution
);
1543 if (fromvi
->oldsolution
)
1544 BITMAP_FREE (fromvi
->oldsolution
);
1546 if (stats
.iterations
> 0
1547 && tovi
->oldsolution
)
1548 BITMAP_FREE (tovi
->oldsolution
);
1550 if (graph
->succs
[to
])
1551 bitmap_clear_bit (graph
->succs
[to
], to
);
1554 /* Information needed to compute the topological ordering of a graph. */
1558 /* sbitmap of visited nodes. */
1560 /* Array that stores the topological order of the graph, *in
1562 vec
<unsigned> topo_order
;
1566 /* Initialize and return a topological info structure. */
1568 static struct topo_info
*
1569 init_topo_info (void)
1571 size_t size
= graph
->size
;
1572 struct topo_info
*ti
= XNEW (struct topo_info
);
1573 ti
->visited
= sbitmap_alloc (size
);
1574 bitmap_clear (ti
->visited
);
1575 ti
->topo_order
.create (1);
1580 /* Free the topological sort info pointed to by TI. */
1583 free_topo_info (struct topo_info
*ti
)
1585 sbitmap_free (ti
->visited
);
1586 ti
->topo_order
.release ();
1590 /* Visit the graph in topological order, and store the order in the
1591 topo_info structure. */
1594 topo_visit (constraint_graph_t graph
, struct topo_info
*ti
,
1600 bitmap_set_bit (ti
->visited
, n
);
1602 if (graph
->succs
[n
])
1603 EXECUTE_IF_SET_IN_BITMAP (graph
->succs
[n
], 0, j
, bi
)
1605 if (!bitmap_bit_p (ti
->visited
, j
))
1606 topo_visit (graph
, ti
, j
);
1609 ti
->topo_order
.safe_push (n
);
1612 /* Process a constraint C that represents x = *(y + off), using DELTA as the
1613 starting solution for y. */
1616 do_sd_constraint (constraint_graph_t graph
, constraint_t c
,
1617 bitmap delta
, bitmap
*expanded_delta
)
1619 unsigned int lhs
= c
->lhs
.var
;
1621 bitmap sol
= get_varinfo (lhs
)->solution
;
1624 HOST_WIDE_INT roffset
= c
->rhs
.offset
;
1626 /* Our IL does not allow this. */
1627 gcc_checking_assert (c
->lhs
.offset
== 0);
1629 /* If the solution of Y contains anything it is good enough to transfer
1631 if (bitmap_bit_p (delta
, anything_id
))
1633 flag
|= bitmap_set_bit (sol
, anything_id
);
1637 /* If we do not know at with offset the rhs is dereferenced compute
1638 the reachability set of DELTA, conservatively assuming it is
1639 dereferenced at all valid offsets. */
1640 if (roffset
== UNKNOWN_OFFSET
)
1642 delta
= solution_set_expand (delta
, expanded_delta
);
1643 /* No further offset processing is necessary. */
1647 /* For each variable j in delta (Sol(y)), add
1648 an edge in the graph from j to x, and union Sol(j) into Sol(x). */
1649 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1651 varinfo_t v
= get_varinfo (j
);
1652 HOST_WIDE_INT fieldoffset
= v
->offset
+ roffset
;
1653 unsigned HOST_WIDE_INT size
= v
->size
;
1658 else if (roffset
!= 0)
1660 if (fieldoffset
< 0)
1661 v
= get_varinfo (v
->head
);
1663 v
= first_or_preceding_vi_for_offset (v
, fieldoffset
);
1666 /* We have to include all fields that overlap the current field
1667 shifted by roffset. */
1672 /* Adding edges from the special vars is pointless.
1673 They don't have sets that can change. */
1674 if (get_varinfo (t
)->is_special_var
)
1675 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1676 /* Merging the solution from ESCAPED needlessly increases
1677 the set. Use ESCAPED as representative instead. */
1678 else if (v
->id
== escaped_id
)
1679 flag
|= bitmap_set_bit (sol
, escaped_id
);
1680 else if (v
->may_have_pointers
1681 && add_graph_edge (graph
, lhs
, t
))
1682 flag
|= bitmap_ior_into (sol
, get_varinfo (t
)->solution
);
1690 while (v
->offset
< fieldoffset
+ size
);
1694 /* If the LHS solution changed, mark the var as changed. */
1697 get_varinfo (lhs
)->solution
= sol
;
1698 bitmap_set_bit (changed
, lhs
);
1702 /* Process a constraint C that represents *(x + off) = y using DELTA
1703 as the starting solution for x. */
1706 do_ds_constraint (constraint_t c
, bitmap delta
, bitmap
*expanded_delta
)
1708 unsigned int rhs
= c
->rhs
.var
;
1709 bitmap sol
= get_varinfo (rhs
)->solution
;
1712 HOST_WIDE_INT loff
= c
->lhs
.offset
;
1713 bool escaped_p
= false;
1715 /* Our IL does not allow this. */
1716 gcc_checking_assert (c
->rhs
.offset
== 0);
1718 /* If the solution of y contains ANYTHING simply use the ANYTHING
1719 solution. This avoids needlessly increasing the points-to sets. */
1720 if (bitmap_bit_p (sol
, anything_id
))
1721 sol
= get_varinfo (find (anything_id
))->solution
;
1723 /* If the solution for x contains ANYTHING we have to merge the
1724 solution of y into all pointer variables which we do via
1726 if (bitmap_bit_p (delta
, anything_id
))
1728 unsigned t
= find (storedanything_id
);
1729 if (add_graph_edge (graph
, t
, rhs
))
1731 if (bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1732 bitmap_set_bit (changed
, t
);
1737 /* If we do not know at with offset the rhs is dereferenced compute
1738 the reachability set of DELTA, conservatively assuming it is
1739 dereferenced at all valid offsets. */
1740 if (loff
== UNKNOWN_OFFSET
)
1742 delta
= solution_set_expand (delta
, expanded_delta
);
1746 /* For each member j of delta (Sol(x)), add an edge from y to j and
1747 union Sol(y) into Sol(j) */
1748 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, j
, bi
)
1750 varinfo_t v
= get_varinfo (j
);
1752 HOST_WIDE_INT fieldoffset
= v
->offset
+ loff
;
1753 unsigned HOST_WIDE_INT size
= v
->size
;
1759 if (fieldoffset
< 0)
1760 v
= get_varinfo (v
->head
);
1762 v
= first_or_preceding_vi_for_offset (v
, fieldoffset
);
1765 /* We have to include all fields that overlap the current field
1769 if (v
->may_have_pointers
)
1771 /* If v is a global variable then this is an escape point. */
1772 if (v
->is_global_var
1775 t
= find (escaped_id
);
1776 if (add_graph_edge (graph
, t
, rhs
)
1777 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1778 bitmap_set_bit (changed
, t
);
1779 /* Enough to let rhs escape once. */
1783 if (v
->is_special_var
)
1787 if (add_graph_edge (graph
, t
, rhs
)
1788 && bitmap_ior_into (get_varinfo (t
)->solution
, sol
))
1789 bitmap_set_bit (changed
, t
);
1798 while (v
->offset
< fieldoffset
+ size
);
1802 /* Handle a non-simple (simple meaning requires no iteration),
1803 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1806 do_complex_constraint (constraint_graph_t graph
, constraint_t c
, bitmap delta
,
1807 bitmap
*expanded_delta
)
1809 if (c
->lhs
.type
== DEREF
)
1811 if (c
->rhs
.type
== ADDRESSOF
)
1818 do_ds_constraint (c
, delta
, expanded_delta
);
1821 else if (c
->rhs
.type
== DEREF
)
1824 if (!(get_varinfo (c
->lhs
.var
)->is_special_var
))
1825 do_sd_constraint (graph
, c
, delta
, expanded_delta
);
1832 gcc_checking_assert (c
->rhs
.type
== SCALAR
&& c
->lhs
.type
== SCALAR
1833 && c
->rhs
.offset
!= 0 && c
->lhs
.offset
== 0);
1834 tmp
= get_varinfo (c
->lhs
.var
)->solution
;
1836 flag
= set_union_with_increment (tmp
, delta
, c
->rhs
.offset
,
1840 bitmap_set_bit (changed
, c
->lhs
.var
);
1844 /* Initialize and return a new SCC info structure. */
1846 scc_info::scc_info (size_t size
) :
1847 visited (size
), deleted (size
), current_index (0), scc_stack (1)
1849 bitmap_clear (visited
);
1850 bitmap_clear (deleted
);
1851 node_mapping
= XNEWVEC (unsigned int, size
);
1852 dfs
= XCNEWVEC (unsigned int, size
);
1854 for (size_t i
= 0; i
< size
; i
++)
1855 node_mapping
[i
] = i
;
1858 /* Free an SCC info structure pointed to by SI */
1860 scc_info::~scc_info ()
1862 free (node_mapping
);
1867 /* Find indirect cycles in GRAPH that occur, using strongly connected
1868 components, and note them in the indirect cycles map.
1870 This technique comes from Ben Hardekopf and Calvin Lin,
1871 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1872 Lines of Code", submitted to PLDI 2007. */
1875 find_indirect_cycles (constraint_graph_t graph
)
1878 unsigned int size
= graph
->size
;
1881 for (i
= 0; i
< MIN (LAST_REF_NODE
, size
); i
++ )
1882 if (!bitmap_bit_p (si
.visited
, i
) && find (i
) == i
)
1883 scc_visit (graph
, &si
, i
);
1886 /* Compute a topological ordering for GRAPH, and store the result in the
1887 topo_info structure TI. */
1890 compute_topo_order (constraint_graph_t graph
,
1891 struct topo_info
*ti
)
1894 unsigned int size
= graph
->size
;
1896 for (i
= 0; i
!= size
; ++i
)
1897 if (!bitmap_bit_p (ti
->visited
, i
) && find (i
) == i
)
1898 topo_visit (graph
, ti
, i
);
1901 /* Structure used to for hash value numbering of pointer equivalence
1904 typedef struct equiv_class_label
1907 unsigned int equivalence_class
;
1909 } *equiv_class_label_t
;
1910 typedef const struct equiv_class_label
*const_equiv_class_label_t
;
1912 /* Equiv_class_label hashtable helpers. */
1914 struct equiv_class_hasher
: nofree_ptr_hash
<equiv_class_label
>
1916 static inline hashval_t
hash (const equiv_class_label
*);
1917 static inline bool equal (const equiv_class_label
*,
1918 const equiv_class_label
*);
1921 /* Hash function for a equiv_class_label_t */
1924 equiv_class_hasher::hash (const equiv_class_label
*ecl
)
1926 return ecl
->hashcode
;
1929 /* Equality function for two equiv_class_label_t's. */
1932 equiv_class_hasher::equal (const equiv_class_label
*eql1
,
1933 const equiv_class_label
*eql2
)
1935 return (eql1
->hashcode
== eql2
->hashcode
1936 && bitmap_equal_p (eql1
->labels
, eql2
->labels
));
1939 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1941 static hash_table
<equiv_class_hasher
> *pointer_equiv_class_table
;
1943 /* A hashtable for mapping a bitmap of labels->location equivalence
1945 static hash_table
<equiv_class_hasher
> *location_equiv_class_table
;
1947 struct obstack equiv_class_obstack
;
1949 /* Lookup a equivalence class in TABLE by the bitmap of LABELS with
1950 hash HAS it contains. Sets *REF_LABELS to the bitmap LABELS
1951 is equivalent to. */
1953 static equiv_class_label
*
1954 equiv_class_lookup_or_add (hash_table
<equiv_class_hasher
> *table
,
1957 equiv_class_label
**slot
;
1958 equiv_class_label ecl
;
1960 ecl
.labels
= labels
;
1961 ecl
.hashcode
= bitmap_hash (labels
);
1962 slot
= table
->find_slot (&ecl
, INSERT
);
1965 *slot
= XOBNEW (&equiv_class_obstack
, struct equiv_class_label
);
1966 (*slot
)->labels
= labels
;
1967 (*slot
)->hashcode
= ecl
.hashcode
;
1968 (*slot
)->equivalence_class
= 0;
1974 /* Perform offline variable substitution.
1976 This is a worst case quadratic time way of identifying variables
1977 that must have equivalent points-to sets, including those caused by
1978 static cycles, and single entry subgraphs, in the constraint graph.
1980 The technique is described in "Exploiting Pointer and Location
1981 Equivalence to Optimize Pointer Analysis. In the 14th International
1982 Static Analysis Symposium (SAS), August 2007." It is known as the
1983 "HU" algorithm, and is equivalent to value numbering the collapsed
1984 constraint graph including evaluating unions.
1986 The general method of finding equivalence classes is as follows:
1987 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1988 Initialize all non-REF nodes to be direct nodes.
1989 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1991 For each constraint containing the dereference, we also do the same
1994 We then compute SCC's in the graph and unify nodes in the same SCC,
1997 For each non-collapsed node x:
1998 Visit all unvisited explicit incoming edges.
1999 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2001 Lookup the equivalence class for pts(x).
2002 If we found one, equivalence_class(x) = found class.
2003 Otherwise, equivalence_class(x) = new class, and new_class is
2004 added to the lookup table.
2006 All direct nodes with the same equivalence class can be replaced
2007 with a single representative node.
2008 All unlabeled nodes (label == 0) are not pointers and all edges
2009 involving them can be eliminated.
2010 We perform these optimizations during rewrite_constraints
2012 In addition to pointer equivalence class finding, we also perform
2013 location equivalence class finding. This is the set of variables
2014 that always appear together in points-to sets. We use this to
2015 compress the size of the points-to sets. */
/* Current maximum pointer equivalence class id.  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
2023 /* Recursive routine to find strongly connected components in GRAPH,
2024 and label it's nodes with DFS numbers. */
2027 condense_visit (constraint_graph_t graph
, class scc_info
*si
, unsigned int n
)
2031 unsigned int my_dfs
;
2033 gcc_checking_assert (si
->node_mapping
[n
] == n
);
2034 bitmap_set_bit (si
->visited
, n
);
2035 si
->dfs
[n
] = si
->current_index
++;
2036 my_dfs
= si
->dfs
[n
];
2038 /* Visit all the successors. */
2039 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2041 unsigned int w
= si
->node_mapping
[i
];
2043 if (bitmap_bit_p (si
->deleted
, w
))
2046 if (!bitmap_bit_p (si
->visited
, w
))
2047 condense_visit (graph
, si
, w
);
2049 unsigned int t
= si
->node_mapping
[w
];
2050 gcc_checking_assert (si
->node_mapping
[n
] == n
);
2051 if (si
->dfs
[t
] < si
->dfs
[n
])
2052 si
->dfs
[n
] = si
->dfs
[t
];
2055 /* Visit all the implicit predecessors. */
2056 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->implicit_preds
[n
], 0, i
, bi
)
2058 unsigned int w
= si
->node_mapping
[i
];
2060 if (bitmap_bit_p (si
->deleted
, w
))
2063 if (!bitmap_bit_p (si
->visited
, w
))
2064 condense_visit (graph
, si
, w
);
2066 unsigned int t
= si
->node_mapping
[w
];
2067 gcc_assert (si
->node_mapping
[n
] == n
);
2068 if (si
->dfs
[t
] < si
->dfs
[n
])
2069 si
->dfs
[n
] = si
->dfs
[t
];
2072 /* See if any components have been identified. */
2073 if (si
->dfs
[n
] == my_dfs
)
2075 if (si
->scc_stack
.length () != 0
2076 && si
->dfs
[si
->scc_stack
.last ()] >= my_dfs
)
2078 /* Find the first node of the SCC and do non-bitmap work. */
2079 bool direct_p
= true;
2080 unsigned first
= si
->scc_stack
.length ();
2084 unsigned int w
= si
->scc_stack
[first
];
2085 si
->node_mapping
[w
] = n
;
2086 if (!bitmap_bit_p (graph
->direct_nodes
, w
))
2090 && si
->dfs
[si
->scc_stack
[first
- 1]] >= my_dfs
);
2092 bitmap_clear_bit (graph
->direct_nodes
, n
);
2094 /* Want to reduce to node n, push that first. */
2095 si
->scc_stack
.reserve (1);
2096 si
->scc_stack
.quick_push (si
->scc_stack
[first
]);
2097 si
->scc_stack
[first
] = n
;
2099 unsigned scc_size
= si
->scc_stack
.length () - first
;
2100 unsigned split
= scc_size
/ 2;
2101 unsigned carry
= scc_size
- split
* 2;
2104 for (unsigned i
= 0; i
< split
; ++i
)
2106 unsigned a
= si
->scc_stack
[first
+ i
];
2107 unsigned b
= si
->scc_stack
[first
+ split
+ carry
+ i
];
2109 /* Unify our nodes. */
2110 if (graph
->preds
[b
])
2112 if (!graph
->preds
[a
])
2113 std::swap (graph
->preds
[a
], graph
->preds
[b
]);
2115 bitmap_ior_into_and_free (graph
->preds
[a
],
2118 if (graph
->implicit_preds
[b
])
2120 if (!graph
->implicit_preds
[a
])
2121 std::swap (graph
->implicit_preds
[a
],
2122 graph
->implicit_preds
[b
]);
2124 bitmap_ior_into_and_free (graph
->implicit_preds
[a
],
2125 &graph
->implicit_preds
[b
]);
2127 if (graph
->points_to
[b
])
2129 if (!graph
->points_to
[a
])
2130 std::swap (graph
->points_to
[a
], graph
->points_to
[b
]);
2132 bitmap_ior_into_and_free (graph
->points_to
[a
],
2133 &graph
->points_to
[b
]);
2136 unsigned remain
= split
+ carry
;
2138 carry
= remain
- split
* 2;
2140 /* Actually pop the SCC. */
2141 si
->scc_stack
.truncate (first
);
2143 bitmap_set_bit (si
->deleted
, n
);
2146 si
->scc_stack
.safe_push (n
);
2149 /* Label pointer equivalences.
2151 This performs a value numbering of the constraint graph to
2152 discover which variables will always have the same points-to sets
2153 under the current set of constraints.
2155 The way it value numbers is to store the set of points-to bits
2156 generated by the constraints and graph edges. This is just used as a
2157 hash and equality comparison. The *actual set of points-to bits* is
2158 completely irrelevant, in that we don't care about being able to
2161 The equality values (currently bitmaps) just have to satisfy a few
2162 constraints, the main ones being:
2163 1. The combining operation must be order independent.
2164 2. The end result of a given set of operations must be unique iff the
2165 combination of input values is unique
2169 label_visit (constraint_graph_t graph
, class scc_info
*si
, unsigned int n
)
2171 unsigned int i
, first_pred
;
2174 bitmap_set_bit (si
->visited
, n
);
2176 /* Label and union our incoming edges's points to sets. */
2178 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[n
], 0, i
, bi
)
2180 unsigned int w
= si
->node_mapping
[i
];
2181 if (!bitmap_bit_p (si
->visited
, w
))
2182 label_visit (graph
, si
, w
);
2184 /* Skip unused edges */
2185 if (w
== n
|| graph
->pointer_label
[w
] == 0)
2188 if (graph
->points_to
[w
])
2190 if (!graph
->points_to
[n
])
2192 if (first_pred
== -1U)
2196 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2197 bitmap_ior (graph
->points_to
[n
],
2198 graph
->points_to
[first_pred
],
2199 graph
->points_to
[w
]);
2203 bitmap_ior_into (graph
->points_to
[n
], graph
->points_to
[w
]);
2207 /* Indirect nodes get fresh variables and a new pointer equiv class. */
2208 if (!bitmap_bit_p (graph
->direct_nodes
, n
))
2210 if (!graph
->points_to
[n
])
2212 graph
->points_to
[n
] = BITMAP_ALLOC (&predbitmap_obstack
);
2213 if (first_pred
!= -1U)
2214 bitmap_copy (graph
->points_to
[n
], graph
->points_to
[first_pred
]);
2216 bitmap_set_bit (graph
->points_to
[n
], FIRST_REF_NODE
+ n
);
2217 graph
->pointer_label
[n
] = pointer_equiv_class
++;
2218 equiv_class_label_t ecl
;
2219 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2220 graph
->points_to
[n
]);
2221 ecl
->equivalence_class
= graph
->pointer_label
[n
];
2225 /* If there was only a single non-empty predecessor the pointer equiv
2226 class is the same. */
2227 if (!graph
->points_to
[n
])
2229 if (first_pred
!= -1U)
2231 graph
->pointer_label
[n
] = graph
->pointer_label
[first_pred
];
2232 graph
->points_to
[n
] = graph
->points_to
[first_pred
];
2237 if (!bitmap_empty_p (graph
->points_to
[n
]))
2239 equiv_class_label_t ecl
;
2240 ecl
= equiv_class_lookup_or_add (pointer_equiv_class_table
,
2241 graph
->points_to
[n
]);
2242 if (ecl
->equivalence_class
== 0)
2243 ecl
->equivalence_class
= pointer_equiv_class
++;
2246 BITMAP_FREE (graph
->points_to
[n
]);
2247 graph
->points_to
[n
] = ecl
->labels
;
2249 graph
->pointer_label
[n
] = ecl
->equivalence_class
;
2253 /* Print the pred graph in dot format. */
2256 dump_pred_graph (class scc_info
*si
, FILE *file
)
2260 /* Only print the graph if it has already been initialized: */
2264 /* Prints the header of the dot file: */
2265 fprintf (file
, "strict digraph {\n");
2266 fprintf (file
, " node [\n shape = box\n ]\n");
2267 fprintf (file
, " edge [\n fontsize = \"12\"\n ]\n");
2268 fprintf (file
, "\n // List of nodes and complex constraints in "
2269 "the constraint graph:\n");
2271 /* The next lines print the nodes in the graph together with the
2272 complex constraints attached to them. */
2273 for (i
= 1; i
< graph
->size
; i
++)
2275 if (i
== FIRST_REF_NODE
)
2277 if (si
->node_mapping
[i
] != i
)
2279 if (i
< FIRST_REF_NODE
)
2280 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2282 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2283 if (graph
->points_to
[i
]
2284 && !bitmap_empty_p (graph
->points_to
[i
]))
2286 if (i
< FIRST_REF_NODE
)
2287 fprintf (file
, "[label=\"%s = {", get_varinfo (i
)->name
);
2289 fprintf (file
, "[label=\"*%s = {",
2290 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2293 EXECUTE_IF_SET_IN_BITMAP (graph
->points_to
[i
], 0, j
, bi
)
2294 fprintf (file
, " %d", j
);
2295 fprintf (file
, " }\"]");
2297 fprintf (file
, ";\n");
2300 /* Go over the edges. */
2301 fprintf (file
, "\n // Edges in the constraint graph:\n");
2302 for (i
= 1; i
< graph
->size
; i
++)
2306 if (si
->node_mapping
[i
] != i
)
2308 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->preds
[i
], 0, j
, bi
)
2310 unsigned from
= si
->node_mapping
[j
];
2311 if (from
< FIRST_REF_NODE
)
2312 fprintf (file
, "\"%s\"", get_varinfo (from
)->name
);
2314 fprintf (file
, "\"*%s\"", get_varinfo (from
- FIRST_REF_NODE
)->name
);
2315 fprintf (file
, " -> ");
2316 if (i
< FIRST_REF_NODE
)
2317 fprintf (file
, "\"%s\"", get_varinfo (i
)->name
);
2319 fprintf (file
, "\"*%s\"", get_varinfo (i
- FIRST_REF_NODE
)->name
);
2320 fprintf (file
, ";\n");
2324 /* Prints the tail of the dot file. */
2325 fprintf (file
, "}\n");
2328 /* Perform offline variable substitution, discovering equivalence
2329 classes, and eliminating non-pointer variables. */
2331 static class scc_info
*
2332 perform_var_substitution (constraint_graph_t graph
)
2335 unsigned int size
= graph
->size
;
2336 scc_info
*si
= new scc_info (size
);
2338 bitmap_obstack_initialize (&iteration_obstack
);
2339 gcc_obstack_init (&equiv_class_obstack
);
2340 pointer_equiv_class_table
= new hash_table
<equiv_class_hasher
> (511);
2341 location_equiv_class_table
2342 = new hash_table
<equiv_class_hasher
> (511);
2343 pointer_equiv_class
= 1;
2344 location_equiv_class
= 1;
2346 /* Condense the nodes, which means to find SCC's, count incoming
2347 predecessors, and unite nodes in SCC's. */
2348 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2349 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2350 condense_visit (graph
, si
, si
->node_mapping
[i
]);
2352 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
2354 fprintf (dump_file
, "\n\n// The constraint graph before var-substitution "
2355 "in dot format:\n");
2356 dump_pred_graph (si
, dump_file
);
2357 fprintf (dump_file
, "\n\n");
2360 bitmap_clear (si
->visited
);
2361 /* Actually the label the nodes for pointer equivalences */
2362 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2363 if (!bitmap_bit_p (si
->visited
, si
->node_mapping
[i
]))
2364 label_visit (graph
, si
, si
->node_mapping
[i
]);
2366 /* Calculate location equivalence labels. */
2367 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2373 if (!graph
->pointed_by
[i
])
2375 pointed_by
= BITMAP_ALLOC (&iteration_obstack
);
2377 /* Translate the pointed-by mapping for pointer equivalence
2379 EXECUTE_IF_SET_IN_BITMAP (graph
->pointed_by
[i
], 0, j
, bi
)
2381 bitmap_set_bit (pointed_by
,
2382 graph
->pointer_label
[si
->node_mapping
[j
]]);
2384 /* The original pointed_by is now dead. */
2385 BITMAP_FREE (graph
->pointed_by
[i
]);
2387 /* Look up the location equivalence label if one exists, or make
2389 equiv_class_label_t ecl
;
2390 ecl
= equiv_class_lookup_or_add (location_equiv_class_table
, pointed_by
);
2391 if (ecl
->equivalence_class
== 0)
2392 ecl
->equivalence_class
= location_equiv_class
++;
2395 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2396 fprintf (dump_file
, "Found location equivalence for node %s\n",
2397 get_varinfo (i
)->name
);
2398 BITMAP_FREE (pointed_by
);
2400 graph
->loc_label
[i
] = ecl
->equivalence_class
;
2404 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2405 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2407 unsigned j
= si
->node_mapping
[i
];
2410 fprintf (dump_file
, "%s node id %d ",
2411 bitmap_bit_p (graph
->direct_nodes
, i
)
2412 ? "Direct" : "Indirect", i
);
2413 if (i
< FIRST_REF_NODE
)
2414 fprintf (dump_file
, "\"%s\"", get_varinfo (i
)->name
);
2416 fprintf (dump_file
, "\"*%s\"",
2417 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2418 fprintf (dump_file
, " mapped to SCC leader node id %d ", j
);
2419 if (j
< FIRST_REF_NODE
)
2420 fprintf (dump_file
, "\"%s\"\n", get_varinfo (j
)->name
);
2422 fprintf (dump_file
, "\"*%s\"\n",
2423 get_varinfo (j
- FIRST_REF_NODE
)->name
);
2428 "Equivalence classes for %s node id %d ",
2429 bitmap_bit_p (graph
->direct_nodes
, i
)
2430 ? "direct" : "indirect", i
);
2431 if (i
< FIRST_REF_NODE
)
2432 fprintf (dump_file
, "\"%s\"", get_varinfo (i
)->name
);
2434 fprintf (dump_file
, "\"*%s\"",
2435 get_varinfo (i
- FIRST_REF_NODE
)->name
);
2437 ": pointer %d, location %d\n",
2438 graph
->pointer_label
[i
], graph
->loc_label
[i
]);
2442 /* Quickly eliminate our non-pointer variables. */
2444 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2446 unsigned int node
= si
->node_mapping
[i
];
2448 if (graph
->pointer_label
[node
] == 0)
2450 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2452 "%s is a non-pointer variable, eliminating edges.\n",
2453 get_varinfo (node
)->name
);
2454 stats
.nonpointer_vars
++;
2455 clear_edges_for_node (graph
, node
);
2462 /* Free information that was only necessary for variable
2466 free_var_substitution_info (class scc_info
*si
)
2469 free (graph
->pointer_label
);
2470 free (graph
->loc_label
);
2471 free (graph
->pointed_by
);
2472 free (graph
->points_to
);
2473 free (graph
->eq_rep
);
2474 sbitmap_free (graph
->direct_nodes
);
2475 delete pointer_equiv_class_table
;
2476 pointer_equiv_class_table
= NULL
;
2477 delete location_equiv_class_table
;
2478 location_equiv_class_table
= NULL
;
2479 obstack_free (&equiv_class_obstack
, NULL
);
2480 bitmap_obstack_release (&iteration_obstack
);
2483 /* Return an existing node that is equivalent to NODE, which has
2484 equivalence class LABEL, if one exists. Return NODE otherwise. */
2487 find_equivalent_node (constraint_graph_t graph
,
2488 unsigned int node
, unsigned int label
)
2490 /* If the address version of this variable is unused, we can
2491 substitute it for anything else with the same label.
2492 Otherwise, we know the pointers are equivalent, but not the
2493 locations, and we can unite them later. */
2495 if (!bitmap_bit_p (graph
->address_taken
, node
))
2497 gcc_checking_assert (label
< graph
->size
);
2499 if (graph
->eq_rep
[label
] != -1)
2501 /* Unify the two variables since we know they are equivalent. */
2502 if (unite (graph
->eq_rep
[label
], node
))
2503 unify_nodes (graph
, graph
->eq_rep
[label
], node
, false);
2504 return graph
->eq_rep
[label
];
2508 graph
->eq_rep
[label
] = node
;
2509 graph
->pe_rep
[label
] = node
;
2514 gcc_checking_assert (label
< graph
->size
);
2515 graph
->pe
[node
] = label
;
2516 if (graph
->pe_rep
[label
] == -1)
2517 graph
->pe_rep
[label
] = node
;
2523 /* Unite pointer equivalent but not location equivalent nodes in
2524 GRAPH. This may only be performed once variable substitution is
2528 unite_pointer_equivalences (constraint_graph_t graph
)
2532 /* Go through the pointer equivalences and unite them to their
2533 representative, if they aren't already. */
2534 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
2536 unsigned int label
= graph
->pe
[i
];
2539 int label_rep
= graph
->pe_rep
[label
];
2541 if (label_rep
== -1)
2544 label_rep
= find (label_rep
);
2545 if (label_rep
>= 0 && unite (label_rep
, find (i
)))
2546 unify_nodes (graph
, label_rep
, i
, false);
2551 /* Move complex constraints to the GRAPH nodes they belong to. */
2554 move_complex_constraints (constraint_graph_t graph
)
2559 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2563 struct constraint_expr lhs
= c
->lhs
;
2564 struct constraint_expr rhs
= c
->rhs
;
2566 if (lhs
.type
== DEREF
)
2568 insert_into_complex (graph
, lhs
.var
, c
);
2570 else if (rhs
.type
== DEREF
)
2572 if (!(get_varinfo (lhs
.var
)->is_special_var
))
2573 insert_into_complex (graph
, rhs
.var
, c
);
2575 else if (rhs
.type
!= ADDRESSOF
&& lhs
.var
> anything_id
2576 && (lhs
.offset
!= 0 || rhs
.offset
!= 0))
2578 insert_into_complex (graph
, rhs
.var
, c
);
2585 /* Optimize and rewrite complex constraints while performing
2586 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2587 result of perform_variable_substitution. */
2590 rewrite_constraints (constraint_graph_t graph
,
2598 for (unsigned int j
= 0; j
< graph
->size
; j
++)
2599 gcc_assert (find (j
) == j
);
2602 FOR_EACH_VEC_ELT (constraints
, i
, c
)
2604 struct constraint_expr lhs
= c
->lhs
;
2605 struct constraint_expr rhs
= c
->rhs
;
2606 unsigned int lhsvar
= find (lhs
.var
);
2607 unsigned int rhsvar
= find (rhs
.var
);
2608 unsigned int lhsnode
, rhsnode
;
2609 unsigned int lhslabel
, rhslabel
;
2611 lhsnode
= si
->node_mapping
[lhsvar
];
2612 rhsnode
= si
->node_mapping
[rhsvar
];
2613 lhslabel
= graph
->pointer_label
[lhsnode
];
2614 rhslabel
= graph
->pointer_label
[rhsnode
];
2616 /* See if it is really a non-pointer variable, and if so, ignore
2620 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2623 fprintf (dump_file
, "%s is a non-pointer variable, "
2624 "ignoring constraint:",
2625 get_varinfo (lhs
.var
)->name
);
2626 dump_constraint (dump_file
, c
);
2627 fprintf (dump_file
, "\n");
2629 constraints
[i
] = NULL
;
2635 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2638 fprintf (dump_file
, "%s is a non-pointer variable, "
2639 "ignoring constraint:",
2640 get_varinfo (rhs
.var
)->name
);
2641 dump_constraint (dump_file
, c
);
2642 fprintf (dump_file
, "\n");
2644 constraints
[i
] = NULL
;
2648 lhsvar
= find_equivalent_node (graph
, lhsvar
, lhslabel
);
2649 rhsvar
= find_equivalent_node (graph
, rhsvar
, rhslabel
);
2650 c
->lhs
.var
= lhsvar
;
2651 c
->rhs
.var
= rhsvar
;
2655 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2656 part of an SCC, false otherwise. */
2659 eliminate_indirect_cycles (unsigned int node
)
2661 if (graph
->indirect_cycles
[node
] != -1
2662 && !bitmap_empty_p (get_varinfo (node
)->solution
))
2665 auto_vec
<unsigned> queue
;
2667 unsigned int to
= find (graph
->indirect_cycles
[node
]);
2670 /* We can't touch the solution set and call unify_nodes
2671 at the same time, because unify_nodes is going to do
2672 bitmap unions into it. */
2674 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node
)->solution
, 0, i
, bi
)
2676 if (find (i
) == i
&& i
!= to
)
2679 queue
.safe_push (i
);
2684 queue
.iterate (queuepos
, &i
);
2687 unify_nodes (graph
, to
, i
, true);
2694 /* Solve the constraint graph GRAPH using our worklist solver.
2695 This is based on the PW* family of solvers from the "Efficient Field
2696 Sensitive Pointer Analysis for C" paper.
2697 It works by iterating over all the graph nodes, processing the complex
2698 constraints and propagating the copy constraints, until everything stops
2699 changed. This corresponds to steps 6-8 in the solving list given above. */
2702 solve_graph (constraint_graph_t graph
)
2704 unsigned int size
= graph
->size
;
2708 changed
= BITMAP_ALLOC (NULL
);
2710 /* Mark all initial non-collapsed nodes as changed. */
2711 for (i
= 1; i
< size
; i
++)
2713 varinfo_t ivi
= get_varinfo (i
);
2714 if (find (i
) == i
&& !bitmap_empty_p (ivi
->solution
)
2715 && ((graph
->succs
[i
] && !bitmap_empty_p (graph
->succs
[i
]))
2716 || graph
->complex[i
].length () > 0))
2717 bitmap_set_bit (changed
, i
);
2720 /* Allocate a bitmap to be used to store the changed bits. */
2721 pts
= BITMAP_ALLOC (&pta_obstack
);
2723 while (!bitmap_empty_p (changed
))
2726 struct topo_info
*ti
= init_topo_info ();
2729 bitmap_obstack_initialize (&iteration_obstack
);
2731 compute_topo_order (graph
, ti
);
2733 while (ti
->topo_order
.length () != 0)
2736 i
= ti
->topo_order
.pop ();
2738 /* If this variable is not a representative, skip it. */
2742 /* In certain indirect cycle cases, we may merge this
2743 variable to another. */
2744 if (eliminate_indirect_cycles (i
) && find (i
) != i
)
2747 /* If the node has changed, we need to process the
2748 complex constraints and outgoing edges again. */
2749 if (bitmap_clear_bit (changed
, i
))
2754 vec
<constraint_t
> complex = graph
->complex[i
];
2755 varinfo_t vi
= get_varinfo (i
);
2756 bool solution_empty
;
2758 /* Compute the changed set of solution bits. If anything
2759 is in the solution just propagate that. */
2760 if (bitmap_bit_p (vi
->solution
, anything_id
))
2762 /* If anything is also in the old solution there is
2764 ??? But we shouldn't ended up with "changed" set ... */
2766 && bitmap_bit_p (vi
->oldsolution
, anything_id
))
2768 bitmap_copy (pts
, get_varinfo (find (anything_id
))->solution
);
2770 else if (vi
->oldsolution
)
2771 bitmap_and_compl (pts
, vi
->solution
, vi
->oldsolution
);
2773 bitmap_copy (pts
, vi
->solution
);
2775 if (bitmap_empty_p (pts
))
2778 if (vi
->oldsolution
)
2779 bitmap_ior_into (vi
->oldsolution
, pts
);
2782 vi
->oldsolution
= BITMAP_ALLOC (&oldpta_obstack
);
2783 bitmap_copy (vi
->oldsolution
, pts
);
2786 solution
= vi
->solution
;
2787 solution_empty
= bitmap_empty_p (solution
);
2789 /* Process the complex constraints */
2790 bitmap expanded_pts
= NULL
;
2791 FOR_EACH_VEC_ELT (complex, j
, c
)
2793 /* XXX: This is going to unsort the constraints in
2794 some cases, which will occasionally add duplicate
2795 constraints during unification. This does not
2796 affect correctness. */
2797 c
->lhs
.var
= find (c
->lhs
.var
);
2798 c
->rhs
.var
= find (c
->rhs
.var
);
2800 /* The only complex constraint that can change our
2801 solution to non-empty, given an empty solution,
2802 is a constraint where the lhs side is receiving
2803 some set from elsewhere. */
2804 if (!solution_empty
|| c
->lhs
.type
!= DEREF
)
2805 do_complex_constraint (graph
, c
, pts
, &expanded_pts
);
2807 BITMAP_FREE (expanded_pts
);
2809 solution_empty
= bitmap_empty_p (solution
);
2811 if (!solution_empty
)
2814 unsigned eff_escaped_id
= find (escaped_id
);
2816 /* Propagate solution to all successors. */
2817 unsigned to_remove
= ~0U;
2818 EXECUTE_IF_IN_NONNULL_BITMAP (graph
->succs
[i
],
2821 if (to_remove
!= ~0U)
2823 bitmap_clear_bit (graph
->succs
[i
], to_remove
);
2826 unsigned int to
= find (j
);
2829 /* Update the succ graph, avoiding duplicate
2832 if (! bitmap_set_bit (graph
->succs
[i
], to
))
2834 /* We eventually end up processing 'to' twice
2835 as it is undefined whether bitmap iteration
2836 iterates over bits set during iteration.
2837 Play safe instead of doing tricks. */
2839 /* Don't try to propagate to ourselves. */
2843 bitmap tmp
= get_varinfo (to
)->solution
;
2846 /* If we propagate from ESCAPED use ESCAPED as
2848 if (i
== eff_escaped_id
)
2849 flag
= bitmap_set_bit (tmp
, escaped_id
);
2851 flag
= bitmap_ior_into (tmp
, pts
);
2854 bitmap_set_bit (changed
, to
);
2856 if (to_remove
!= ~0U)
2857 bitmap_clear_bit (graph
->succs
[i
], to_remove
);
2861 free_topo_info (ti
);
2862 bitmap_obstack_release (&iteration_obstack
);
2866 BITMAP_FREE (changed
);
2867 bitmap_obstack_release (&oldpta_obstack
);
2870 /* Map from trees to variable infos. */
2871 static hash_map
<tree
, varinfo_t
> *vi_for_tree
;
2874 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2877 insert_vi_for_tree (tree t
, varinfo_t vi
)
2880 gcc_assert (!vi_for_tree
->put (t
, vi
));
2883 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2884 exist in the map, return NULL, otherwise, return the varinfo we found. */
2887 lookup_vi_for_tree (tree t
)
2889 varinfo_t
*slot
= vi_for_tree
->get (t
);
2896 /* Return a printable name for DECL */
2899 alias_get_name (tree decl
)
2901 const char *res
= "NULL";
2905 if (TREE_CODE (decl
) == SSA_NAME
)
2907 res
= get_name (decl
);
2908 temp
= xasprintf ("%s_%u", res
? res
: "", SSA_NAME_VERSION (decl
));
2910 else if (HAS_DECL_ASSEMBLER_NAME_P (decl
)
2911 && DECL_ASSEMBLER_NAME_SET_P (decl
))
2912 res
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl
));
2913 else if (DECL_P (decl
))
2915 res
= get_name (decl
);
2917 temp
= xasprintf ("D.%u", DECL_UID (decl
));
2922 res
= ggc_strdup (temp
);
2930 /* Find the variable id for tree T in the map.
2931 If T doesn't exist in the map, create an entry for it and return it. */
2934 get_vi_for_tree (tree t
)
2936 varinfo_t
*slot
= vi_for_tree
->get (t
);
2939 unsigned int id
= create_variable_info_for (t
, alias_get_name (t
), false);
2940 return get_varinfo (id
);
2946 /* Get a scalar constraint expression for a new temporary variable. */
2948 static struct constraint_expr
2949 new_scalar_tmp_constraint_exp (const char *name
, bool add_id
)
2951 struct constraint_expr tmp
;
2954 vi
= new_var_info (NULL_TREE
, name
, add_id
);
2958 vi
->is_full_var
= 1;
2968 /* Get a constraint expression vector from an SSA_VAR_P node.
2969 If address_p is true, the result will be taken its address of. */
2972 get_constraint_for_ssa_var (tree t
, vec
<ce_s
> *results
, bool address_p
)
2974 struct constraint_expr cexpr
;
2977 /* We allow FUNCTION_DECLs here even though it doesn't make much sense. */
2978 gcc_assert (TREE_CODE (t
) == SSA_NAME
|| DECL_P (t
));
2980 if (TREE_CODE (t
) == SSA_NAME
2981 && SSA_NAME_IS_DEFAULT_DEF (t
))
2983 /* For parameters, get at the points-to set for the actual parm
2985 if (TREE_CODE (SSA_NAME_VAR (t
)) == PARM_DECL
2986 || TREE_CODE (SSA_NAME_VAR (t
)) == RESULT_DECL
)
2988 get_constraint_for_ssa_var (SSA_NAME_VAR (t
), results
, address_p
);
2991 /* For undefined SSA names return nothing. */
2992 else if (!ssa_defined_default_def_p (t
))
2994 cexpr
.var
= nothing_id
;
2995 cexpr
.type
= SCALAR
;
2997 results
->safe_push (cexpr
);
3002 /* For global variables resort to the alias target. */
3003 if (VAR_P (t
) && (TREE_STATIC (t
) || DECL_EXTERNAL (t
)))
3005 varpool_node
*node
= varpool_node::get (t
);
3006 if (node
&& node
->alias
&& node
->analyzed
)
3008 node
= node
->ultimate_alias_target ();
3009 /* Canonicalize the PT uid of all aliases to the ultimate target.
3010 ??? Hopefully the set of aliases can't change in a way that
3011 changes the ultimate alias target. */
3012 gcc_assert ((! DECL_PT_UID_SET_P (node
->decl
)
3013 || DECL_PT_UID (node
->decl
) == DECL_UID (node
->decl
))
3014 && (! DECL_PT_UID_SET_P (t
)
3015 || DECL_PT_UID (t
) == DECL_UID (node
->decl
)));
3016 DECL_PT_UID (t
) = DECL_UID (node
->decl
);
3020 /* If this is decl may bind to NULL note that. */
3022 && (! node
|| ! node
->nonzero_address ()))
3024 cexpr
.var
= nothing_id
;
3025 cexpr
.type
= SCALAR
;
3027 results
->safe_push (cexpr
);
3031 vi
= get_vi_for_tree (t
);
3033 cexpr
.type
= SCALAR
;
3036 /* If we are not taking the address of the constraint expr, add all
3037 sub-fiels of the variable as well. */
3039 && !vi
->is_full_var
)
3041 for (; vi
; vi
= vi_next (vi
))
3044 results
->safe_push (cexpr
);
3049 results
->safe_push (cexpr
);
3052 /* Process constraint T, performing various simplifications and then
3053 adding it to our list of overall constraints. */
3056 process_constraint (constraint_t t
)
3058 struct constraint_expr rhs
= t
->rhs
;
3059 struct constraint_expr lhs
= t
->lhs
;
3061 gcc_assert (rhs
.var
< varmap
.length ());
3062 gcc_assert (lhs
.var
< varmap
.length ());
3064 /* If we didn't get any useful constraint from the lhs we get
3065 &ANYTHING as fallback from get_constraint_for. Deal with
3066 it here by turning it into *ANYTHING. */
3067 if (lhs
.type
== ADDRESSOF
3068 && lhs
.var
== anything_id
)
3071 /* ADDRESSOF on the lhs is invalid. */
3072 gcc_assert (lhs
.type
!= ADDRESSOF
);
3074 /* We shouldn't add constraints from things that cannot have pointers.
3075 It's not completely trivial to avoid in the callers, so do it here. */
3076 if (rhs
.type
!= ADDRESSOF
3077 && !get_varinfo (rhs
.var
)->may_have_pointers
)
3080 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3081 if (!get_varinfo (lhs
.var
)->may_have_pointers
)
3084 /* This can happen in our IR with things like n->a = *p */
3085 if (rhs
.type
== DEREF
&& lhs
.type
== DEREF
&& rhs
.var
!= anything_id
)
3087 /* Split into tmp = *rhs, *lhs = tmp */
3088 struct constraint_expr tmplhs
;
3089 tmplhs
= new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3090 process_constraint (new_constraint (tmplhs
, rhs
));
3091 process_constraint (new_constraint (lhs
, tmplhs
));
3093 else if ((rhs
.type
!= SCALAR
|| rhs
.offset
!= 0) && lhs
.type
== DEREF
)
3095 /* Split into tmp = &rhs, *lhs = tmp */
3096 struct constraint_expr tmplhs
;
3097 tmplhs
= new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3098 process_constraint (new_constraint (tmplhs
, rhs
));
3099 process_constraint (new_constraint (lhs
, tmplhs
));
3103 gcc_assert (rhs
.type
!= ADDRESSOF
|| rhs
.offset
== 0);
3104 constraints
.safe_push (t
);
3109 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3112 static HOST_WIDE_INT
3113 bitpos_of_field (const tree fdecl
)
3115 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl
))
3116 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl
)))
3119 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl
)) * BITS_PER_UNIT
3120 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl
)));
3124 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3125 resulting constraint expressions in *RESULTS. */
3128 get_constraint_for_ptr_offset (tree ptr
, tree offset
,
3131 struct constraint_expr c
;
3133 HOST_WIDE_INT rhsoffset
;
3135 /* If we do not do field-sensitive PTA adding offsets to pointers
3136 does not change the points-to solution. */
3137 if (!use_field_sensitive
)
3139 get_constraint_for_rhs (ptr
, results
);
3143 /* If the offset is not a non-negative integer constant that fits
3144 in a HOST_WIDE_INT, we have to fall back to a conservative
3145 solution which includes all sub-fields of all pointed-to
3146 variables of ptr. */
3147 if (offset
== NULL_TREE
3148 || TREE_CODE (offset
) != INTEGER_CST
)
3149 rhsoffset
= UNKNOWN_OFFSET
;
3152 /* Sign-extend the offset. */
3153 offset_int soffset
= offset_int::from (wi::to_wide (offset
), SIGNED
);
3154 if (!wi::fits_shwi_p (soffset
))
3155 rhsoffset
= UNKNOWN_OFFSET
;
3158 /* Make sure the bit-offset also fits. */
3159 HOST_WIDE_INT rhsunitoffset
= soffset
.to_shwi ();
3160 rhsoffset
= rhsunitoffset
* (unsigned HOST_WIDE_INT
) BITS_PER_UNIT
;
3161 if (rhsunitoffset
!= rhsoffset
/ BITS_PER_UNIT
)
3162 rhsoffset
= UNKNOWN_OFFSET
;
3166 get_constraint_for_rhs (ptr
, results
);
3170 /* As we are eventually appending to the solution do not use
3171 vec::iterate here. */
3172 n
= results
->length ();
3173 for (j
= 0; j
< n
; j
++)
3177 curr
= get_varinfo (c
.var
);
3179 if (c
.type
== ADDRESSOF
3180 /* If this varinfo represents a full variable just use it. */
3181 && curr
->is_full_var
)
3183 else if (c
.type
== ADDRESSOF
3184 /* If we do not know the offset add all subfields. */
3185 && rhsoffset
== UNKNOWN_OFFSET
)
3187 varinfo_t temp
= get_varinfo (curr
->head
);
3190 struct constraint_expr c2
;
3192 c2
.type
= ADDRESSOF
;
3194 if (c2
.var
!= c
.var
)
3195 results
->safe_push (c2
);
3196 temp
= vi_next (temp
);
3200 else if (c
.type
== ADDRESSOF
)
3203 unsigned HOST_WIDE_INT offset
= curr
->offset
+ rhsoffset
;
3205 /* If curr->offset + rhsoffset is less than zero adjust it. */
3207 && curr
->offset
< offset
)
3210 /* We have to include all fields that overlap the current
3211 field shifted by rhsoffset. And we include at least
3212 the last or the first field of the variable to represent
3213 reachability of off-bound addresses, in particular &object + 1,
3214 conservatively correct. */
3215 temp
= first_or_preceding_vi_for_offset (curr
, offset
);
3218 temp
= vi_next (temp
);
3220 && temp
->offset
< offset
+ curr
->size
)
3222 struct constraint_expr c2
;
3224 c2
.type
= ADDRESSOF
;
3226 results
->safe_push (c2
);
3227 temp
= vi_next (temp
);
3230 else if (c
.type
== SCALAR
)
3232 gcc_assert (c
.offset
== 0);
3233 c
.offset
= rhsoffset
;
3236 /* We shouldn't get any DEREFs here. */
3244 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3245 If address_p is true the result will be taken its address of.
3246 If lhs_p is true then the constraint expression is assumed to be used
3250 get_constraint_for_component_ref (tree t
, vec
<ce_s
> *results
,
3251 bool address_p
, bool lhs_p
)
3254 poly_int64 bitsize
= -1;
3255 poly_int64 bitmaxsize
= -1;
3260 /* Some people like to do cute things like take the address of
3263 while (handled_component_p (forzero
)
3264 || INDIRECT_REF_P (forzero
)
3265 || TREE_CODE (forzero
) == MEM_REF
)
3266 forzero
= TREE_OPERAND (forzero
, 0);
3268 if (CONSTANT_CLASS_P (forzero
) && integer_zerop (forzero
))
3270 struct constraint_expr temp
;
3273 temp
.var
= integer_id
;
3275 results
->safe_push (temp
);
3279 t
= get_ref_base_and_extent (t
, &bitpos
, &bitsize
, &bitmaxsize
, &reverse
);
3281 /* We can end up here for component references on a
3282 VIEW_CONVERT_EXPR <>(&foobar) or things like a
3283 BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3284 symbolic constants simply give up. */
3285 if (TREE_CODE (t
) == ADDR_EXPR
)
3287 constraint_expr result
;
3288 result
.type
= SCALAR
;
3289 result
.var
= anything_id
;
3291 results
->safe_push (result
);
3295 /* Avoid creating pointer-offset constraints, so handle MEM_REF
3296 offsets directly. Pretend to take the address of the base,
3297 we'll take care of adding the required subset of sub-fields below. */
3298 if (TREE_CODE (t
) == MEM_REF
3299 && !integer_zerop (TREE_OPERAND (t
, 0)))
3301 poly_offset_int off
= mem_ref_offset (t
);
3302 off
<<= LOG2_BITS_PER_UNIT
;
3305 if (off
.to_shwi (&off_hwi
))
3312 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, false, lhs_p
);
3316 get_constraint_for_1 (t
, results
, true, lhs_p
);
3318 /* Strip off nothing_id. */
3319 if (results
->length () == 2)
3321 gcc_assert ((*results
)[0].var
== nothing_id
);
3322 results
->unordered_remove (0);
3324 gcc_assert (results
->length () == 1);
3325 struct constraint_expr
&result
= results
->last ();
3327 if (result
.type
== SCALAR
3328 && get_varinfo (result
.var
)->is_full_var
)
3329 /* For single-field vars do not bother about the offset. */
3331 else if (result
.type
== SCALAR
)
3333 /* In languages like C, you can access one past the end of an
3334 array. You aren't allowed to dereference it, so we can
3335 ignore this constraint. When we handle pointer subtraction,
3336 we may have to do something cute here. */
3338 if (maybe_lt (poly_uint64 (bitpos
), get_varinfo (result
.var
)->fullsize
)
3339 && maybe_ne (bitmaxsize
, 0))
3341 /* It's also not true that the constraint will actually start at the
3342 right offset, it may start in some padding. We only care about
3343 setting the constraint to the first actual field it touches, so
3345 struct constraint_expr cexpr
= result
;
3349 for (curr
= get_varinfo (cexpr
.var
); curr
; curr
= vi_next (curr
))
3351 if (ranges_maybe_overlap_p (poly_int64 (curr
->offset
),
3352 curr
->size
, bitpos
, bitmaxsize
))
3354 cexpr
.var
= curr
->id
;
3355 results
->safe_push (cexpr
);
3360 /* If we are going to take the address of this field then
3361 to be able to compute reachability correctly add at least
3362 the last field of the variable. */
3363 if (address_p
&& results
->length () == 0)
3365 curr
= get_varinfo (cexpr
.var
);
3366 while (curr
->next
!= 0)
3367 curr
= vi_next (curr
);
3368 cexpr
.var
= curr
->id
;
3369 results
->safe_push (cexpr
);
3371 else if (results
->length () == 0)
3372 /* Assert that we found *some* field there. The user couldn't be
3373 accessing *only* padding. */
3374 /* Still the user could access one past the end of an array
3375 embedded in a struct resulting in accessing *only* padding. */
3376 /* Or accessing only padding via type-punning to a type
3377 that has a filed just in padding space. */
3379 cexpr
.type
= SCALAR
;
3380 cexpr
.var
= anything_id
;
3382 results
->safe_push (cexpr
);
3385 else if (known_eq (bitmaxsize
, 0))
3387 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3388 fprintf (dump_file
, "Access to zero-sized part of variable, "
3392 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3393 fprintf (dump_file
, "Access to past the end of variable, ignoring\n");
3395 else if (result
.type
== DEREF
)
3397 /* If we do not know exactly where the access goes say so. Note
3398 that only for non-structure accesses we know that we access
3399 at most one subfiled of any variable. */
3400 HOST_WIDE_INT const_bitpos
;
3401 if (!bitpos
.is_constant (&const_bitpos
)
3402 || const_bitpos
== -1
3403 || maybe_ne (bitsize
, bitmaxsize
)
3404 || AGGREGATE_TYPE_P (TREE_TYPE (orig_t
))
3405 || result
.offset
== UNKNOWN_OFFSET
)
3406 result
.offset
= UNKNOWN_OFFSET
;
3408 result
.offset
+= const_bitpos
;
3410 else if (result
.type
== ADDRESSOF
)
3412 /* We can end up here for component references on constants like
3413 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3414 result
.type
= SCALAR
;
3415 result
.var
= anything_id
;
3423 /* Dereference the constraint expression CONS, and return the result.
3424 DEREF (ADDRESSOF) = SCALAR
3425 DEREF (SCALAR) = DEREF
3426 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3427 This is needed so that we can handle dereferencing DEREF constraints. */
3430 do_deref (vec
<ce_s
> *constraints
)
3432 struct constraint_expr
*c
;
3435 FOR_EACH_VEC_ELT (*constraints
, i
, c
)
3437 if (c
->type
== SCALAR
)
3439 else if (c
->type
== ADDRESSOF
)
3441 else if (c
->type
== DEREF
)
3443 struct constraint_expr tmplhs
;
3444 tmplhs
= new_scalar_tmp_constraint_exp ("dereftmp", true);
3445 process_constraint (new_constraint (tmplhs
, *c
));
3446 c
->var
= tmplhs
.var
;
3453 /* Given a tree T, return the constraint expression for taking the
3457 get_constraint_for_address_of (tree t
, vec
<ce_s
> *results
)
3459 struct constraint_expr
*c
;
3462 get_constraint_for_1 (t
, results
, true, true);
3464 FOR_EACH_VEC_ELT (*results
, i
, c
)
3466 if (c
->type
== DEREF
)
3469 c
->type
= ADDRESSOF
;
3473 /* Given a tree T, return the constraint expression for it. */
3476 get_constraint_for_1 (tree t
, vec
<ce_s
> *results
, bool address_p
,
3479 struct constraint_expr temp
;
3481 /* x = integer is all glommed to a single variable, which doesn't
3482 point to anything by itself. That is, of course, unless it is an
3483 integer constant being treated as a pointer, in which case, we
3484 will return that this is really the addressof anything. This
3485 happens below, since it will fall into the default case. The only
3486 case we know something about an integer treated like a pointer is
3487 when it is the NULL pointer, and then we just say it points to
3490 Do not do that if -fno-delete-null-pointer-checks though, because
3491 in that case *NULL does not fail, so it _should_ alias *anything.
3492 It is not worth adding a new option or renaming the existing one,
3493 since this case is relatively obscure. */
3494 if ((TREE_CODE (t
) == INTEGER_CST
3495 && integer_zerop (t
))
3496 /* The only valid CONSTRUCTORs in gimple with pointer typed
3497 elements are zero-initializer. But in IPA mode we also
3498 process global initializers, so verify at least. */
3499 || (TREE_CODE (t
) == CONSTRUCTOR
3500 && CONSTRUCTOR_NELTS (t
) == 0))
3502 if (flag_delete_null_pointer_checks
)
3503 temp
.var
= nothing_id
;
3505 temp
.var
= nonlocal_id
;
3506 temp
.type
= ADDRESSOF
;
3508 results
->safe_push (temp
);
3512 /* String constants are read-only, ideally we'd have a CONST_DECL
3514 if (TREE_CODE (t
) == STRING_CST
)
3516 temp
.var
= string_id
;
3519 results
->safe_push (temp
);
3523 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
3525 case tcc_expression
:
3527 switch (TREE_CODE (t
))
3530 get_constraint_for_address_of (TREE_OPERAND (t
, 0), results
);
3538 switch (TREE_CODE (t
))
3542 struct constraint_expr cs
;
3544 get_constraint_for_ptr_offset (TREE_OPERAND (t
, 0),
3545 TREE_OPERAND (t
, 1), results
);
3548 /* If we are not taking the address then make sure to process
3549 all subvariables we might access. */
3553 cs
= results
->last ();
3554 if (cs
.type
== DEREF
3555 && type_can_have_subvars (TREE_TYPE (t
)))
3557 /* For dereferences this means we have to defer it
3559 results
->last ().offset
= UNKNOWN_OFFSET
;
3562 if (cs
.type
!= SCALAR
)
3565 vi
= get_varinfo (cs
.var
);
3566 curr
= vi_next (vi
);
3567 if (!vi
->is_full_var
3570 unsigned HOST_WIDE_INT size
;
3571 if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t
))))
3572 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t
)));
3575 for (; curr
; curr
= vi_next (curr
))
3577 if (curr
->offset
- vi
->offset
< size
)
3580 results
->safe_push (cs
);
3589 case ARRAY_RANGE_REF
:
3594 get_constraint_for_component_ref (t
, results
, address_p
, lhs_p
);
3596 case VIEW_CONVERT_EXPR
:
3597 get_constraint_for_1 (TREE_OPERAND (t
, 0), results
, address_p
,
3600 /* We are missing handling for TARGET_MEM_REF here. */
3605 case tcc_exceptional
:
3607 switch (TREE_CODE (t
))
3611 get_constraint_for_ssa_var (t
, results
, address_p
);
3619 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t
), i
, val
)
3621 struct constraint_expr
*rhsp
;
3623 get_constraint_for_1 (val
, &tmp
, address_p
, lhs_p
);
3624 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
3625 results
->safe_push (*rhsp
);
3628 /* We do not know whether the constructor was complete,
3629 so technically we have to add &NOTHING or &ANYTHING
3630 like we do for an empty constructor as well. */
3637 case tcc_declaration
:
3639 get_constraint_for_ssa_var (t
, results
, address_p
);
3644 /* We cannot refer to automatic variables through constants. */
3645 temp
.type
= ADDRESSOF
;
3646 temp
.var
= nonlocal_id
;
3648 results
->safe_push (temp
);
3654 /* The default fallback is a constraint from anything. */
3655 temp
.type
= ADDRESSOF
;
3656 temp
.var
= anything_id
;
3658 results
->safe_push (temp
);
3661 /* Given a gimple tree T, return the constraint expression vector for it. */
3664 get_constraint_for (tree t
, vec
<ce_s
> *results
)
3666 gcc_assert (results
->length () == 0);
3668 get_constraint_for_1 (t
, results
, false, true);
3671 /* Given a gimple tree T, return the constraint expression vector for it
3672 to be used as the rhs of a constraint. */
3675 get_constraint_for_rhs (tree t
, vec
<ce_s
> *results
)
3677 gcc_assert (results
->length () == 0);
3679 get_constraint_for_1 (t
, results
, false, false);
3683 /* Efficiently generates constraints from all entries in *RHSC to all
3684 entries in *LHSC. */
3687 process_all_all_constraints (vec
<ce_s
> lhsc
,
3690 struct constraint_expr
*lhsp
, *rhsp
;
3693 if (lhsc
.length () <= 1 || rhsc
.length () <= 1)
3695 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3696 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
3697 process_constraint (new_constraint (*lhsp
, *rhsp
));
3701 struct constraint_expr tmp
;
3702 tmp
= new_scalar_tmp_constraint_exp ("allalltmp", true);
3703 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
3704 process_constraint (new_constraint (tmp
, *rhsp
));
3705 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
3706 process_constraint (new_constraint (*lhsp
, tmp
));
3710 /* Handle aggregate copies by expanding into copies of the respective
3711 fields of the structures. */
3714 do_structure_copy (tree lhsop
, tree rhsop
)
3716 struct constraint_expr
*lhsp
, *rhsp
;
3717 auto_vec
<ce_s
> lhsc
;
3718 auto_vec
<ce_s
> rhsc
;
3721 get_constraint_for (lhsop
, &lhsc
);
3722 get_constraint_for_rhs (rhsop
, &rhsc
);
3725 if (lhsp
->type
== DEREF
3726 || (lhsp
->type
== ADDRESSOF
&& lhsp
->var
== anything_id
)
3727 || rhsp
->type
== DEREF
)
3729 if (lhsp
->type
== DEREF
)
3731 gcc_assert (lhsc
.length () == 1);
3732 lhsp
->offset
= UNKNOWN_OFFSET
;
3734 if (rhsp
->type
== DEREF
)
3736 gcc_assert (rhsc
.length () == 1);
3737 rhsp
->offset
= UNKNOWN_OFFSET
;
3739 process_all_all_constraints (lhsc
, rhsc
);
3741 else if (lhsp
->type
== SCALAR
3742 && (rhsp
->type
== SCALAR
3743 || rhsp
->type
== ADDRESSOF
))
3745 HOST_WIDE_INT lhssize
, lhsoffset
;
3746 HOST_WIDE_INT rhssize
, rhsoffset
;
3749 if (!get_ref_base_and_extent_hwi (lhsop
, &lhsoffset
, &lhssize
, &reverse
)
3750 || !get_ref_base_and_extent_hwi (rhsop
, &rhsoffset
, &rhssize
,
3753 process_all_all_constraints (lhsc
, rhsc
);
3756 for (j
= 0; lhsc
.iterate (j
, &lhsp
);)
3758 varinfo_t lhsv
, rhsv
;
3760 lhsv
= get_varinfo (lhsp
->var
);
3761 rhsv
= get_varinfo (rhsp
->var
);
3762 if (lhsv
->may_have_pointers
3763 && (lhsv
->is_full_var
3764 || rhsv
->is_full_var
3765 || ranges_overlap_p (lhsv
->offset
+ rhsoffset
, lhsv
->size
,
3766 rhsv
->offset
+ lhsoffset
, rhsv
->size
)))
3767 process_constraint (new_constraint (*lhsp
, *rhsp
));
3768 if (!rhsv
->is_full_var
3769 && (lhsv
->is_full_var
3770 || (lhsv
->offset
+ rhsoffset
+ lhsv
->size
3771 > rhsv
->offset
+ lhsoffset
+ rhsv
->size
)))
3774 if (k
>= rhsc
.length ())
3785 /* Create constraints ID = { rhsc }. */
3788 make_constraints_to (unsigned id
, vec
<ce_s
> rhsc
)
3790 struct constraint_expr
*c
;
3791 struct constraint_expr includes
;
3795 includes
.offset
= 0;
3796 includes
.type
= SCALAR
;
3798 FOR_EACH_VEC_ELT (rhsc
, j
, c
)
3799 process_constraint (new_constraint (includes
, *c
));
3802 /* Create a constraint ID = OP. */
3805 make_constraint_to (unsigned id
, tree op
)
3807 auto_vec
<ce_s
> rhsc
;
3808 get_constraint_for_rhs (op
, &rhsc
);
3809 make_constraints_to (id
, rhsc
);
3812 /* Create a constraint ID = &FROM. */
3815 make_constraint_from (varinfo_t vi
, int from
)
3817 struct constraint_expr lhs
, rhs
;
3825 rhs
.type
= ADDRESSOF
;
3826 process_constraint (new_constraint (lhs
, rhs
));
3829 /* Create a constraint ID = FROM. */
3832 make_copy_constraint (varinfo_t vi
, int from
)
3834 struct constraint_expr lhs
, rhs
;
3843 process_constraint (new_constraint (lhs
, rhs
));
3846 /* Make constraints necessary to make OP escape. */
3849 make_escape_constraint (tree op
)
3851 make_constraint_to (escaped_id
, op
);
3854 /* Make constraint necessary to make all indirect references
3858 make_indirect_escape_constraint (varinfo_t vi
)
3860 struct constraint_expr lhs
, rhs
;
3861 /* escaped = *(VAR + UNKNOWN); */
3863 lhs
.var
= escaped_id
;
3867 rhs
.offset
= UNKNOWN_OFFSET
;
3868 process_constraint (new_constraint (lhs
, rhs
));
3871 /* Add constraints to that the solution of VI is transitively closed. */
3874 make_transitive_closure_constraints (varinfo_t vi
)
3876 struct constraint_expr lhs
, rhs
;
3878 /* VAR = *(VAR + UNKNOWN); */
3884 rhs
.offset
= UNKNOWN_OFFSET
;
3885 process_constraint (new_constraint (lhs
, rhs
));
3888 /* Add constraints to that the solution of VI has all subvariables added. */
3891 make_any_offset_constraints (varinfo_t vi
)
3893 struct constraint_expr lhs
, rhs
;
3895 /* VAR = VAR + UNKNOWN; */
3901 rhs
.offset
= UNKNOWN_OFFSET
;
3902 process_constraint (new_constraint (lhs
, rhs
));
3905 /* Temporary storage for fake var decls. */
3906 struct obstack fake_var_decl_obstack
;
3908 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3911 build_fake_var_decl (tree type
)
3913 tree decl
= (tree
) XOBNEW (&fake_var_decl_obstack
, struct tree_var_decl
);
3914 memset (decl
, 0, sizeof (struct tree_var_decl
));
3915 TREE_SET_CODE (decl
, VAR_DECL
);
3916 TREE_TYPE (decl
) = type
;
3917 DECL_UID (decl
) = allocate_decl_uid ();
3918 SET_DECL_PT_UID (decl
, -1);
3919 layout_decl (decl
, 0);
3923 /* Create a new artificial heap variable with NAME.
3924 Return the created variable. */
3927 make_heapvar (const char *name
, bool add_id
)
3932 heapvar
= build_fake_var_decl (ptr_type_node
);
3933 DECL_EXTERNAL (heapvar
) = 1;
3935 vi
= new_var_info (heapvar
, name
, add_id
);
3936 vi
->is_heap_var
= true;
3937 vi
->is_unknown_size_var
= true;
3941 vi
->is_full_var
= true;
3942 insert_vi_for_tree (heapvar
, vi
);
3947 /* Create a new artificial heap variable with NAME and make a
3948 constraint from it to LHS. Set flags according to a tag used
3949 for tracking restrict pointers. */
3952 make_constraint_from_restrict (varinfo_t lhs
, const char *name
, bool add_id
)
3954 varinfo_t vi
= make_heapvar (name
, add_id
);
3955 vi
->is_restrict_var
= 1;
3956 vi
->is_global_var
= 1;
3957 vi
->may_have_pointers
= 1;
3958 make_constraint_from (lhs
, vi
->id
);
3962 /* Create a new artificial heap variable with NAME and make a
3963 constraint from it to LHS. Set flags according to a tag used
3964 for tracking restrict pointers and make the artificial heap
3965 point to global memory. */
3968 make_constraint_from_global_restrict (varinfo_t lhs
, const char *name
,
3971 varinfo_t vi
= make_constraint_from_restrict (lhs
, name
, add_id
);
3972 make_copy_constraint (vi
, nonlocal_id
);
/* In IPA mode there are varinfos for different aspects of reach
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3985 /* Get a constraint for the requested part of a function designator FI
3986 when operating in IPA mode. */
3988 static struct constraint_expr
3989 get_function_part_constraint (varinfo_t fi
, unsigned part
)
3991 struct constraint_expr c
;
3993 gcc_assert (in_ipa_mode
);
3995 if (fi
->id
== anything_id
)
3997 /* ??? We probably should have a ANYFN special variable. */
3998 c
.var
= anything_id
;
4002 else if (fi
->decl
&& TREE_CODE (fi
->decl
) == FUNCTION_DECL
)
4004 varinfo_t ai
= first_vi_for_offset (fi
, part
);
4008 c
.var
= anything_id
;
4022 /* For non-IPA mode, generate constraints necessary for a call on the
4026 handle_rhs_call (gcall
*stmt
, vec
<ce_s
> *results
)
4028 struct constraint_expr rhsc
;
4030 bool returns_uses
= false;
4032 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4034 tree arg
= gimple_call_arg (stmt
, i
);
4035 int flags
= gimple_call_arg_flags (stmt
, i
);
4037 /* If the argument is not used we can ignore it. */
4038 if (flags
& EAF_UNUSED
)
4041 /* As we compute ESCAPED context-insensitive we do not gain
4042 any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
4043 set. The argument would still get clobbered through the
4045 if ((flags
& EAF_NOCLOBBER
)
4046 && (flags
& (EAF_NOESCAPE
| EAF_NODIRECTESCAPE
)))
4048 varinfo_t uses
= get_call_use_vi (stmt
);
4049 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg", true);
4050 tem
->is_reg_var
= true;
4051 make_constraint_to (tem
->id
, arg
);
4052 make_any_offset_constraints (tem
);
4053 if (!(flags
& EAF_DIRECT
))
4054 make_transitive_closure_constraints (tem
);
4055 make_copy_constraint (uses
, tem
->id
);
4056 if (!(flags
& (EAF_NOESCAPE
| EAF_DIRECT
)))
4057 make_indirect_escape_constraint (tem
);
4058 returns_uses
= true;
4060 else if (flags
& (EAF_NOESCAPE
| EAF_NODIRECTESCAPE
))
4062 struct constraint_expr lhs
, rhs
;
4063 varinfo_t uses
= get_call_use_vi (stmt
);
4064 varinfo_t clobbers
= get_call_clobber_vi (stmt
);
4065 varinfo_t tem
= new_var_info (NULL_TREE
, "callarg", true);
4066 tem
->is_reg_var
= true;
4067 make_constraint_to (tem
->id
, arg
);
4068 make_any_offset_constraints (tem
);
4069 if (!(flags
& EAF_DIRECT
))
4070 make_transitive_closure_constraints (tem
);
4071 make_copy_constraint (uses
, tem
->id
);
4072 make_copy_constraint (clobbers
, tem
->id
);
4073 /* Add *tem = nonlocal, do not add *tem = callused as
4074 EAF_NOESCAPE parameters do not escape to other parameters
4075 and all other uses appear in NONLOCAL as well. */
4080 rhs
.var
= nonlocal_id
;
4082 process_constraint (new_constraint (lhs
, rhs
));
4083 if (!(flags
& (EAF_NOESCAPE
| EAF_DIRECT
)))
4084 make_indirect_escape_constraint (tem
);
4085 returns_uses
= true;
4088 make_escape_constraint (arg
);
4091 /* If we added to the calls uses solution make sure we account for
4092 pointers to it to be returned. */
4095 rhsc
.var
= get_call_use_vi (stmt
)->id
;
4096 rhsc
.offset
= UNKNOWN_OFFSET
;
4098 results
->safe_push (rhsc
);
4101 /* The static chain escapes as well. */
4102 if (gimple_call_chain (stmt
))
4103 make_escape_constraint (gimple_call_chain (stmt
));
4105 /* And if we applied NRV the address of the return slot escapes as well. */
4106 if (gimple_call_return_slot_opt_p (stmt
)
4107 && gimple_call_lhs (stmt
) != NULL_TREE
4108 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
4110 auto_vec
<ce_s
> tmpc
;
4111 struct constraint_expr lhsc
, *c
;
4112 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
4113 lhsc
.var
= escaped_id
;
4116 FOR_EACH_VEC_ELT (tmpc
, i
, c
)
4117 process_constraint (new_constraint (lhsc
, *c
));
4120 /* Regular functions return nonlocal memory. */
4121 rhsc
.var
= nonlocal_id
;
4124 results
->safe_push (rhsc
);
4127 /* For non-IPA mode, generate constraints necessary for a call
4128 that returns a pointer and assigns it to LHS. This simply makes
4129 the LHS point to global and escaped variables. */
4132 handle_lhs_call (gcall
*stmt
, tree lhs
, int flags
, vec
<ce_s
> rhsc
,
4135 auto_vec
<ce_s
> lhsc
;
4137 get_constraint_for (lhs
, &lhsc
);
4138 /* If the store is to a global decl make sure to
4139 add proper escape constraints. */
4140 lhs
= get_base_address (lhs
);
4143 && is_global_var (lhs
))
4145 struct constraint_expr tmpc
;
4146 tmpc
.var
= escaped_id
;
4149 lhsc
.safe_push (tmpc
);
4152 /* If the call returns an argument unmodified override the rhs
4154 if (flags
& ERF_RETURNS_ARG
4155 && (flags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (stmt
))
4159 arg
= gimple_call_arg (stmt
, flags
& ERF_RETURN_ARG_MASK
);
4160 get_constraint_for (arg
, &rhsc
);
4161 process_all_all_constraints (lhsc
, rhsc
);
4164 else if (flags
& ERF_NOALIAS
)
4167 struct constraint_expr tmpc
;
4169 vi
= make_heapvar ("HEAP", true);
4170 /* We are marking allocated storage local, we deal with it becoming
4171 global by escaping and setting of vars_contains_escaped_heap. */
4172 DECL_EXTERNAL (vi
->decl
) = 0;
4173 vi
->is_global_var
= 0;
4174 /* If this is not a real malloc call assume the memory was
4175 initialized and thus may point to global memory. All
4176 builtin functions with the malloc attribute behave in a sane way. */
4178 || !fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
4179 make_constraint_from (vi
, nonlocal_id
);
4182 tmpc
.type
= ADDRESSOF
;
4183 rhsc
.safe_push (tmpc
);
4184 process_all_all_constraints (lhsc
, rhsc
);
4188 process_all_all_constraints (lhsc
, rhsc
);
4191 /* For non-IPA mode, generate constraints necessary for a call of a
4192 const function that returns a pointer in the statement STMT. */
4195 handle_const_call (gcall
*stmt
, vec
<ce_s
> *results
)
4197 struct constraint_expr rhsc
;
4199 bool need_uses
= false;
4201 /* Treat nested const functions the same as pure functions as far
4202 as the static chain is concerned. */
4203 if (gimple_call_chain (stmt
))
4205 varinfo_t uses
= get_call_use_vi (stmt
);
4206 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4210 /* And if we applied NRV the address of the return slot escapes as well. */
4211 if (gimple_call_return_slot_opt_p (stmt
)
4212 && gimple_call_lhs (stmt
) != NULL_TREE
4213 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
4215 varinfo_t uses
= get_call_use_vi (stmt
);
4216 auto_vec
<ce_s
> tmpc
;
4217 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
4218 make_constraints_to (uses
->id
, tmpc
);
4224 varinfo_t uses
= get_call_use_vi (stmt
);
4225 make_any_offset_constraints (uses
);
4226 make_transitive_closure_constraints (uses
);
4227 rhsc
.var
= uses
->id
;
4230 results
->safe_push (rhsc
);
4233 /* May return offsetted arguments. */
4234 varinfo_t tem
= NULL
;
4235 if (gimple_call_num_args (stmt
) != 0)
4237 tem
= new_var_info (NULL_TREE
, "callarg", true);
4238 tem
->is_reg_var
= true;
4240 for (k
= 0; k
< gimple_call_num_args (stmt
); ++k
)
4242 tree arg
= gimple_call_arg (stmt
, k
);
4243 auto_vec
<ce_s
> argc
;
4244 get_constraint_for_rhs (arg
, &argc
);
4245 make_constraints_to (tem
->id
, argc
);
4252 ce
.offset
= UNKNOWN_OFFSET
;
4253 results
->safe_push (ce
);
4256 /* May return addresses of globals. */
4257 rhsc
.var
= nonlocal_id
;
4259 rhsc
.type
= ADDRESSOF
;
4260 results
->safe_push (rhsc
);
4263 /* For non-IPA mode, generate constraints necessary for a call to a
4264 pure function in statement STMT. */
4267 handle_pure_call (gcall
*stmt
, vec
<ce_s
> *results
)
4269 struct constraint_expr rhsc
;
4271 varinfo_t uses
= NULL
;
4273 /* Memory reached from pointer arguments is call-used. */
4274 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4276 tree arg
= gimple_call_arg (stmt
, i
);
4277 int flags
= gimple_call_arg_flags (stmt
, i
);
4279 /* If the argument is not used we can ignore it. */
4280 if (flags
& EAF_UNUSED
)
4284 uses
= get_call_use_vi (stmt
);
4285 make_any_offset_constraints (uses
);
4286 make_transitive_closure_constraints (uses
);
4288 make_constraint_to (uses
->id
, arg
);
4291 /* The static chain is used as well. */
4292 if (gimple_call_chain (stmt
))
4296 uses
= get_call_use_vi (stmt
);
4297 make_any_offset_constraints (uses
);
4298 make_transitive_closure_constraints (uses
);
4300 make_constraint_to (uses
->id
, gimple_call_chain (stmt
));
4303 /* And if we applied NRV the address of the return slot. */
4304 if (gimple_call_return_slot_opt_p (stmt
)
4305 && gimple_call_lhs (stmt
) != NULL_TREE
4306 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
4310 uses
= get_call_use_vi (stmt
);
4311 make_any_offset_constraints (uses
);
4312 make_transitive_closure_constraints (uses
);
4314 auto_vec
<ce_s
> tmpc
;
4315 get_constraint_for_address_of (gimple_call_lhs (stmt
), &tmpc
);
4316 make_constraints_to (uses
->id
, tmpc
);
4319 /* Pure functions may return call-used and nonlocal memory. */
4322 rhsc
.var
= uses
->id
;
4325 results
->safe_push (rhsc
);
4327 rhsc
.var
= nonlocal_id
;
4330 results
->safe_push (rhsc
);
4334 /* Return the varinfo for the callee of CALL. */
4337 get_fi_for_callee (gcall
*call
)
4339 tree decl
, fn
= gimple_call_fn (call
);
4341 if (fn
&& TREE_CODE (fn
) == OBJ_TYPE_REF
)
4342 fn
= OBJ_TYPE_REF_EXPR (fn
);
4344 /* If we can directly resolve the function being called, do so.
4345 Otherwise, it must be some sort of indirect expression that
4346 we should still be able to handle. */
4347 decl
= gimple_call_addr_fndecl (fn
);
4349 return get_vi_for_tree (decl
);
4351 /* If the function is anything other than a SSA name pointer we have no
4352 clue and should be getting ANYFN (well, ANYTHING for now). */
4353 if (!fn
|| TREE_CODE (fn
) != SSA_NAME
)
4354 return get_varinfo (anything_id
);
4356 if (SSA_NAME_IS_DEFAULT_DEF (fn
)
4357 && (TREE_CODE (SSA_NAME_VAR (fn
)) == PARM_DECL
4358 || TREE_CODE (SSA_NAME_VAR (fn
)) == RESULT_DECL
))
4359 fn
= SSA_NAME_VAR (fn
);
4361 return get_vi_for_tree (fn
);
4364 /* Create constraints for assigning call argument ARG to the incoming parameter
4365 INDEX of function FI. */
4368 find_func_aliases_for_call_arg (varinfo_t fi
, unsigned index
, tree arg
)
4370 struct constraint_expr lhs
;
4371 lhs
= get_function_part_constraint (fi
, fi_parm_base
+ index
);
4373 auto_vec
<ce_s
, 2> rhsc
;
4374 get_constraint_for_rhs (arg
, &rhsc
);
4377 struct constraint_expr
*rhsp
;
4378 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4379 process_constraint (new_constraint (lhs
, *rhsp
));
4382 /* Return true if FNDECL may be part of another lto partition. */
4385 fndecl_maybe_in_other_partition (tree fndecl
)
4387 cgraph_node
*fn_node
= cgraph_node::get (fndecl
);
4388 if (fn_node
== NULL
)
4391 return fn_node
->in_other_partition
;
4394 /* Create constraints for the builtin call T. Return true if the call
4395 was handled, otherwise false. */
4398 find_func_aliases_for_builtin_call (struct function
*fn
, gcall
*t
)
4400 tree fndecl
= gimple_call_fndecl (t
);
4401 auto_vec
<ce_s
, 2> lhsc
;
4402 auto_vec
<ce_s
, 4> rhsc
;
4405 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
4406 /* ??? All builtins that are handled here need to be handled
4407 in the alias-oracle query functions explicitly! */
4408 switch (DECL_FUNCTION_CODE (fndecl
))
4410 /* All the following functions return a pointer to the same object
4411 as their first argument points to. The functions do not add
4412 to the ESCAPED solution. The functions make the first argument
4413 pointed to memory point to what the second argument pointed to
4414 memory points to. */
4415 case BUILT_IN_STRCPY
:
4416 case BUILT_IN_STRNCPY
:
4417 case BUILT_IN_BCOPY
:
4418 case BUILT_IN_MEMCPY
:
4419 case BUILT_IN_MEMMOVE
:
4420 case BUILT_IN_MEMPCPY
:
4421 case BUILT_IN_STPCPY
:
4422 case BUILT_IN_STPNCPY
:
4423 case BUILT_IN_STRCAT
:
4424 case BUILT_IN_STRNCAT
:
4425 case BUILT_IN_STRCPY_CHK
:
4426 case BUILT_IN_STRNCPY_CHK
:
4427 case BUILT_IN_MEMCPY_CHK
:
4428 case BUILT_IN_MEMMOVE_CHK
:
4429 case BUILT_IN_MEMPCPY_CHK
:
4430 case BUILT_IN_STPCPY_CHK
:
4431 case BUILT_IN_STPNCPY_CHK
:
4432 case BUILT_IN_STRCAT_CHK
:
4433 case BUILT_IN_STRNCAT_CHK
:
4434 case BUILT_IN_TM_MEMCPY
:
4435 case BUILT_IN_TM_MEMMOVE
:
4437 tree res
= gimple_call_lhs (t
);
4438 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4439 == BUILT_IN_BCOPY
? 1 : 0));
4440 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (fndecl
)
4441 == BUILT_IN_BCOPY
? 0 : 1));
4442 if (res
!= NULL_TREE
)
4444 get_constraint_for (res
, &lhsc
);
4445 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY
4446 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY
4447 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY
4448 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_MEMPCPY_CHK
4449 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPCPY_CHK
4450 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STPNCPY_CHK
)
4451 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &rhsc
);
4453 get_constraint_for (dest
, &rhsc
);
4454 process_all_all_constraints (lhsc
, rhsc
);
4458 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4459 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4462 process_all_all_constraints (lhsc
, rhsc
);
4465 case BUILT_IN_MEMSET
:
4466 case BUILT_IN_MEMSET_CHK
:
4467 case BUILT_IN_TM_MEMSET
:
4469 tree res
= gimple_call_lhs (t
);
4470 tree dest
= gimple_call_arg (t
, 0);
4473 struct constraint_expr ac
;
4474 if (res
!= NULL_TREE
)
4476 get_constraint_for (res
, &lhsc
);
4477 get_constraint_for (dest
, &rhsc
);
4478 process_all_all_constraints (lhsc
, rhsc
);
4481 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
4483 if (flag_delete_null_pointer_checks
4484 && integer_zerop (gimple_call_arg (t
, 1)))
4486 ac
.type
= ADDRESSOF
;
4487 ac
.var
= nothing_id
;
4492 ac
.var
= integer_id
;
4495 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4496 process_constraint (new_constraint (*lhsp
, ac
));
4499 case BUILT_IN_STACK_SAVE
:
4500 case BUILT_IN_STACK_RESTORE
:
4501 /* Nothing interesting happens. */
4503 case BUILT_IN_ALLOCA
:
4504 case BUILT_IN_ALLOCA_WITH_ALIGN
:
4505 case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
:
4507 tree ptr
= gimple_call_lhs (t
);
4508 if (ptr
== NULL_TREE
)
4510 get_constraint_for (ptr
, &lhsc
);
4511 varinfo_t vi
= make_heapvar ("HEAP", true);
4512 /* Alloca storage is never global. To exempt it from escaped
4513 handling make it a non-heap var. */
4514 DECL_EXTERNAL (vi
->decl
) = 0;
4515 vi
->is_global_var
= 0;
4516 vi
->is_heap_var
= 0;
4517 struct constraint_expr tmpc
;
4520 tmpc
.type
= ADDRESSOF
;
4521 rhsc
.safe_push (tmpc
);
4522 process_all_all_constraints (lhsc
, rhsc
);
4525 case BUILT_IN_POSIX_MEMALIGN
:
4527 tree ptrptr
= gimple_call_arg (t
, 0);
4528 get_constraint_for (ptrptr
, &lhsc
);
4530 varinfo_t vi
= make_heapvar ("HEAP", true);
4531 /* We are marking allocated storage local, we deal with it becoming
4532 global by escaping and setting of vars_contains_escaped_heap. */
4533 DECL_EXTERNAL (vi
->decl
) = 0;
4534 vi
->is_global_var
= 0;
4535 struct constraint_expr tmpc
;
4538 tmpc
.type
= ADDRESSOF
;
4539 rhsc
.safe_push (tmpc
);
4540 process_all_all_constraints (lhsc
, rhsc
);
4543 case BUILT_IN_ASSUME_ALIGNED
:
4545 tree res
= gimple_call_lhs (t
);
4546 tree dest
= gimple_call_arg (t
, 0);
4547 if (res
!= NULL_TREE
)
4549 get_constraint_for (res
, &lhsc
);
4550 get_constraint_for (dest
, &rhsc
);
4551 process_all_all_constraints (lhsc
, rhsc
);
4555 /* All the following functions do not return pointers, do not
4556 modify the points-to sets of memory reachable from their
4557 arguments and do not add to the ESCAPED solution. */
4558 case BUILT_IN_SINCOS
:
4559 case BUILT_IN_SINCOSF
:
4560 case BUILT_IN_SINCOSL
:
4561 case BUILT_IN_FREXP
:
4562 case BUILT_IN_FREXPF
:
4563 case BUILT_IN_FREXPL
:
4564 case BUILT_IN_GAMMA_R
:
4565 case BUILT_IN_GAMMAF_R
:
4566 case BUILT_IN_GAMMAL_R
:
4567 case BUILT_IN_LGAMMA_R
:
4568 case BUILT_IN_LGAMMAF_R
:
4569 case BUILT_IN_LGAMMAL_R
:
4571 case BUILT_IN_MODFF
:
4572 case BUILT_IN_MODFL
:
4573 case BUILT_IN_REMQUO
:
4574 case BUILT_IN_REMQUOF
:
4575 case BUILT_IN_REMQUOL
:
4578 case BUILT_IN_STRDUP
:
4579 case BUILT_IN_STRNDUP
:
4580 case BUILT_IN_REALLOC
:
4581 if (gimple_call_lhs (t
))
4583 handle_lhs_call (t
, gimple_call_lhs (t
),
4584 gimple_call_return_flags (t
) | ERF_NOALIAS
,
4586 get_constraint_for_ptr_offset (gimple_call_lhs (t
),
4588 get_constraint_for_ptr_offset (gimple_call_arg (t
, 0),
4592 process_all_all_constraints (lhsc
, rhsc
);
4595 /* For realloc the resulting pointer can be equal to the
4596 argument as well. But only doing this wouldn't be
4597 correct because with ptr == 0 realloc behaves like malloc. */
4598 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_REALLOC
)
4600 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4601 get_constraint_for (gimple_call_arg (t
, 0), &rhsc
);
4602 process_all_all_constraints (lhsc
, rhsc
);
4607 /* String / character search functions return a pointer into the
4608 source string or NULL. */
4609 case BUILT_IN_INDEX
:
4610 case BUILT_IN_STRCHR
:
4611 case BUILT_IN_STRRCHR
:
4612 case BUILT_IN_MEMCHR
:
4613 case BUILT_IN_STRSTR
:
4614 case BUILT_IN_STRPBRK
:
4615 if (gimple_call_lhs (t
))
4617 tree src
= gimple_call_arg (t
, 0);
4618 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
4619 constraint_expr nul
;
4620 nul
.var
= nothing_id
;
4622 nul
.type
= ADDRESSOF
;
4623 rhsc
.safe_push (nul
);
4624 get_constraint_for (gimple_call_lhs (t
), &lhsc
);
4625 process_all_all_constraints (lhsc
, rhsc
);
4628 /* Pure functions that return something not based on any object and
4629 that use the memory pointed to by their arguments (but not
4631 case BUILT_IN_STRCMP
:
4632 case BUILT_IN_STRCMP_EQ
:
4633 case BUILT_IN_STRNCMP
:
4634 case BUILT_IN_STRNCMP_EQ
:
4635 case BUILT_IN_STRCASECMP
:
4636 case BUILT_IN_STRNCASECMP
:
4637 case BUILT_IN_MEMCMP
:
4639 case BUILT_IN_STRSPN
:
4640 case BUILT_IN_STRCSPN
:
4642 varinfo_t uses
= get_call_use_vi (t
);
4643 make_any_offset_constraints (uses
);
4644 make_constraint_to (uses
->id
, gimple_call_arg (t
, 0));
4645 make_constraint_to (uses
->id
, gimple_call_arg (t
, 1));
4646 /* No constraints are necessary for the return value. */
4649 case BUILT_IN_STRLEN
:
4651 varinfo_t uses
= get_call_use_vi (t
);
4652 make_any_offset_constraints (uses
);
4653 make_constraint_to (uses
->id
, gimple_call_arg (t
, 0));
4654 /* No constraints are necessary for the return value. */
4657 case BUILT_IN_OBJECT_SIZE
:
4658 case BUILT_IN_CONSTANT_P
:
4660 /* No constraints are necessary for the return value or the
4664 /* Trampolines are special - they set up passing the static
4666 case BUILT_IN_INIT_TRAMPOLINE
:
4668 tree tramp
= gimple_call_arg (t
, 0);
4669 tree nfunc
= gimple_call_arg (t
, 1);
4670 tree frame
= gimple_call_arg (t
, 2);
4672 struct constraint_expr lhs
, *rhsp
;
4675 varinfo_t nfi
= NULL
;
4676 gcc_assert (TREE_CODE (nfunc
) == ADDR_EXPR
);
4677 nfi
= lookup_vi_for_tree (TREE_OPERAND (nfunc
, 0));
4680 lhs
= get_function_part_constraint (nfi
, fi_static_chain
);
4681 get_constraint_for (frame
, &rhsc
);
4682 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
4683 process_constraint (new_constraint (lhs
, *rhsp
));
4686 /* Make the frame point to the function for
4687 the trampoline adjustment call. */
4688 get_constraint_for (tramp
, &lhsc
);
4690 get_constraint_for (nfunc
, &rhsc
);
4691 process_all_all_constraints (lhsc
, rhsc
);
4696 /* Else fallthru to generic handling which will let
4697 the frame escape. */
4700 case BUILT_IN_ADJUST_TRAMPOLINE
:
4702 tree tramp
= gimple_call_arg (t
, 0);
4703 tree res
= gimple_call_lhs (t
);
4704 if (in_ipa_mode
&& res
)
4706 get_constraint_for (res
, &lhsc
);
4707 get_constraint_for (tramp
, &rhsc
);
4709 process_all_all_constraints (lhsc
, rhsc
);
4713 CASE_BUILT_IN_TM_STORE (1):
4714 CASE_BUILT_IN_TM_STORE (2):
4715 CASE_BUILT_IN_TM_STORE (4):
4716 CASE_BUILT_IN_TM_STORE (8):
4717 CASE_BUILT_IN_TM_STORE (FLOAT
):
4718 CASE_BUILT_IN_TM_STORE (DOUBLE
):
4719 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
4720 CASE_BUILT_IN_TM_STORE (M64
):
4721 CASE_BUILT_IN_TM_STORE (M128
):
4722 CASE_BUILT_IN_TM_STORE (M256
):
4724 tree addr
= gimple_call_arg (t
, 0);
4725 tree src
= gimple_call_arg (t
, 1);
4727 get_constraint_for (addr
, &lhsc
);
4729 get_constraint_for (src
, &rhsc
);
4730 process_all_all_constraints (lhsc
, rhsc
);
4733 CASE_BUILT_IN_TM_LOAD (1):
4734 CASE_BUILT_IN_TM_LOAD (2):
4735 CASE_BUILT_IN_TM_LOAD (4):
4736 CASE_BUILT_IN_TM_LOAD (8):
4737 CASE_BUILT_IN_TM_LOAD (FLOAT
):
4738 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
4739 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
4740 CASE_BUILT_IN_TM_LOAD (M64
):
4741 CASE_BUILT_IN_TM_LOAD (M128
):
4742 CASE_BUILT_IN_TM_LOAD (M256
):
4744 tree dest
= gimple_call_lhs (t
);
4745 tree addr
= gimple_call_arg (t
, 0);
4747 get_constraint_for (dest
, &lhsc
);
4748 get_constraint_for (addr
, &rhsc
);
4750 process_all_all_constraints (lhsc
, rhsc
);
4753 /* Variadic argument handling needs to be handled in IPA
4755 case BUILT_IN_VA_START
:
4757 tree valist
= gimple_call_arg (t
, 0);
4758 struct constraint_expr rhs
, *lhsp
;
4760 get_constraint_for_ptr_offset (valist
, NULL_TREE
, &lhsc
);
4762 /* The va_list gets access to pointers in variadic
4763 arguments. Which we know in the case of IPA analysis
4764 and otherwise are just all nonlocal variables. */
4767 fi
= lookup_vi_for_tree (fn
->decl
);
4768 rhs
= get_function_part_constraint (fi
, ~0);
4769 rhs
.type
= ADDRESSOF
;
4773 rhs
.var
= nonlocal_id
;
4774 rhs
.type
= ADDRESSOF
;
4777 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
4778 process_constraint (new_constraint (*lhsp
, rhs
));
4779 /* va_list is clobbered. */
4780 make_constraint_to (get_call_clobber_vi (t
)->id
, valist
);
4783 /* va_end doesn't have any effect that matters. */
4784 case BUILT_IN_VA_END
:
4786 /* Alternate return. Simply give up for now. */
4787 case BUILT_IN_RETURN
:
4791 || !(fi
= get_vi_for_tree (fn
->decl
)))
4792 make_constraint_from (get_varinfo (escaped_id
), anything_id
);
4793 else if (in_ipa_mode
4796 struct constraint_expr lhs
, rhs
;
4797 lhs
= get_function_part_constraint (fi
, fi_result
);
4798 rhs
.var
= anything_id
;
4801 process_constraint (new_constraint (lhs
, rhs
));
4805 case BUILT_IN_GOMP_PARALLEL
:
4806 case BUILT_IN_GOACC_PARALLEL
:
4810 unsigned int fnpos
, argpos
;
4811 switch (DECL_FUNCTION_CODE (fndecl
))
4813 case BUILT_IN_GOMP_PARALLEL
:
4814 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
4818 case BUILT_IN_GOACC_PARALLEL
:
4819 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
4820 sizes, kinds, ...). */
4828 tree fnarg
= gimple_call_arg (t
, fnpos
);
4829 gcc_assert (TREE_CODE (fnarg
) == ADDR_EXPR
);
4830 tree fndecl
= TREE_OPERAND (fnarg
, 0);
4831 if (fndecl_maybe_in_other_partition (fndecl
))
4832 /* Fallthru to general call handling. */
4835 tree arg
= gimple_call_arg (t
, argpos
);
4837 varinfo_t fi
= get_vi_for_tree (fndecl
);
4838 find_func_aliases_for_call_arg (fi
, 0, arg
);
4841 /* Else fallthru to generic call handling. */
4844 /* printf-style functions may have hooks to set pointers to
4845 point to somewhere into the generated string. Leave them
4846 for a later exercise... */
4848 /* Fallthru to general call handling. */;
4854 /* Create constraints for the call T. */
4857 find_func_aliases_for_call (struct function
*fn
, gcall
*t
)
4859 tree fndecl
= gimple_call_fndecl (t
);
4862 if (fndecl
!= NULL_TREE
4863 && fndecl_built_in_p (fndecl
)
4864 && find_func_aliases_for_builtin_call (fn
, t
))
4867 fi
= get_fi_for_callee (t
);
4869 || (fi
->decl
&& fndecl
&& !fi
->is_fn_info
))
4871 auto_vec
<ce_s
, 16> rhsc
;
4872 int flags
= gimple_call_flags (t
);
4874 /* Const functions can return their arguments and addresses
4875 of global memory but not of escaped memory. */
4876 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
4878 if (gimple_call_lhs (t
))
4879 handle_const_call (t
, &rhsc
);
4881 /* Pure functions can return addresses in and of memory
4882 reachable from their arguments, but they are not an escape
4883 point for reachable memory of their arguments. */
4884 else if (flags
& (ECF_PURE
|ECF_LOOPING_CONST_OR_PURE
))
4885 handle_pure_call (t
, &rhsc
);
4886 /* If the call is to a replaceable operator delete and results
4887 from a delete expression as opposed to a direct call to
4888 such operator, then the effects for PTA (in particular
4889 the escaping of the pointer) can be ignored. */
4891 && DECL_IS_OPERATOR_DELETE_P (fndecl
)
4892 && gimple_call_from_new_or_delete (t
))
4895 handle_rhs_call (t
, &rhsc
);
4896 if (gimple_call_lhs (t
))
4897 handle_lhs_call (t
, gimple_call_lhs (t
),
4898 gimple_call_return_flags (t
), rhsc
, fndecl
);
4902 auto_vec
<ce_s
, 2> rhsc
;
4906 /* Assign all the passed arguments to the appropriate incoming
4907 parameters of the function. */
4908 for (j
= 0; j
< gimple_call_num_args (t
); j
++)
4910 tree arg
= gimple_call_arg (t
, j
);
4911 find_func_aliases_for_call_arg (fi
, j
, arg
);
4914 /* If we are returning a value, assign it to the result. */
4915 lhsop
= gimple_call_lhs (t
);
4918 auto_vec
<ce_s
, 2> lhsc
;
4919 struct constraint_expr rhs
;
4920 struct constraint_expr
*lhsp
;
4921 bool aggr_p
= aggregate_value_p (lhsop
, gimple_call_fntype (t
));
4923 get_constraint_for (lhsop
, &lhsc
);
4924 rhs
= get_function_part_constraint (fi
, fi_result
);
4927 auto_vec
<ce_s
, 2> tem
;
4928 tem
.quick_push (rhs
);
4930 gcc_checking_assert (tem
.length () == 1);
4933 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
4934 process_constraint (new_constraint (*lhsp
, rhs
));
4936 /* If we pass the result decl by reference, honor that. */
4939 struct constraint_expr lhs
;
4940 struct constraint_expr
*rhsp
;
4942 get_constraint_for_address_of (lhsop
, &rhsc
);
4943 lhs
= get_function_part_constraint (fi
, fi_result
);
4944 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4945 process_constraint (new_constraint (lhs
, *rhsp
));
4950 /* If we use a static chain, pass it along. */
4951 if (gimple_call_chain (t
))
4953 struct constraint_expr lhs
;
4954 struct constraint_expr
*rhsp
;
4956 get_constraint_for (gimple_call_chain (t
), &rhsc
);
4957 lhs
= get_function_part_constraint (fi
, fi_static_chain
);
4958 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
4959 process_constraint (new_constraint (lhs
, *rhsp
));
4964 /* Walk statement T setting up aliasing constraints according to the
4965 references found in T. This function is the main part of the
4966 constraint builder. AI points to auxiliary alias information used
4967 when building alias sets and computing alias grouping heuristics. */
4970 find_func_aliases (struct function
*fn
, gimple
*origt
)
4973 auto_vec
<ce_s
, 16> lhsc
;
4974 auto_vec
<ce_s
, 16> rhsc
;
4977 /* Now build constraints expressions. */
4978 if (gimple_code (t
) == GIMPLE_PHI
)
4980 /* For a phi node, assign all the arguments to
4982 get_constraint_for (gimple_phi_result (t
), &lhsc
);
4983 for (unsigned i
= 0; i
< gimple_phi_num_args (t
); i
++)
4985 get_constraint_for_rhs (gimple_phi_arg_def (t
, i
), &rhsc
);
4986 process_all_all_constraints (lhsc
, rhsc
);
4990 /* In IPA mode, we need to generate constraints to pass call
4991 arguments through their calls. There are two cases,
4992 either a GIMPLE_CALL returning a value, or just a plain
4993 GIMPLE_CALL when we are not.
4995 In non-ipa mode, we need to generate constraints for each
4996 pointer passed by address. */
4997 else if (is_gimple_call (t
))
4998 find_func_aliases_for_call (fn
, as_a
<gcall
*> (t
));
5000 /* Otherwise, just a regular assignment statement. Only care about
5001 operations with pointer result, others are dealt with as escape
5002 points if they have pointer operands. */
5003 else if (is_gimple_assign (t
))
5005 /* Otherwise, just a regular assignment statement. */
5006 tree lhsop
= gimple_assign_lhs (t
);
5007 tree rhsop
= (gimple_num_ops (t
) == 2) ? gimple_assign_rhs1 (t
) : NULL
;
5009 if (rhsop
&& TREE_CLOBBER_P (rhsop
))
5010 /* Ignore clobbers, they don't actually store anything into
5013 else if (rhsop
&& AGGREGATE_TYPE_P (TREE_TYPE (lhsop
)))
5014 do_structure_copy (lhsop
, rhsop
);
5017 enum tree_code code
= gimple_assign_rhs_code (t
);
5019 get_constraint_for (lhsop
, &lhsc
);
5021 if (code
== POINTER_PLUS_EXPR
)
5022 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
5023 gimple_assign_rhs2 (t
), &rhsc
);
5024 else if (code
== POINTER_DIFF_EXPR
)
5025 /* The result is not a pointer (part). */
5027 else if (code
== BIT_AND_EXPR
5028 && TREE_CODE (gimple_assign_rhs2 (t
)) == INTEGER_CST
)
5030 /* Aligning a pointer via a BIT_AND_EXPR is offsetting
5031 the pointer. Handle it by offsetting it by UNKNOWN. */
5032 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
5035 else if (code
== TRUNC_DIV_EXPR
5036 || code
== CEIL_DIV_EXPR
5037 || code
== FLOOR_DIV_EXPR
5038 || code
== ROUND_DIV_EXPR
5039 || code
== EXACT_DIV_EXPR
5040 || code
== TRUNC_MOD_EXPR
5041 || code
== CEIL_MOD_EXPR
5042 || code
== FLOOR_MOD_EXPR
5043 || code
== ROUND_MOD_EXPR
)
5044 /* Division and modulo transfer the pointer from the LHS. */
5045 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
5047 else if (CONVERT_EXPR_CODE_P (code
)
5048 || gimple_assign_single_p (t
))
5049 /* See through conversions, single RHS are handled by
5050 get_constraint_for_rhs. */
5051 get_constraint_for_rhs (rhsop
, &rhsc
);
5052 else if (code
== COND_EXPR
)
5054 /* The result is a merge of both COND_EXPR arms. */
5055 auto_vec
<ce_s
, 2> tmp
;
5056 struct constraint_expr
*rhsp
;
5058 get_constraint_for_rhs (gimple_assign_rhs2 (t
), &rhsc
);
5059 get_constraint_for_rhs (gimple_assign_rhs3 (t
), &tmp
);
5060 FOR_EACH_VEC_ELT (tmp
, i
, rhsp
)
5061 rhsc
.safe_push (*rhsp
);
5063 else if (truth_value_p (code
))
5064 /* Truth value results are not pointer (parts). Or at least
5065 very unreasonable obfuscation of a part. */
5069 /* All other operations are possibly offsetting merges. */
5070 auto_vec
<ce_s
, 4> tmp
;
5071 struct constraint_expr
*rhsp
;
5073 get_constraint_for_ptr_offset (gimple_assign_rhs1 (t
),
5075 for (i
= 2; i
< gimple_num_ops (t
); ++i
)
5077 get_constraint_for_ptr_offset (gimple_op (t
, i
),
5079 FOR_EACH_VEC_ELT (tmp
, j
, rhsp
)
5080 rhsc
.safe_push (*rhsp
);
5084 process_all_all_constraints (lhsc
, rhsc
);
5086 /* If there is a store to a global variable the rhs escapes. */
5087 if ((lhsop
= get_base_address (lhsop
)) != NULL_TREE
5090 varinfo_t vi
= get_vi_for_tree (lhsop
);
5091 if ((! in_ipa_mode
&& vi
->is_global_var
)
5092 || vi
->is_ipa_escape_point
)
5093 make_escape_constraint (rhsop
);
5096 /* Handle escapes through return. */
5097 else if (gimple_code (t
) == GIMPLE_RETURN
5098 && gimple_return_retval (as_a
<greturn
*> (t
)) != NULL_TREE
)
5100 greturn
*return_stmt
= as_a
<greturn
*> (t
);
5103 && SSA_VAR_P (gimple_return_retval (return_stmt
)))
5105 /* We handle simple returns by post-processing the solutions. */
5108 if (!(fi
= get_vi_for_tree (fn
->decl
)))
5109 make_escape_constraint (gimple_return_retval (return_stmt
));
5110 else if (in_ipa_mode
)
5112 struct constraint_expr lhs
;
5113 struct constraint_expr
*rhsp
;
5116 lhs
= get_function_part_constraint (fi
, fi_result
);
5117 get_constraint_for_rhs (gimple_return_retval (return_stmt
), &rhsc
);
5118 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5119 process_constraint (new_constraint (lhs
, *rhsp
));
5122 /* Handle asms conservatively by adding escape constraints to everything. */
5123 else if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (t
))
5125 unsigned i
, noutputs
;
5126 const char **oconstraints
;
5127 const char *constraint
;
5128 bool allows_mem
, allows_reg
, is_inout
;
5130 noutputs
= gimple_asm_noutputs (asm_stmt
);
5131 oconstraints
= XALLOCAVEC (const char *, noutputs
);
5133 for (i
= 0; i
< noutputs
; ++i
)
5135 tree link
= gimple_asm_output_op (asm_stmt
, i
);
5136 tree op
= TREE_VALUE (link
);
5138 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5139 oconstraints
[i
] = constraint
;
5140 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
5141 &allows_reg
, &is_inout
);
5143 /* A memory constraint makes the address of the operand escape. */
5144 if (!allows_reg
&& allows_mem
)
5145 make_escape_constraint (build_fold_addr_expr (op
));
5147 /* The asm may read global memory, so outputs may point to
5148 any global memory. */
5151 auto_vec
<ce_s
, 2> lhsc
;
5152 struct constraint_expr rhsc
, *lhsp
;
5154 get_constraint_for (op
, &lhsc
);
5155 rhsc
.var
= nonlocal_id
;
5158 FOR_EACH_VEC_ELT (lhsc
, j
, lhsp
)
5159 process_constraint (new_constraint (*lhsp
, rhsc
));
5162 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
5164 tree link
= gimple_asm_input_op (asm_stmt
, i
);
5165 tree op
= TREE_VALUE (link
);
5167 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5169 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0, oconstraints
,
5170 &allows_mem
, &allows_reg
);
5172 /* A memory constraint makes the address of the operand escape. */
5173 if (!allows_reg
&& allows_mem
)
5174 make_escape_constraint (build_fold_addr_expr (op
));
5175 /* Strictly we'd only need the constraint to ESCAPED if
5176 the asm clobbers memory, otherwise using something
5177 along the lines of per-call clobbers/uses would be enough. */
5179 make_escape_constraint (op
);
5185 /* Create a constraint adding to the clobber set of FI the memory
5186 pointed to by PTR. */
5189 process_ipa_clobber (varinfo_t fi
, tree ptr
)
5191 vec
<ce_s
> ptrc
= vNULL
;
5192 struct constraint_expr
*c
, lhs
;
5194 get_constraint_for_rhs (ptr
, &ptrc
);
5195 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5196 FOR_EACH_VEC_ELT (ptrc
, i
, c
)
5197 process_constraint (new_constraint (lhs
, *c
));
5201 /* Walk statement T setting up clobber and use constraints according to the
5202 references found in T. This function is a main part of the
5203 IPA constraint builder. */
5206 find_func_clobbers (struct function
*fn
, gimple
*origt
)
5209 auto_vec
<ce_s
, 16> lhsc
;
5210 auto_vec
<ce_s
, 16> rhsc
;
5213 /* Add constraints for clobbered/used in IPA mode.
5214 We are not interested in what automatic variables are clobbered
5215 or used as we only use the information in the caller to which
5216 they do not escape. */
5217 gcc_assert (in_ipa_mode
);
5219 /* If the stmt refers to memory in any way it better had a VUSE. */
5220 if (gimple_vuse (t
) == NULL_TREE
)
5223 /* We'd better have function information for the current function. */
5224 fi
= lookup_vi_for_tree (fn
->decl
);
5225 gcc_assert (fi
!= NULL
);
5227 /* Account for stores in assignments and calls. */
5228 if (gimple_vdef (t
) != NULL_TREE
5229 && gimple_has_lhs (t
))
5231 tree lhs
= gimple_get_lhs (t
);
5233 while (handled_component_p (tem
))
5234 tem
= TREE_OPERAND (tem
, 0);
5236 && !auto_var_in_fn_p (tem
, fn
->decl
))
5237 || INDIRECT_REF_P (tem
)
5238 || (TREE_CODE (tem
) == MEM_REF
5239 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
5241 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), fn
->decl
))))
5243 struct constraint_expr lhsc
, *rhsp
;
5245 lhsc
= get_function_part_constraint (fi
, fi_clobbers
);
5246 get_constraint_for_address_of (lhs
, &rhsc
);
5247 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5248 process_constraint (new_constraint (lhsc
, *rhsp
));
5253 /* Account for uses in assigments and returns. */
5254 if (gimple_assign_single_p (t
)
5255 || (gimple_code (t
) == GIMPLE_RETURN
5256 && gimple_return_retval (as_a
<greturn
*> (t
)) != NULL_TREE
))
5258 tree rhs
= (gimple_assign_single_p (t
)
5259 ? gimple_assign_rhs1 (t
)
5260 : gimple_return_retval (as_a
<greturn
*> (t
)));
5262 while (handled_component_p (tem
))
5263 tem
= TREE_OPERAND (tem
, 0);
5265 && !auto_var_in_fn_p (tem
, fn
->decl
))
5266 || INDIRECT_REF_P (tem
)
5267 || (TREE_CODE (tem
) == MEM_REF
5268 && !(TREE_CODE (TREE_OPERAND (tem
, 0)) == ADDR_EXPR
5270 (TREE_OPERAND (TREE_OPERAND (tem
, 0), 0), fn
->decl
))))
5272 struct constraint_expr lhs
, *rhsp
;
5274 lhs
= get_function_part_constraint (fi
, fi_uses
);
5275 get_constraint_for_address_of (rhs
, &rhsc
);
5276 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5277 process_constraint (new_constraint (lhs
, *rhsp
));
5282 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (t
))
5284 varinfo_t cfi
= NULL
;
5285 tree decl
= gimple_call_fndecl (t
);
5286 struct constraint_expr lhs
, rhs
;
5289 /* For builtins we do not have separate function info. For those
5290 we do not generate escapes for we have to generate clobbers/uses. */
5291 if (gimple_call_builtin_p (t
, BUILT_IN_NORMAL
))
5292 switch (DECL_FUNCTION_CODE (decl
))
5294 /* The following functions use and clobber memory pointed to
5295 by their arguments. */
5296 case BUILT_IN_STRCPY
:
5297 case BUILT_IN_STRNCPY
:
5298 case BUILT_IN_BCOPY
:
5299 case BUILT_IN_MEMCPY
:
5300 case BUILT_IN_MEMMOVE
:
5301 case BUILT_IN_MEMPCPY
:
5302 case BUILT_IN_STPCPY
:
5303 case BUILT_IN_STPNCPY
:
5304 case BUILT_IN_STRCAT
:
5305 case BUILT_IN_STRNCAT
:
5306 case BUILT_IN_STRCPY_CHK
:
5307 case BUILT_IN_STRNCPY_CHK
:
5308 case BUILT_IN_MEMCPY_CHK
:
5309 case BUILT_IN_MEMMOVE_CHK
:
5310 case BUILT_IN_MEMPCPY_CHK
:
5311 case BUILT_IN_STPCPY_CHK
:
5312 case BUILT_IN_STPNCPY_CHK
:
5313 case BUILT_IN_STRCAT_CHK
:
5314 case BUILT_IN_STRNCAT_CHK
:
5316 tree dest
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
5317 == BUILT_IN_BCOPY
? 1 : 0));
5318 tree src
= gimple_call_arg (t
, (DECL_FUNCTION_CODE (decl
)
5319 == BUILT_IN_BCOPY
? 0 : 1));
5321 struct constraint_expr
*rhsp
, *lhsp
;
5322 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
5323 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5324 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
5325 process_constraint (new_constraint (lhs
, *lhsp
));
5326 get_constraint_for_ptr_offset (src
, NULL_TREE
, &rhsc
);
5327 lhs
= get_function_part_constraint (fi
, fi_uses
);
5328 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
5329 process_constraint (new_constraint (lhs
, *rhsp
));
5332 /* The following function clobbers memory pointed to by
5334 case BUILT_IN_MEMSET
:
5335 case BUILT_IN_MEMSET_CHK
:
5336 case BUILT_IN_POSIX_MEMALIGN
:
5338 tree dest
= gimple_call_arg (t
, 0);
5341 get_constraint_for_ptr_offset (dest
, NULL_TREE
, &lhsc
);
5342 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5343 FOR_EACH_VEC_ELT (lhsc
, i
, lhsp
)
5344 process_constraint (new_constraint (lhs
, *lhsp
));
5347 /* The following functions clobber their second and third
5349 case BUILT_IN_SINCOS
:
5350 case BUILT_IN_SINCOSF
:
5351 case BUILT_IN_SINCOSL
:
5353 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
5354 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
5357 /* The following functions clobber their second argument. */
5358 case BUILT_IN_FREXP
:
5359 case BUILT_IN_FREXPF
:
5360 case BUILT_IN_FREXPL
:
5361 case BUILT_IN_LGAMMA_R
:
5362 case BUILT_IN_LGAMMAF_R
:
5363 case BUILT_IN_LGAMMAL_R
:
5364 case BUILT_IN_GAMMA_R
:
5365 case BUILT_IN_GAMMAF_R
:
5366 case BUILT_IN_GAMMAL_R
:
5368 case BUILT_IN_MODFF
:
5369 case BUILT_IN_MODFL
:
5371 process_ipa_clobber (fi
, gimple_call_arg (t
, 1));
5374 /* The following functions clobber their third argument. */
5375 case BUILT_IN_REMQUO
:
5376 case BUILT_IN_REMQUOF
:
5377 case BUILT_IN_REMQUOL
:
5379 process_ipa_clobber (fi
, gimple_call_arg (t
, 2));
5382 /* The following functions neither read nor clobber memory. */
5383 case BUILT_IN_ASSUME_ALIGNED
:
5386 /* Trampolines are of no interest to us. */
5387 case BUILT_IN_INIT_TRAMPOLINE
:
5388 case BUILT_IN_ADJUST_TRAMPOLINE
:
5390 case BUILT_IN_VA_START
:
5391 case BUILT_IN_VA_END
:
5393 case BUILT_IN_GOMP_PARALLEL
:
5394 case BUILT_IN_GOACC_PARALLEL
:
5396 unsigned int fnpos
, argpos
;
5397 unsigned int implicit_use_args
[2];
5398 unsigned int num_implicit_use_args
= 0;
5399 switch (DECL_FUNCTION_CODE (decl
))
5401 case BUILT_IN_GOMP_PARALLEL
:
5402 /* __builtin_GOMP_parallel (fn, data, num_threads, flags). */
5406 case BUILT_IN_GOACC_PARALLEL
:
5407 /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
5408 sizes, kinds, ...). */
5411 implicit_use_args
[num_implicit_use_args
++] = 4;
5412 implicit_use_args
[num_implicit_use_args
++] = 5;
5418 tree fnarg
= gimple_call_arg (t
, fnpos
);
5419 gcc_assert (TREE_CODE (fnarg
) == ADDR_EXPR
);
5420 tree fndecl
= TREE_OPERAND (fnarg
, 0);
5421 if (fndecl_maybe_in_other_partition (fndecl
))
5422 /* Fallthru to general call handling. */
5425 varinfo_t cfi
= get_vi_for_tree (fndecl
);
5427 tree arg
= gimple_call_arg (t
, argpos
);
5429 /* Parameter passed by value is used. */
5430 lhs
= get_function_part_constraint (fi
, fi_uses
);
5431 struct constraint_expr
*rhsp
;
5432 get_constraint_for (arg
, &rhsc
);
5433 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5434 process_constraint (new_constraint (lhs
, *rhsp
));
5437 /* Handle parameters used by the call, but not used in cfi, as
5438 implicitly used by cfi. */
5439 lhs
= get_function_part_constraint (cfi
, fi_uses
);
5440 for (unsigned i
= 0; i
< num_implicit_use_args
; ++i
)
5442 tree arg
= gimple_call_arg (t
, implicit_use_args
[i
]);
5443 get_constraint_for (arg
, &rhsc
);
5444 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5445 process_constraint (new_constraint (lhs
, *rhsp
));
5449 /* The caller clobbers what the callee does. */
5450 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5451 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5452 process_constraint (new_constraint (lhs
, rhs
));
5454 /* The caller uses what the callee does. */
5455 lhs
= get_function_part_constraint (fi
, fi_uses
);
5456 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5457 process_constraint (new_constraint (lhs
, rhs
));
5461 /* printf-style functions may have hooks to set pointers to
5462 point to somewhere into the generated string. Leave them
5463 for a later exercise... */
5465 /* Fallthru to general call handling. */;
5468 /* Parameters passed by value are used. */
5469 lhs
= get_function_part_constraint (fi
, fi_uses
);
5470 for (i
= 0; i
< gimple_call_num_args (t
); i
++)
5472 struct constraint_expr
*rhsp
;
5473 tree arg
= gimple_call_arg (t
, i
);
5475 if (TREE_CODE (arg
) == SSA_NAME
5476 || is_gimple_min_invariant (arg
))
5479 get_constraint_for_address_of (arg
, &rhsc
);
5480 FOR_EACH_VEC_ELT (rhsc
, j
, rhsp
)
5481 process_constraint (new_constraint (lhs
, *rhsp
));
5485 /* Build constraints for propagating clobbers/uses along the
5487 cfi
= get_fi_for_callee (call_stmt
);
5488 if (cfi
->id
== anything_id
)
5490 if (gimple_vdef (t
))
5491 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5493 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5498 /* For callees without function info (that's external functions),
5499 ESCAPED is clobbered and used. */
5501 && TREE_CODE (cfi
->decl
) == FUNCTION_DECL
5502 && !cfi
->is_fn_info
)
5506 if (gimple_vdef (t
))
5507 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5509 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
), escaped_id
);
5511 /* Also honor the call statement use/clobber info. */
5512 if ((vi
= lookup_call_clobber_vi (call_stmt
)) != NULL
)
5513 make_copy_constraint (first_vi_for_offset (fi
, fi_clobbers
),
5515 if ((vi
= lookup_call_use_vi (call_stmt
)) != NULL
)
5516 make_copy_constraint (first_vi_for_offset (fi
, fi_uses
),
5521 /* Otherwise the caller clobbers and uses what the callee does.
5522 ??? This should use a new complex constraint that filters
5523 local variables of the callee. */
5524 if (gimple_vdef (t
))
5526 lhs
= get_function_part_constraint (fi
, fi_clobbers
);
5527 rhs
= get_function_part_constraint (cfi
, fi_clobbers
);
5528 process_constraint (new_constraint (lhs
, rhs
));
5530 lhs
= get_function_part_constraint (fi
, fi_uses
);
5531 rhs
= get_function_part_constraint (cfi
, fi_uses
);
5532 process_constraint (new_constraint (lhs
, rhs
));
5534 else if (gimple_code (t
) == GIMPLE_ASM
)
5536 /* ??? Ick. We can do better. */
5537 if (gimple_vdef (t
))
5538 make_constraint_from (first_vi_for_offset (fi
, fi_clobbers
),
5540 make_constraint_from (first_vi_for_offset (fi
, fi_uses
),
5546 /* Find the first varinfo in the same variable as START that overlaps with
5547 OFFSET. Return NULL if we can't find one. */
5550 first_vi_for_offset (varinfo_t start
, unsigned HOST_WIDE_INT offset
)
5552 /* If the offset is outside of the variable, bail out. */
5553 if (offset
>= start
->fullsize
)
5556 /* If we cannot reach offset from start, lookup the first field
5557 and start from there. */
5558 if (start
->offset
> offset
)
5559 start
= get_varinfo (start
->head
);
5563 /* We may not find a variable in the field list with the actual
5564 offset when we have glommed a structure to a variable.
5565 In that case, however, offset should still be within the size
5567 if (offset
>= start
->offset
5568 && (offset
- start
->offset
) < start
->size
)
5571 start
= vi_next (start
);
5577 /* Find the first varinfo in the same variable as START that overlaps with
5578 OFFSET. If there is no such varinfo the varinfo directly preceding
5579 OFFSET is returned. */
5582 first_or_preceding_vi_for_offset (varinfo_t start
,
5583 unsigned HOST_WIDE_INT offset
)
5585 /* If we cannot reach offset from start, lookup the first field
5586 and start from there. */
5587 if (start
->offset
> offset
)
5588 start
= get_varinfo (start
->head
);
5590 /* We may not find a variable in the field list with the actual
5591 offset when we have glommed a structure to a variable.
5592 In that case, however, offset should still be within the size
5594 If we got beyond the offset we look for return the field
5595 directly preceding offset which may be the last field. */
5597 && offset
>= start
->offset
5598 && !((offset
- start
->offset
) < start
->size
))
5599 start
= vi_next (start
);
5605 /* This structure is used during pushing fields onto the fieldstack
5606 to track the offset of the field, since bitpos_of_field gives it
5607 relative to its immediate containing type, and we want it relative
5608 to the ultimate containing object. */
5612 /* Offset from the base of the base containing object to this field. */
5613 HOST_WIDE_INT offset
;
5615 /* Size, in bits, of the field. */
5616 unsigned HOST_WIDE_INT size
;
5618 unsigned has_unknown_size
: 1;
5620 unsigned must_have_pointers
: 1;
5622 unsigned may_have_pointers
: 1;
5624 unsigned only_restrict_pointers
: 1;
5626 tree restrict_pointed_type
;
5628 typedef struct fieldoff fieldoff_s
;
5631 /* qsort comparison function for two fieldoff's PA and PB */
5634 fieldoff_compare (const void *pa
, const void *pb
)
5636 const fieldoff_s
*foa
= (const fieldoff_s
*)pa
;
5637 const fieldoff_s
*fob
= (const fieldoff_s
*)pb
;
5638 unsigned HOST_WIDE_INT foasize
, fobsize
;
5640 if (foa
->offset
< fob
->offset
)
5642 else if (foa
->offset
> fob
->offset
)
5645 foasize
= foa
->size
;
5646 fobsize
= fob
->size
;
5647 if (foasize
< fobsize
)
5649 else if (foasize
> fobsize
)
5654 /* Sort a fieldstack according to the field offset and sizes. */
5656 sort_fieldstack (vec
<fieldoff_s
> fieldstack
)
5658 fieldstack
.qsort (fieldoff_compare
);
5661 /* Return true if T is a type that can have subvars. */
5664 type_can_have_subvars (const_tree t
)
5666 /* Aggregates without overlapping fields can have subvars. */
5667 return TREE_CODE (t
) == RECORD_TYPE
;
5670 /* Return true if V is a tree that we can have subvars for.
5671 Normally, this is any aggregate type. Also complex
5672 types which are not gimple registers can have subvars. */
5675 var_can_have_subvars (const_tree v
)
5677 /* Volatile variables should never have subvars. */
5678 if (TREE_THIS_VOLATILE (v
))
5681 /* Non decls or memory tags can never have subvars. */
5685 return type_can_have_subvars (TREE_TYPE (v
));
5688 /* Return true if T is a type that does contain pointers. */
5691 type_must_have_pointers (tree type
)
5693 if (POINTER_TYPE_P (type
))
5696 if (TREE_CODE (type
) == ARRAY_TYPE
)
5697 return type_must_have_pointers (TREE_TYPE (type
));
5699 /* A function or method can have pointers as arguments, so track
5700 those separately. */
5701 if (TREE_CODE (type
) == FUNCTION_TYPE
5702 || TREE_CODE (type
) == METHOD_TYPE
)
5709 field_must_have_pointers (tree t
)
5711 return type_must_have_pointers (TREE_TYPE (t
));
5714 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5715 the fields of TYPE onto fieldstack, recording their offsets along
5718 OFFSET is used to keep track of the offset in this entire
5719 structure, rather than just the immediately containing structure.
5720 Returns false if the caller is supposed to handle the field we
5724 push_fields_onto_fieldstack (tree type
, vec
<fieldoff_s
> *fieldstack
,
5725 HOST_WIDE_INT offset
)
5728 bool empty_p
= true;
5730 if (TREE_CODE (type
) != RECORD_TYPE
)
5733 /* If the vector of fields is growing too big, bail out early.
5734 Callers check for vec::length <= param_max_fields_for_field_sensitive, make
5736 if (fieldstack
->length () > (unsigned)param_max_fields_for_field_sensitive
)
5739 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
5740 if (TREE_CODE (field
) == FIELD_DECL
)
5743 HOST_WIDE_INT foff
= bitpos_of_field (field
);
5744 tree field_type
= TREE_TYPE (field
);
5746 if (!var_can_have_subvars (field
)
5747 || TREE_CODE (field_type
) == QUAL_UNION_TYPE
5748 || TREE_CODE (field_type
) == UNION_TYPE
)
5750 else if (!push_fields_onto_fieldstack
5751 (field_type
, fieldstack
, offset
+ foff
)
5752 && (DECL_SIZE (field
)
5753 && !integer_zerop (DECL_SIZE (field
))))
5754 /* Empty structures may have actual size, like in C++. So
5755 see if we didn't push any subfields and the size is
5756 nonzero, push the field onto the stack. */
5761 fieldoff_s
*pair
= NULL
;
5762 bool has_unknown_size
= false;
5763 bool must_have_pointers_p
;
5765 if (!fieldstack
->is_empty ())
5766 pair
= &fieldstack
->last ();
5768 /* If there isn't anything at offset zero, create sth. */
5770 && offset
+ foff
!= 0)
5773 = {0, offset
+ foff
, false, false, true, false, NULL_TREE
};
5774 pair
= fieldstack
->safe_push (e
);
5777 if (!DECL_SIZE (field
)
5778 || !tree_fits_uhwi_p (DECL_SIZE (field
)))
5779 has_unknown_size
= true;
5781 /* If adjacent fields do not contain pointers merge them. */
5782 must_have_pointers_p
= field_must_have_pointers (field
);
5784 && !has_unknown_size
5785 && !must_have_pointers_p
5786 && !pair
->must_have_pointers
5787 && !pair
->has_unknown_size
5788 && pair
->offset
+ (HOST_WIDE_INT
)pair
->size
== offset
+ foff
)
5790 pair
->size
+= tree_to_uhwi (DECL_SIZE (field
));
5795 e
.offset
= offset
+ foff
;
5796 e
.has_unknown_size
= has_unknown_size
;
5797 if (!has_unknown_size
)
5798 e
.size
= tree_to_uhwi (DECL_SIZE (field
));
5801 e
.must_have_pointers
= must_have_pointers_p
;
5802 e
.may_have_pointers
= true;
5803 e
.only_restrict_pointers
5804 = (!has_unknown_size
5805 && POINTER_TYPE_P (field_type
)
5806 && TYPE_RESTRICT (field_type
));
5807 if (e
.only_restrict_pointers
)
5808 e
.restrict_pointed_type
= TREE_TYPE (field_type
);
5809 fieldstack
->safe_push (e
);
5819 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5820 if it is a varargs function. */
5823 count_num_arguments (tree decl
, bool *is_varargs
)
5825 unsigned int num
= 0;
5828 /* Capture named arguments for K&R functions. They do not
5829 have a prototype and thus no TYPE_ARG_TYPES. */
5830 for (t
= DECL_ARGUMENTS (decl
); t
; t
= DECL_CHAIN (t
))
5833 /* Check if the function has variadic arguments. */
5834 for (t
= TYPE_ARG_TYPES (TREE_TYPE (decl
)); t
; t
= TREE_CHAIN (t
))
5835 if (TREE_VALUE (t
) == void_type_node
)
5843 /* Creation function node for DECL, using NAME, and return the index
5844 of the variable we've created for the function. If NONLOCAL_p, create
5845 initial constraints. */
5848 create_function_info_for (tree decl
, const char *name
, bool add_id
,
5851 struct function
*fn
= DECL_STRUCT_FUNCTION (decl
);
5852 varinfo_t vi
, prev_vi
;
5855 bool is_varargs
= false;
5856 unsigned int num_args
= count_num_arguments (decl
, &is_varargs
);
5858 /* Create the variable info. */
5860 vi
= new_var_info (decl
, name
, add_id
);
5863 vi
->fullsize
= fi_parm_base
+ num_args
;
5865 vi
->may_have_pointers
= false;
5868 insert_vi_for_tree (vi
->decl
, vi
);
5872 /* Create a variable for things the function clobbers and one for
5873 things the function uses. */
5875 varinfo_t clobbervi
, usevi
;
5876 const char *newname
;
5879 tempname
= xasprintf ("%s.clobber", name
);
5880 newname
= ggc_strdup (tempname
);
5883 clobbervi
= new_var_info (NULL
, newname
, false);
5884 clobbervi
->offset
= fi_clobbers
;
5885 clobbervi
->size
= 1;
5886 clobbervi
->fullsize
= vi
->fullsize
;
5887 clobbervi
->is_full_var
= true;
5888 clobbervi
->is_global_var
= false;
5889 clobbervi
->is_reg_var
= true;
5891 gcc_assert (prev_vi
->offset
< clobbervi
->offset
);
5892 prev_vi
->next
= clobbervi
->id
;
5893 prev_vi
= clobbervi
;
5895 tempname
= xasprintf ("%s.use", name
);
5896 newname
= ggc_strdup (tempname
);
5899 usevi
= new_var_info (NULL
, newname
, false);
5900 usevi
->offset
= fi_uses
;
5902 usevi
->fullsize
= vi
->fullsize
;
5903 usevi
->is_full_var
= true;
5904 usevi
->is_global_var
= false;
5905 usevi
->is_reg_var
= true;
5907 gcc_assert (prev_vi
->offset
< usevi
->offset
);
5908 prev_vi
->next
= usevi
->id
;
5912 /* And one for the static chain. */
5913 if (fn
->static_chain_decl
!= NULL_TREE
)
5916 const char *newname
;
5919 tempname
= xasprintf ("%s.chain", name
);
5920 newname
= ggc_strdup (tempname
);
5923 chainvi
= new_var_info (fn
->static_chain_decl
, newname
, false);
5924 chainvi
->offset
= fi_static_chain
;
5926 chainvi
->fullsize
= vi
->fullsize
;
5927 chainvi
->is_full_var
= true;
5928 chainvi
->is_global_var
= false;
5930 insert_vi_for_tree (fn
->static_chain_decl
, chainvi
);
5933 && chainvi
->may_have_pointers
)
5934 make_constraint_from (chainvi
, nonlocal_id
);
5936 gcc_assert (prev_vi
->offset
< chainvi
->offset
);
5937 prev_vi
->next
= chainvi
->id
;
5941 /* Create a variable for the return var. */
5942 if (DECL_RESULT (decl
) != NULL
5943 || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
5946 const char *newname
;
5948 tree resultdecl
= decl
;
5950 if (DECL_RESULT (decl
))
5951 resultdecl
= DECL_RESULT (decl
);
5953 tempname
= xasprintf ("%s.result", name
);
5954 newname
= ggc_strdup (tempname
);
5957 resultvi
= new_var_info (resultdecl
, newname
, false);
5958 resultvi
->offset
= fi_result
;
5960 resultvi
->fullsize
= vi
->fullsize
;
5961 resultvi
->is_full_var
= true;
5962 if (DECL_RESULT (decl
))
5963 resultvi
->may_have_pointers
= true;
5965 if (DECL_RESULT (decl
))
5966 insert_vi_for_tree (DECL_RESULT (decl
), resultvi
);
5969 && DECL_RESULT (decl
)
5970 && DECL_BY_REFERENCE (DECL_RESULT (decl
)))
5971 make_constraint_from (resultvi
, nonlocal_id
);
5973 gcc_assert (prev_vi
->offset
< resultvi
->offset
);
5974 prev_vi
->next
= resultvi
->id
;
5978 /* We also need to make function return values escape. Nothing
5979 escapes by returning from main though. */
5981 && !MAIN_NAME_P (DECL_NAME (decl
)))
5984 fi
= lookup_vi_for_tree (decl
);
5985 rvi
= first_vi_for_offset (fi
, fi_result
);
5986 if (rvi
&& rvi
->offset
== fi_result
)
5987 make_copy_constraint (get_varinfo (escaped_id
), rvi
->id
);
5990 /* Set up variables for each argument. */
5991 arg
= DECL_ARGUMENTS (decl
);
5992 for (i
= 0; i
< num_args
; i
++)
5995 const char *newname
;
5997 tree argdecl
= decl
;
6002 tempname
= xasprintf ("%s.arg%d", name
, i
);
6003 newname
= ggc_strdup (tempname
);
6006 argvi
= new_var_info (argdecl
, newname
, false);
6007 argvi
->offset
= fi_parm_base
+ i
;
6009 argvi
->is_full_var
= true;
6010 argvi
->fullsize
= vi
->fullsize
;
6012 argvi
->may_have_pointers
= true;
6015 insert_vi_for_tree (arg
, argvi
);
6018 && argvi
->may_have_pointers
)
6019 make_constraint_from (argvi
, nonlocal_id
);
6021 gcc_assert (prev_vi
->offset
< argvi
->offset
);
6022 prev_vi
->next
= argvi
->id
;
6025 arg
= DECL_CHAIN (arg
);
6028 /* Add one representative for all further args. */
6032 const char *newname
;
6036 tempname
= xasprintf ("%s.varargs", name
);
6037 newname
= ggc_strdup (tempname
);
6040 /* We need sth that can be pointed to for va_start. */
6041 decl
= build_fake_var_decl (ptr_type_node
);
6043 argvi
= new_var_info (decl
, newname
, false);
6044 argvi
->offset
= fi_parm_base
+ num_args
;
6046 argvi
->is_full_var
= true;
6047 argvi
->is_heap_var
= true;
6048 argvi
->fullsize
= vi
->fullsize
;
6051 && argvi
->may_have_pointers
)
6052 make_constraint_from (argvi
, nonlocal_id
);
6054 gcc_assert (prev_vi
->offset
< argvi
->offset
);
6055 prev_vi
->next
= argvi
->id
;
6062 /* Return true if FIELDSTACK contains fields that overlap.
6063 FIELDSTACK is assumed to be sorted by offset. */
6066 check_for_overlaps (vec
<fieldoff_s
> fieldstack
)
6068 fieldoff_s
*fo
= NULL
;
6070 HOST_WIDE_INT lastoffset
= -1;
6072 FOR_EACH_VEC_ELT (fieldstack
, i
, fo
)
6074 if (fo
->offset
== lastoffset
)
6076 lastoffset
= fo
->offset
;
6081 /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6082 This will also create any varinfo structures necessary for fields
6083 of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6084 HANDLED_STRUCT_TYPE is used to register struct types reached by following
6085 restrict pointers. This is needed to prevent infinite recursion.
6086 If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6087 does not advertise it. */
6090 create_variable_info_for_1 (tree decl
, const char *name
, bool add_id
,
6091 bool handle_param
, bitmap handled_struct_type
,
6092 bool add_restrict
= false)
6094 varinfo_t vi
, newvi
;
6095 tree decl_type
= TREE_TYPE (decl
);
6096 tree declsize
= DECL_P (decl
) ? DECL_SIZE (decl
) : TYPE_SIZE (decl_type
);
6097 auto_vec
<fieldoff_s
> fieldstack
;
6102 || !tree_fits_uhwi_p (declsize
))
6104 vi
= new_var_info (decl
, name
, add_id
);
6108 vi
->is_unknown_size_var
= true;
6109 vi
->is_full_var
= true;
6110 vi
->may_have_pointers
= true;
6114 /* Collect field information. */
6115 if (use_field_sensitive
6116 && var_can_have_subvars (decl
)
6117 /* ??? Force us to not use subfields for globals in IPA mode.
6118 Else we'd have to parse arbitrary initializers. */
6120 && is_global_var (decl
)))
6122 fieldoff_s
*fo
= NULL
;
6123 bool notokay
= false;
6126 push_fields_onto_fieldstack (decl_type
, &fieldstack
, 0);
6128 for (i
= 0; !notokay
&& fieldstack
.iterate (i
, &fo
); i
++)
6129 if (fo
->has_unknown_size
6136 /* We can't sort them if we have a field with a variable sized type,
6137 which will make notokay = true. In that case, we are going to return
6138 without creating varinfos for the fields anyway, so sorting them is a
6142 sort_fieldstack (fieldstack
);
6143 /* Due to some C++ FE issues, like PR 22488, we might end up
6144 what appear to be overlapping fields even though they,
6145 in reality, do not overlap. Until the C++ FE is fixed,
6146 we will simply disable field-sensitivity for these cases. */
6147 notokay
= check_for_overlaps (fieldstack
);
6151 fieldstack
.release ();
6154 /* If we didn't end up collecting sub-variables create a full
6155 variable for the decl. */
6156 if (fieldstack
.length () == 0
6157 || fieldstack
.length () > (unsigned)param_max_fields_for_field_sensitive
)
6159 vi
= new_var_info (decl
, name
, add_id
);
6161 vi
->may_have_pointers
= true;
6162 vi
->fullsize
= tree_to_uhwi (declsize
);
6163 vi
->size
= vi
->fullsize
;
6164 vi
->is_full_var
= true;
6165 if (POINTER_TYPE_P (decl_type
)
6166 && (TYPE_RESTRICT (decl_type
) || add_restrict
))
6167 vi
->only_restrict_pointers
= 1;
6168 if (vi
->only_restrict_pointers
6169 && !type_contains_placeholder_p (TREE_TYPE (decl_type
))
6171 && !bitmap_bit_p (handled_struct_type
,
6172 TYPE_UID (TREE_TYPE (decl_type
))))
6175 tree heapvar
= build_fake_var_decl (TREE_TYPE (decl_type
));
6176 DECL_EXTERNAL (heapvar
) = 1;
6177 if (var_can_have_subvars (heapvar
))
6178 bitmap_set_bit (handled_struct_type
,
6179 TYPE_UID (TREE_TYPE (decl_type
)));
6180 rvi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS", true,
6181 true, handled_struct_type
);
6182 if (var_can_have_subvars (heapvar
))
6183 bitmap_clear_bit (handled_struct_type
,
6184 TYPE_UID (TREE_TYPE (decl_type
)));
6185 rvi
->is_restrict_var
= 1;
6186 insert_vi_for_tree (heapvar
, rvi
);
6187 make_constraint_from (vi
, rvi
->id
);
6188 make_param_constraints (rvi
);
6190 fieldstack
.release ();
6194 vi
= new_var_info (decl
, name
, add_id
);
6195 vi
->fullsize
= tree_to_uhwi (declsize
);
6196 if (fieldstack
.length () == 1)
6197 vi
->is_full_var
= true;
6198 for (i
= 0, newvi
= vi
;
6199 fieldstack
.iterate (i
, &fo
);
6200 ++i
, newvi
= vi_next (newvi
))
6202 const char *newname
= NULL
;
6207 if (fieldstack
.length () != 1)
6210 = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6211 "+" HOST_WIDE_INT_PRINT_DEC
, name
,
6212 fo
->offset
, fo
->size
);
6213 newname
= ggc_strdup (tempname
);
6221 newvi
->name
= newname
;
6222 newvi
->offset
= fo
->offset
;
6223 newvi
->size
= fo
->size
;
6224 newvi
->fullsize
= vi
->fullsize
;
6225 newvi
->may_have_pointers
= fo
->may_have_pointers
;
6226 newvi
->only_restrict_pointers
= fo
->only_restrict_pointers
;
6228 && newvi
->only_restrict_pointers
6229 && !type_contains_placeholder_p (fo
->restrict_pointed_type
)
6230 && !bitmap_bit_p (handled_struct_type
,
6231 TYPE_UID (fo
->restrict_pointed_type
)))
6234 tree heapvar
= build_fake_var_decl (fo
->restrict_pointed_type
);
6235 DECL_EXTERNAL (heapvar
) = 1;
6236 if (var_can_have_subvars (heapvar
))
6237 bitmap_set_bit (handled_struct_type
,
6238 TYPE_UID (fo
->restrict_pointed_type
));
6239 rvi
= create_variable_info_for_1 (heapvar
, "PARM_NOALIAS", true,
6240 true, handled_struct_type
);
6241 if (var_can_have_subvars (heapvar
))
6242 bitmap_clear_bit (handled_struct_type
,
6243 TYPE_UID (fo
->restrict_pointed_type
));
6244 rvi
->is_restrict_var
= 1;
6245 insert_vi_for_tree (heapvar
, rvi
);
6246 make_constraint_from (newvi
, rvi
->id
);
6247 make_param_constraints (rvi
);
6249 if (i
+ 1 < fieldstack
.length ())
6251 varinfo_t tem
= new_var_info (decl
, name
, false);
6252 newvi
->next
= tem
->id
;
/* Create varinfo structures for DECL, register them in the vi-for-tree
   map and emit initial constraints for globals (in IPA mode the
   initializer references are parsed as well).  NOTE(review): interior
   lines are elided in this excerpt -- verify against the full file.  */
6261 create_variable_info_for (tree decl
, const char *name
, bool add_id
)
6263 /* First see if we are dealing with an ifunc resolver call and
6264 associate that with a call to the resolver function result. */
6267 && TREE_CODE (decl
) == FUNCTION_DECL
6268 && (node
= cgraph_node::get (decl
))
6269 && node
->ifunc_resolver
)
6271 varinfo_t fi
= get_vi_for_tree (node
->get_alias_target ()->decl
);
6273 = get_function_part_constraint (fi
, fi_result
);
6274 fi
= new_var_info (NULL_TREE
, "ifuncres", true);
6275 fi
->is_reg_var
= true;
6276 constraint_expr lhs
;
6280 process_constraint (new_constraint (lhs
, rhs
));
6281 insert_vi_for_tree (decl
, fi
);
6285 varinfo_t vi
= create_variable_info_for_1 (decl
, name
, add_id
, false, NULL
);
6286 unsigned int id
= vi
->id
;
6288 insert_vi_for_tree (decl
, vi
);
6293 /* Create initial constraints for globals. */
6294 for (; vi
; vi
= vi_next (vi
))
6296 if (!vi
->may_have_pointers
6297 || !vi
->is_global_var
)
6300 /* Mark global restrict qualified pointers. */
6301 if ((POINTER_TYPE_P (TREE_TYPE (decl
))
6302 && TYPE_RESTRICT (TREE_TYPE (decl
)))
6303 || vi
->only_restrict_pointers
)
6306 = make_constraint_from_global_restrict (vi
, "GLOBAL_RESTRICT",
6308 /* ??? For now exclude reads from globals as restrict sources
6309 if those are not (indirectly) from incoming parameters. */
6310 rvi
->is_restrict_var
= false;
6314 /* In non-IPA mode the initializer from nonlocal is all we need. */
6316 || DECL_HARD_REGISTER (decl
))
6317 make_copy_constraint (vi
, nonlocal_id
);
6319 /* In IPA mode parse the initializer and generate proper constraints
6323 varpool_node
*vnode
= varpool_node::get (decl
);
6325 /* For escaped variables initialize them from nonlocal. */
6326 if (!vnode
->all_refs_explicit_p ())
6327 make_copy_constraint (vi
, nonlocal_id
);
6329 /* If this is a global variable with an initializer and we are in
6330 IPA mode generate constraints for it. */
6332 for (unsigned idx
= 0; vnode
->iterate_reference (idx
, ref
); ++idx
)
6334 auto_vec
<ce_s
> rhsc
;
6335 struct constraint_expr lhs
, *rhsp
;
6337 get_constraint_for_address_of (ref
->referred
->decl
, &rhsc
);
6341 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
6342 process_constraint (new_constraint (lhs
, *rhsp
));
6343 /* If this is a variable that escapes from the unit
6344 the initializer escapes as well. */
6345 if (!vnode
->all_refs_explicit_p ())
6347 lhs
.var
= escaped_id
;
6350 FOR_EACH_VEC_ELT (rhsc
, i
, rhsp
)
6351 process_constraint (new_constraint (lhs
, *rhsp
));
6360 /* Print out the points-to solution for VAR to FILE. */
6363 dump_solution_for_var (FILE *file
, unsigned int var
)
6365 varinfo_t vi
= get_varinfo (var
);
6369 /* Dump the solution for unified vars anyway, this avoids difficulties
6370 in scanning dumps in the testsuite. */
6371 fprintf (file
, "%s = { ", vi
->name
);
/* Look through unifications so the representative's solution is the
   one printed.  */
6372 vi
= get_varinfo (find (var
));
6373 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
6374 fprintf (file
, "%s ", get_varinfo (i
)->name
);
6375 fprintf (file
, "}");
6377 /* But note when the variable was unified. */
6379 fprintf (file
, " same as %s", vi
->name
);
6381 fprintf (file
, "\n");
6384 /* Print the points-to solution for VAR to stderr. */
6387 debug_solution_for_var (unsigned int var
)
6389 dump_solution_for_var (stderr
, var
);
6392 /* Register the constraints for function parameter related VI. */
6395 make_param_constraints (varinfo_t vi
)
6397 for (; vi
; vi
= vi_next (vi
))
/* Restrict-qualified pointers are handled separately; any other
   pointer-bearing sub-variable conservatively points to NONLOCAL.  */
6399 if (vi
->only_restrict_pointers
)
6401 else if (vi
->may_have_pointers
)
6402 make_constraint_from (vi
, nonlocal_id
);
6404 if (vi
->is_full_var
)
6409 /* Create varinfo structures for all of the variables in the
6410 function for intraprocedural mode. */
6413 intra_create_variable_infos (struct function
*fn
)
6416 bitmap handled_struct_type
= NULL
;
6417 bool this_parm_in_ctor
= DECL_CXX_CONSTRUCTOR_P (fn
->decl
);
6419 /* For each incoming pointer argument arg, create the constraint ARG
6420 = NONLOCAL or a dummy variable if it is a restrict qualified
6421 passed-by-reference argument. */
6422 for (t
= DECL_ARGUMENTS (fn
->decl
); t
; t
= DECL_CHAIN (t
))
6424 if (handled_struct_type
== NULL
)
6425 handled_struct_type
= BITMAP_ALLOC (NULL
);
6428 = create_variable_info_for_1 (t
, alias_get_name (t
), false, true,
6429 handled_struct_type
, this_parm_in_ctor
);
6430 insert_vi_for_tree (t
, p
);
6432 make_param_constraints (p
);
/* Only the first argument can be the 'this' parameter of a
   constructor, so clear the flag after the first iteration.  */
6434 this_parm_in_ctor
= false;
6437 if (handled_struct_type
!= NULL
)
6438 BITMAP_FREE (handled_struct_type
);
6440 /* Add a constraint for a result decl that is passed by reference. */
6441 if (DECL_RESULT (fn
->decl
)
6442 && DECL_BY_REFERENCE (DECL_RESULT (fn
->decl
)))
6444 varinfo_t p
, result_vi
= get_vi_for_tree (DECL_RESULT (fn
->decl
));
6446 for (p
= result_vi
; p
; p
= vi_next (p
))
6447 make_constraint_from (p
, nonlocal_id
);
6450 /* Add a constraint for the incoming static chain parameter. */
6451 if (fn
->static_chain_decl
!= NULL_TREE
)
6453 varinfo_t p
, chain_vi
= get_vi_for_tree (fn
->static_chain_decl
);
6455 for (p
= chain_vi
; p
; p
= vi_next (p
))
6456 make_constraint_from (p
, nonlocal_id
);
6460 /* Structure used to put solution bitmaps in a hashtable so they can
6461 be shared among variables with the same points-to set. */
6463 typedef struct shared_bitmap_info
6467 } *shared_bitmap_info_t
;
6468 typedef const struct shared_bitmap_info
*const_shared_bitmap_info_t
;
6470 /* Shared_bitmap hashtable helpers. */
6472 struct shared_bitmap_hasher
: free_ptr_hash
<shared_bitmap_info
>
6474 static inline hashval_t
hash (const shared_bitmap_info
*);
6475 static inline bool equal (const shared_bitmap_info
*,
6476 const shared_bitmap_info
*);
6479 /* Hash function for a shared_bitmap_info_t */
/* The hash value is precomputed (via bitmap_hash) and cached in the
   hashcode field, so hashing is a plain field read.  */
6482 shared_bitmap_hasher::hash (const shared_bitmap_info
*bi
)
6484 return bi
->hashcode
;
6487 /* Equality function for two shared_bitmap_info_t's. */
6490 shared_bitmap_hasher::equal (const shared_bitmap_info
*sbi1
,
6491 const shared_bitmap_info
*sbi2
)
6493 return bitmap_equal_p (sbi1
->pt_vars
, sbi2
->pt_vars
);
6496 /* Shared_bitmap hashtable. */
6498 static hash_table
<shared_bitmap_hasher
> *shared_bitmap_table
;
6500 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6501 existing instance if there is one, NULL otherwise. */
6504 shared_bitmap_lookup (bitmap pt_vars
)
6506 shared_bitmap_info
**slot
;
6507 struct shared_bitmap_info sbi
;
6509 sbi
.pt_vars
= pt_vars
;
6510 sbi
.hashcode
= bitmap_hash (pt_vars
);
6512 slot
= shared_bitmap_table
->find_slot (&sbi
, NO_INSERT
);
6516 return (*slot
)->pt_vars
;
6520 /* Add a bitmap to the shared bitmap hashtable. */
6523 shared_bitmap_add (bitmap pt_vars
)
6525 shared_bitmap_info
**slot
;
6526 shared_bitmap_info_t sbi
= XNEW (struct shared_bitmap_info
);
6528 sbi
->pt_vars
= pt_vars
;
6529 sbi
->hashcode
= bitmap_hash (pt_vars
);
6531 slot
= shared_bitmap_table
->find_slot (sbi
, INSERT
);
6532 gcc_assert (!*slot
);
6537 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
/* Also records summary flags (escaped / restrict / nonlocal /
   interposable) in *PT as a side effect.  */
6540 set_uids_in_ptset (bitmap into
, bitmap from
, struct pt_solution
*pt
,
6545 varinfo_t escaped_vi
= get_varinfo (find (escaped_id
));
6546 bool everything_escaped
6547 = escaped_vi
->solution
&& bitmap_bit_p (escaped_vi
->solution
, anything_id
);
6549 EXECUTE_IF_SET_IN_BITMAP (from
, 0, i
, bi
)
6551 varinfo_t vi
= get_varinfo (i
);
6553 if (vi
->is_artificial_var
)
6556 if (everything_escaped
6557 || (escaped_vi
->solution
6558 && bitmap_bit_p (escaped_vi
->solution
, i
)))
6560 pt
->vars_contains_escaped
= true;
6561 pt
->vars_contains_escaped_heap
|= vi
->is_heap_var
;
6564 if (vi
->is_restrict_var
)
6565 pt
->vars_contains_restrict
= true;
6567 if (VAR_P (vi
->decl
)
6568 || TREE_CODE (vi
->decl
) == PARM_DECL
6569 || TREE_CODE (vi
->decl
) == RESULT_DECL
)
6571 /* If we are in IPA mode we will not recompute points-to
6572 sets after inlining so make sure they stay valid. */
6574 && !DECL_PT_UID_SET_P (vi
->decl
))
6575 SET_DECL_PT_UID (vi
->decl
, DECL_UID (vi
->decl
));
6577 /* Add the decl to the points-to set. Note that the points-to
6578 set contains global variables. */
6579 bitmap_set_bit (into
, DECL_PT_UID (vi
->decl
));
6580 if (vi
->is_global_var
6581 /* In IPA mode the escaped_heap trick doesn't work as
6582 ESCAPED is escaped from the unit but
6583 pt_solution_includes_global needs to answer true for
6584 all variables not automatic within a function.
6585 For the same reason is_global_var is not the
6586 correct flag to track - local variables from other
6587 functions also need to be considered global.
6588 Conveniently all HEAP vars are not put in function
6592 && ! auto_var_in_fn_p (vi
->decl
, fndecl
)))
6593 pt
->vars_contains_nonlocal
= true;
6595 /* If we have a variable that is interposable record that fact
6596 for pointer comparison simplification. */
6597 if (VAR_P (vi
->decl
)
6598 && (TREE_STATIC (vi
->decl
) || DECL_EXTERNAL (vi
->decl
))
6599 && ! decl_binds_to_current_def_p (vi
->decl
))
6600 pt
->vars_contains_interposable
= true;
6602 /* If this is a local variable we can have overlapping lifetime
6603 of different function invocations through recursion, duplicate
6604 it with its shadow variable. */
6606 && vi
->shadow_var_uid
!= 0)
6608 bitmap_set_bit (into
, vi
->shadow_var_uid
);
6609 pt
->vars_contains_nonlocal
= true;
6613 else if (TREE_CODE (vi
->decl
) == FUNCTION_DECL
6614 || TREE_CODE (vi
->decl
) == LABEL_DECL
)
6616 /* Nothing should read/write from/to code so we can
6617 save bits by not including them in the points-to bitmaps.
6618 Still mark the points-to set as containing global memory
6619 to make code-patching possible - see PR70128. */
6620 pt
->vars_contains_nonlocal
= true;
6626 /* Compute the points-to solution *PT for the variable VI. */
6628 static struct pt_solution
6629 find_what_var_points_to (tree fndecl
, varinfo_t orig_vi
)
6633 bitmap finished_solution
;
6636 struct pt_solution
*pt
;
6638 /* This variable may have been collapsed, let's get the real
6640 vi
= get_varinfo (find (orig_vi
->id
));
6642 /* See if we have already computed the solution and return it. */
6643 pt_solution
**slot
= &final_solutions
->get_or_insert (vi
);
6647 *slot
= pt
= XOBNEW (&final_solutions_obstack
, struct pt_solution
);
6648 memset (pt
, 0, sizeof (struct pt_solution
));
6650 /* Translate artificial variables into SSA_NAME_PTR_INFO
6652 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
/* Note: this VI shadows the function-level one; it is the current
   member of the solution set being examined.  */
6654 varinfo_t vi
= get_varinfo (i
);
6656 if (vi
->is_artificial_var
)
6658 if (vi
->id
== nothing_id
)
6660 else if (vi
->id
== escaped_id
)
6663 pt
->ipa_escaped
= 1;
6666 /* Expand some special vars of ESCAPED in-place here. */
6667 varinfo_t evi
= get_varinfo (find (escaped_id
));
6668 if (bitmap_bit_p (evi
->solution
, nonlocal_id
))
6671 else if (vi
->id
== nonlocal_id
)
6673 else if (vi
->id
== string_id
)
6674 /* Nobody cares - STRING_CSTs are read-only entities. */
6676 else if (vi
->id
== anything_id
6677 || vi
->id
== integer_id
)
6682 /* Instead of doing extra work, simply do not create
6683 elaborate points-to information for pt_anything pointers. */
6687 /* Share the final set of variables when possible. */
6688 finished_solution
= BITMAP_GGC_ALLOC ();
6689 stats
.points_to_sets_created
++;
6691 set_uids_in_ptset (finished_solution
, vi
->solution
, pt
, fndecl
);
6692 result
= shared_bitmap_lookup (finished_solution
);
6695 shared_bitmap_add (finished_solution
);
6696 pt
->vars
= finished_solution
;
6701 bitmap_clear (finished_solution
);
6707 /* Given a pointer variable P, fill in its points-to set. */
6710 find_what_p_points_to (tree fndecl
, tree p
)
6712 struct ptr_info_def
*pi
;
6715 bool nonnull
= get_ptr_nonnull (p
);
6717 /* For parameters, get at the points-to set for the actual parm
6719 if (TREE_CODE (p
) == SSA_NAME
6720 && SSA_NAME_IS_DEFAULT_DEF (p
)
6721 && (TREE_CODE (SSA_NAME_VAR (p
)) == PARM_DECL
6722 || TREE_CODE (SSA_NAME_VAR (p
)) == RESULT_DECL
))
6723 lookup_p
= SSA_NAME_VAR (p
);
6725 vi
= lookup_vi_for_tree (lookup_p
);
6729 pi
= get_ptr_info (p
);
6730 pi
->pt
= find_what_var_points_to (fndecl
, vi
);
6731 /* Conservatively set to NULL from PTA (to true). */
6733 /* Preserve pointer nonnull computed by VRP. See get_ptr_nonnull
6734 in gcc/tree-ssanames.c for more information. */
6736 set_ptr_nonnull (p
);
6740 /* Query statistics for points-to solutions. */
/* These counters are bumped by pt_solution_includes and
   pt_solutions_intersect.  */
6743 unsigned HOST_WIDE_INT pt_solution_includes_may_alias
;
6744 unsigned HOST_WIDE_INT pt_solution_includes_no_alias
;
6745 unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias
;
6746 unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias
;
6750 dump_pta_stats (FILE *s
)
6752 fprintf (s
, "\nPTA query stats:\n");
6753 fprintf (s
, " pt_solution_includes: "
6754 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6755 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6756 pta_stats
.pt_solution_includes_no_alias
,
6757 pta_stats
.pt_solution_includes_no_alias
6758 + pta_stats
.pt_solution_includes_may_alias
);
6759 fprintf (s
, " pt_solutions_intersect: "
6760 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
6761 HOST_WIDE_INT_PRINT_DEC
" queries\n",
6762 pta_stats
.pt_solutions_intersect_no_alias
,
6763 pta_stats
.pt_solutions_intersect_no_alias
6764 + pta_stats
.pt_solutions_intersect_may_alias
);
6768 /* Reset the points-to solution *PT to a conservative default
6769 (point to anything). */
6772 pt_solution_reset (struct pt_solution
*pt
)
6774 memset (pt
, 0, sizeof (struct pt_solution
));
6775 pt
->anything
= true;
6779 /* Set the points-to solution *PT to point only to the variables
6780 in VARS. VARS_CONTAINS_NONLOCAL specifies whether VARS contains
6781 nonlocal (global) variables; the escaped flags are derived from
6782 the function's ESCAPED solution below. */
6785 pt_solution_set (struct pt_solution
*pt
, bitmap vars
,
6786 bool vars_contains_nonlocal
)
6788 memset (pt
, 0, sizeof (struct pt_solution
));
6790 pt
->vars_contains_nonlocal
= vars_contains_nonlocal
;
6791 pt
->vars_contains_escaped
6792 = (cfun
->gimple_df
->escaped
.anything
6793 || bitmap_intersect_p (cfun
->gimple_df
->escaped
.vars
, vars
));
6796 /* Set the points-to solution *PT to point only to the variable VAR. */
6799 pt_solution_set_var (struct pt_solution
*pt
, tree var
)
6801 memset (pt
, 0, sizeof (struct pt_solution
));
6802 pt
->vars
= BITMAP_GGC_ALLOC ();
6803 bitmap_set_bit (pt
->vars
, DECL_PT_UID (var
));
6804 pt
->vars_contains_nonlocal
= is_global_var (var
);
6805 pt
->vars_contains_escaped
6806 = (cfun
->gimple_df
->escaped
.anything
6807 || bitmap_bit_p (cfun
->gimple_df
->escaped
.vars
, DECL_PT_UID (var
)));
6810 /* Computes the union of the points-to solutions *DEST and *SRC and
6811 stores the result in *DEST. This changes the points-to bitmap
6812 of *DEST and thus may not be used if that might be shared.
6813 The points-to bitmap of *SRC and *DEST will not be shared after
6814 this function if they were not before. */
6817 pt_solution_ior_into (struct pt_solution
*dest
, struct pt_solution
*src
)
6819 dest
->anything
|= src
->anything
;
6822 pt_solution_reset (dest
);
6826 dest
->nonlocal
|= src
->nonlocal
;
6827 dest
->escaped
|= src
->escaped
;
6828 dest
->ipa_escaped
|= src
->ipa_escaped
;
6829 dest
->null
|= src
->null
;
6830 dest
->vars_contains_nonlocal
|= src
->vars_contains_nonlocal
;
6831 dest
->vars_contains_escaped
|= src
->vars_contains_escaped
;
6832 dest
->vars_contains_escaped_heap
|= src
->vars_contains_escaped_heap
;
6837 dest
->vars
= BITMAP_GGC_ALLOC ();
6838 bitmap_ior_into (dest
->vars
, src
->vars
);
6841 /* Return true if the points-to solution *PT is empty. */
6844 pt_solution_empty_p (const pt_solution
*pt
)
6851 && !bitmap_empty_p (pt
->vars
))
6854 /* If the solution includes ESCAPED, check if that is empty. */
6856 && !pt_solution_empty_p (&cfun
->gimple_df
->escaped
))
6859 /* If the solution includes ESCAPED, check if that is empty. */
6861 && !pt_solution_empty_p (&ipa_escaped_pt
))
6867 /* Return true if the points-to solution *PT only points to a single var, and
6868 return the var uid in *UID. */
6871 pt_solution_singleton_or_null_p (struct pt_solution
*pt
, unsigned *uid
)
6873 if (pt
->anything
|| pt
->nonlocal
|| pt
->escaped
|| pt
->ipa_escaped
6875 || !bitmap_single_bit_set_p (pt
->vars
))
6878 *uid
= bitmap_first_set_bit (pt
->vars
);
6882 /* Return true if the points-to solution *PT includes global memory. */
6885 pt_solution_includes_global (struct pt_solution
*pt
)
6889 || pt
->vars_contains_nonlocal
6890 /* The following is a hack to make the malloc escape hack work.
6891 In reality we'd need different sets for escaped-through-return
6892 and escaped-to-callees and passes would need to be updated. */
6893 || pt
->vars_contains_escaped_heap
)
6896 /* 'escaped' is also a placeholder so we have to look into it. */
6898 return pt_solution_includes_global (&cfun
->gimple_df
->escaped
);
6900 if (pt
->ipa_escaped
)
6901 return pt_solution_includes_global (&ipa_escaped_pt
);
6906 /* Return true if the points-to solution *PT includes the variable
6907 declaration DECL. */
6910 pt_solution_includes_1 (struct pt_solution
*pt
, const_tree decl
)
6916 && is_global_var (decl
))
6920 && bitmap_bit_p (pt
->vars
, DECL_PT_UID (decl
)))
6923 /* If the solution includes ESCAPED, check it. */
6925 && pt_solution_includes_1 (&cfun
->gimple_df
->escaped
, decl
))
6928 /* If the solution includes ESCAPED, check it. */
6930 && pt_solution_includes_1 (&ipa_escaped_pt
, decl
))
/* Stats-counting wrapper around pt_solution_includes_1. */
6937 pt_solution_includes (struct pt_solution
*pt
, const_tree decl
)
6939 bool res
= pt_solution_includes_1 (pt
, decl
);
6941 ++pta_stats
.pt_solution_includes_may_alias
;
6943 ++pta_stats
.pt_solution_includes_no_alias
;
6947 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
6951 pt_solutions_intersect_1 (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
6953 if (pt1
->anything
|| pt2
->anything
)
6956 /* If either points to unknown global memory and the other points to
6957 any global memory they alias. */
6960 || pt2
->vars_contains_nonlocal
))
6962 && pt1
->vars_contains_nonlocal
))
6965 /* If either points to all escaped memory and the other points to
6966 any escaped memory they alias. */
6969 || pt2
->vars_contains_escaped
))
6971 && pt1
->vars_contains_escaped
))
6974 /* Check the escaped solution if required.
6975 ??? Do we need to check the local against the IPA escaped sets? */
6976 if ((pt1
->ipa_escaped
|| pt2
->ipa_escaped
)
6977 && !pt_solution_empty_p (&ipa_escaped_pt
))
6979 /* If both point to escaped memory and that solution
6980 is not empty they alias. */
6981 if (pt1
->ipa_escaped
&& pt2
->ipa_escaped
)
6984 /* If either points to escaped memory see if the escaped solution
6985 intersects with the other. */
6986 if ((pt1
->ipa_escaped
6987 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt2
))
6988 || (pt2
->ipa_escaped
6989 && pt_solutions_intersect_1 (&ipa_escaped_pt
, pt1
)))
6993 /* Now both pointers alias if their points-to solution intersects. */
6996 && bitmap_intersect_p (pt1
->vars
, pt2
->vars
));
/* Stats-counting wrapper around pt_solutions_intersect_1. */
7000 pt_solutions_intersect (struct pt_solution
*pt1
, struct pt_solution
*pt2
)
7002 bool res
= pt_solutions_intersect_1 (pt1
, pt2
);
7004 ++pta_stats
.pt_solutions_intersect_may_alias
;
7006 ++pta_stats
.pt_solutions_intersect_no_alias
;
7011 /* Dump points-to information to OUTFILE. */
7014 dump_sa_points_to_info (FILE *outfile
)
7018 fprintf (outfile
, "\nPoints-to sets\n\n");
7020 if (dump_flags
& TDF_STATS
)
7022 fprintf (outfile
, "Stats:\n");
7023 fprintf (outfile
, "Total vars: %d\n", stats
.total_vars
);
7024 fprintf (outfile
, "Non-pointer vars: %d\n",
7025 stats
.nonpointer_vars
);
7026 fprintf (outfile
, "Statically unified vars: %d\n",
7027 stats
.unified_vars_static
);
7028 fprintf (outfile
, "Dynamically unified vars: %d\n",
7029 stats
.unified_vars_dynamic
);
7030 fprintf (outfile
, "Iterations: %d\n", stats
.iterations
);
7031 fprintf (outfile
, "Number of edges: %d\n", stats
.num_edges
);
7032 fprintf (outfile
, "Number of implicit edges: %d\n",
7033 stats
.num_implicit_edges
);
/* Variable ID zero is reserved (NULL slot), so start at 1.  */
7036 for (i
= 1; i
< varmap
.length (); i
++)
7038 varinfo_t vi
= get_varinfo (i
);
7039 if (!vi
->may_have_pointers
)
7041 dump_solution_for_var (outfile
, i
);
7046 /* Debug points-to information to stderr. */
7049 debug_sa_points_to_info (void)
7051 dump_sa_points_to_info (stderr
);
7055 /* Initialize the always-existing constraint variables for NULL,
7056 ANYTHING, STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER. */
7059 init_base_vars (void)
7061 struct constraint_expr lhs
, rhs
;
7062 varinfo_t var_anything
;
7063 varinfo_t var_nothing
;
7064 varinfo_t var_string
;
7065 varinfo_t var_escaped
;
7066 varinfo_t var_nonlocal
;
7067 varinfo_t var_storedanything
;
7068 varinfo_t var_integer
;
7070 /* Variable ID zero is reserved and should be NULL. */
7071 varmap
.safe_push (NULL
);
7073 /* Create the NULL variable, used to represent that a variable points
7075 var_nothing
= new_var_info (NULL_TREE
, "NULL", false);
7076 gcc_assert (var_nothing
->id
== nothing_id
);
7077 var_nothing
->is_artificial_var
= 1;
7078 var_nothing
->offset
= 0;
7079 var_nothing
->size
= ~0;
7080 var_nothing
->fullsize
= ~0;
7081 var_nothing
->is_special_var
= 1;
7082 var_nothing
->may_have_pointers
= 0;
7083 var_nothing
->is_global_var
= 0;
7085 /* Create the ANYTHING variable, used to represent that a variable
7086 points to some unknown piece of memory. */
7087 var_anything
= new_var_info (NULL_TREE
, "ANYTHING", false);
7088 gcc_assert (var_anything
->id
== anything_id
);
7089 var_anything
->is_artificial_var
= 1;
7090 var_anything
->size
= ~0;
7091 var_anything
->offset
= 0;
7092 var_anything
->fullsize
= ~0;
7093 var_anything
->is_special_var
= 1;
7095 /* Anything points to anything. This makes deref constraints just
7096 work in the presence of linked list and other p = *p type loops,
7097 by saying that *ANYTHING = ANYTHING. */
7099 lhs
.var
= anything_id
;
7101 rhs
.type
= ADDRESSOF
;
7102 rhs
.var
= anything_id
;
7105 /* This specifically does not use process_constraint because
7106 process_constraint ignores all anything = anything constraints, since all
7107 but this one are redundant. */
7108 constraints
.safe_push (new_constraint (lhs
, rhs
));
7110 /* Create the STRING variable, used to represent that a variable
7111 points to a string literal. String literals don't contain
7112 pointers so STRING doesn't point to anything. */
7113 var_string
= new_var_info (NULL_TREE
, "STRING", false);
7114 gcc_assert (var_string
->id
== string_id
);
7115 var_string
->is_artificial_var
= 1;
7116 var_string
->offset
= 0;
7117 var_string
->size
= ~0;
7118 var_string
->fullsize
= ~0;
7119 var_string
->is_special_var
= 1;
7120 var_string
->may_have_pointers
= 0;
7122 /* Create the ESCAPED variable, used to represent the set of escaped
7124 var_escaped
= new_var_info (NULL_TREE
, "ESCAPED", false);
7125 gcc_assert (var_escaped
->id
== escaped_id
);
7126 var_escaped
->is_artificial_var
= 1;
7127 var_escaped
->offset
= 0;
7128 var_escaped
->size
= ~0;
7129 var_escaped
->fullsize
= ~0;
7130 var_escaped
->is_special_var
= 0;
7132 /* Create the NONLOCAL variable, used to represent the set of nonlocal
7134 var_nonlocal
= new_var_info (NULL_TREE
, "NONLOCAL", false);
7135 gcc_assert (var_nonlocal
->id
== nonlocal_id
);
7136 var_nonlocal
->is_artificial_var
= 1;
7137 var_nonlocal
->offset
= 0;
7138 var_nonlocal
->size
= ~0;
7139 var_nonlocal
->fullsize
= ~0;
7140 var_nonlocal
->is_special_var
= 1;
7142 /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc. */
7144 lhs
.var
= escaped_id
;
7147 rhs
.var
= escaped_id
;
7149 process_constraint (new_constraint (lhs
, rhs
));
7151 /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
7152 whole variable escapes. */
7154 lhs
.var
= escaped_id
;
7157 rhs
.var
= escaped_id
;
7158 rhs
.offset
= UNKNOWN_OFFSET
;
7159 process_constraint (new_constraint (lhs
, rhs
));
7161 /* *ESCAPED = NONLOCAL. This is true because we have to assume
7162 everything pointed to by escaped points to what global memory can
7165 lhs
.var
= escaped_id
;
7168 rhs
.var
= nonlocal_id
;
7170 process_constraint (new_constraint (lhs
, rhs
));
7172 /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED. This is true because
7173 global memory may point to global memory and escaped memory. */
7175 lhs
.var
= nonlocal_id
;
7177 rhs
.type
= ADDRESSOF
;
7178 rhs
.var
= nonlocal_id
;
7180 process_constraint (new_constraint (lhs
, rhs
));
7181 rhs
.type
= ADDRESSOF
;
7182 rhs
.var
= escaped_id
;
7184 process_constraint (new_constraint (lhs
, rhs
));
7186 /* Create the STOREDANYTHING variable, used to represent the set of
7187 variables stored to *ANYTHING. */
7188 var_storedanything
= new_var_info (NULL_TREE
, "STOREDANYTHING", false);
7189 gcc_assert (var_storedanything
->id
== storedanything_id
);
7190 var_storedanything
->is_artificial_var
= 1;
7191 var_storedanything
->offset
= 0;
7192 var_storedanything
->size
= ~0;
7193 var_storedanything
->fullsize
= ~0;
7194 var_storedanything
->is_special_var
= 0;
7196 /* Create the INTEGER variable, used to represent that a variable points
7197 to what an INTEGER "points to". */
7198 var_integer
= new_var_info (NULL_TREE
, "INTEGER", false);
7199 gcc_assert (var_integer
->id
== integer_id
);
7200 var_integer
->is_artificial_var
= 1;
7201 var_integer
->size
= ~0;
7202 var_integer
->fullsize
= ~0;
7203 var_integer
->offset
= 0;
7204 var_integer
->is_special_var
= 1;
7206 /* INTEGER = ANYTHING, because we don't know where a dereference of
7207 a random integer will point to. */
7209 lhs
.var
= integer_id
;
7211 rhs
.type
= ADDRESSOF
;
7212 rhs
.var
= anything_id
;
7214 process_constraint (new_constraint (lhs
, rhs
));
7217 /* Initialize things necessary to perform PTA */
7220 init_alias_vars (void)
/* A max-fields param of 1 or less effectively disables
   field-sensitivity.  */
7222 use_field_sensitive
= (param_max_fields_for_field_sensitive
> 1);
7224 bitmap_obstack_initialize (&pta_obstack
);
7225 bitmap_obstack_initialize (&oldpta_obstack
);
7226 bitmap_obstack_initialize (&predbitmap_obstack
);
7228 constraints
.create (8);
7230 vi_for_tree
= new hash_map
<tree
, varinfo_t
>;
7231 call_stmt_vars
= new hash_map
<gimple
*, varinfo_t
>;
7233 memset (&stats
, 0, sizeof (stats
));
7234 shared_bitmap_table
= new hash_table
<shared_bitmap_hasher
> (511);
7237 gcc_obstack_init (&fake_var_decl_obstack
);
7239 final_solutions
= new hash_map
<varinfo_t
, pt_solution
*>;
7240 gcc_obstack_init (&final_solutions_obstack
);
7243 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7244 predecessor edges. */
7247 remove_preds_and_fake_succs (constraint_graph_t graph
)
7251 /* Clear the implicit ref and address nodes from the successor
7253 for (i
= 1; i
< FIRST_REF_NODE
; i
++)
7255 if (graph
->succs
[i
])
7256 bitmap_clear_range (graph
->succs
[i
], FIRST_REF_NODE
,
7257 FIRST_REF_NODE
* 2);
7260 /* Free the successor list for the non-ref nodes. */
7261 for (i
= FIRST_REF_NODE
+ 1; i
< graph
->size
; i
++)
7263 if (graph
->succs
[i
])
7264 BITMAP_FREE (graph
->succs
[i
]);
7267 /* Now shrink the successor array to the remapped variable count and
7268 blow away the predecessor bitmaps. */
7269 graph
->size
= varmap
.length ();
7270 graph
->succs
= XRESIZEVEC (bitmap
, graph
->succs
, graph
->size
);
7272 free (graph
->implicit_preds
);
7273 graph
->implicit_preds
= NULL
;
7274 free (graph
->preds
);
7275 graph
->preds
= NULL
;
7276 bitmap_obstack_release (&predbitmap_obstack
);
7279 /* Solve the constraint set. */
7282 solve_constraints (void)
7286 /* Sort varinfos so that ones that cannot be pointed to are last.
7287 This makes bitmaps more efficient. */
7288 unsigned int *map
= XNEWVEC (unsigned int, varmap
.length ());
7289 for (unsigned i
= 0; i
< integer_id
+ 1; ++i
)
7291 /* Start with non-register vars (as possibly address-taken), followed
7292 by register vars as conservative set of vars never appearing in
7293 the points-to solution bitmaps. */
7294 unsigned j
= integer_id
+ 1;
7295 for (unsigned i
= integer_id
+ 1; i
< varmap
.length (); ++i
)
7296 if (! varmap
[i
]->is_reg_var
)
7298 for (unsigned i
= integer_id
+ 1; i
< varmap
.length (); ++i
)
7299 if (varmap
[i
]->is_reg_var
)
7301 /* Shuffle varmap according to map. */
7302 for (unsigned i
= integer_id
+ 1; i
< varmap
.length (); ++i
)
/* Place each varinfo at its remapped index by chasing swap cycles.  */
7304 while (map
[varmap
[i
]->id
] != i
)
7305 std::swap (varmap
[i
], varmap
[map
[varmap
[i
]->id
]]);
7306 gcc_assert (bitmap_empty_p (varmap
[i
]->solution
));
7308 varmap
[i
]->next
= map
[varmap
[i
]->next
];
7309 varmap
[i
]->head
= map
[varmap
[i
]->head
];
7311 /* Finally rewrite constraints. */
7312 for (unsigned i
= 0; i
< constraints
.length (); ++i
)
7314 constraints
[i
]->lhs
.var
= map
[constraints
[i
]->lhs
.var
];
7315 constraints
[i
]->rhs
.var
= map
[constraints
[i
]->rhs
.var
];
7321 "\nCollapsing static cycles and doing variable "
7324 init_graph (varmap
.length () * 2);
7327 fprintf (dump_file
, "Building predecessor graph\n");
7328 build_pred_graph ();
7331 fprintf (dump_file
, "Detecting pointer and location "
7333 si
= perform_var_substitution (graph
);
7336 fprintf (dump_file
, "Rewriting constraints and unifying "
7338 rewrite_constraints (graph
, si
);
7340 build_succ_graph ();
7342 free_var_substitution_info (si
);
7344 /* Attach complex constraints to graph nodes. */
7345 move_complex_constraints (graph
);
7348 fprintf (dump_file
, "Uniting pointer but not location equivalent "
7350 unite_pointer_equivalences (graph
);
7353 fprintf (dump_file
, "Finding indirect cycles\n");
7354 find_indirect_cycles (graph
);
7356 /* Implicit nodes and predecessors are no longer necessary at this
7358 remove_preds_and_fake_succs (graph
);
7360 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
7362 fprintf (dump_file
, "\n\n// The constraint graph before solve-graph "
7363 "in dot format:\n");
7364 dump_constraint_graph (dump_file
);
7365 fprintf (dump_file
, "\n\n");
7369 fprintf (dump_file
, "Solving graph\n");
7371 solve_graph (graph
);
7373 if (dump_file
&& (dump_flags
& TDF_GRAPH
))
7375 fprintf (dump_file
, "\n\n// The constraint graph after solve-graph "
7376 "in dot format:\n");
7377 dump_constraint_graph (dump_file
);
7378 fprintf (dump_file
, "\n\n");
7382 /* Create points-to sets for the current function. See the comments
7383 at the start of the file for an algorithmic overview. */
7386 compute_points_to_sets (void)
7391 timevar_push (TV_TREE_PTA
);
7395 intra_create_variable_infos (cfun
);
7397 /* Now walk all statements and build the constraint set. */
7398 FOR_EACH_BB_FN (bb
, cfun
)
7400 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
7403 gphi
*phi
= gsi
.phi ();
7405 if (! virtual_operand_p (gimple_phi_result (phi
)))
7406 find_func_aliases (cfun
, phi
);
7409 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
7412 gimple
*stmt
= gsi_stmt (gsi
);
7414 find_func_aliases (cfun
, stmt
);
7420 fprintf (dump_file
, "Points-to analysis\n\nConstraints:\n\n");
7421 dump_constraints (dump_file
, 0);
7424 /* From the constraints compute the points-to sets. */
7425 solve_constraints ();
7427 /* Post-process solutions for escapes through returns. */
7430 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
7431 if (greturn
*ret
= safe_dyn_cast
<greturn
*> (last_stmt (e
->src
)))
7433 tree val
= gimple_return_retval (ret
);
7434 /* ??? Easy to handle simple indirections with some work.
7435 Arbitrary references like foo.bar.baz are more difficult
7436 (but conservatively easy enough with just looking at the base).
7437 Mind to fixup find_func_aliases as well. */
7438 if (!val
|| !SSA_VAR_P (val
))
7440 /* returns happen last in non-IPA so they only influence
7441 the ESCAPED solution and we can filter local variables. */
7442 varinfo_t escaped_vi
= get_varinfo (find (escaped_id
));
7443 varinfo_t vi
= lookup_vi_for_tree (val
);
7444 bitmap delta
= BITMAP_ALLOC (&pta_obstack
);
7447 for (; vi
; vi
= vi_next (vi
))
7449 varinfo_t part_vi
= get_varinfo (find (vi
->id
));
7450 EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi
->solution
,
7451 escaped_vi
->solution
, 0, i
, bi
)
7453 varinfo_t pointed_to_vi
= get_varinfo (i
);
7454 if (pointed_to_vi
->is_global_var
7455 /* We delay marking of heap memory as global. */
7456 || pointed_to_vi
->is_heap_var
)
7457 bitmap_set_bit (delta
, i
);
7461 /* Now compute the transitive closure. */
7462 bitmap_ior_into (escaped_vi
->solution
, delta
);
7463 bitmap new_delta
= BITMAP_ALLOC (&pta_obstack
);
7464 while (!bitmap_empty_p (delta
))
7466 EXECUTE_IF_SET_IN_BITMAP (delta
, 0, i
, bi
)
7468 varinfo_t pointed_to_vi
= get_varinfo (i
);
7469 pointed_to_vi
= get_varinfo (find (pointed_to_vi
->id
));
7471 bitmap_iterator bi2
;
7472 EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi
->solution
,
7473 escaped_vi
->solution
,
7476 varinfo_t pointed_to_vi2
= get_varinfo (j
);
7477 if (pointed_to_vi2
->is_global_var
7478 /* We delay marking of heap memory as global. */
7479 || pointed_to_vi2
->is_heap_var
)
7480 bitmap_set_bit (new_delta
, j
);
7483 bitmap_ior_into (escaped_vi
->solution
, new_delta
);
7484 bitmap_clear (delta
);
7485 std::swap (delta
, new_delta
);
7487 BITMAP_FREE (delta
);
7488 BITMAP_FREE (new_delta
);
7492 dump_sa_points_to_info (dump_file
);
7494 /* Compute the points-to set for ESCAPED used for call-clobber analysis. */
7495 cfun
->gimple_df
->escaped
= find_what_var_points_to (cfun
->decl
,
7496 get_varinfo (escaped_id
));
7498 /* Make sure the ESCAPED solution (which is used as placeholder in
7499 other solutions) does not reference itself. This simplifies
7500 points-to solution queries. */
7501 cfun
->gimple_df
->escaped
.escaped
= 0;
7503 /* Compute the points-to sets for pointer SSA_NAMEs. */
7507 FOR_EACH_SSA_NAME (i
, ptr
, cfun
)
7509 if (POINTER_TYPE_P (TREE_TYPE (ptr
)))
7510 find_what_p_points_to (cfun
->decl
, ptr
);
7513 /* Compute the call-used/clobbered sets. */
7514 FOR_EACH_BB_FN (bb
, cfun
)
7516 gimple_stmt_iterator gsi
;
7518 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
7521 struct pt_solution
*pt
;
7523 stmt
= dyn_cast
<gcall
*> (gsi_stmt (gsi
));
7527 pt
= gimple_call_use_set (stmt
);
7528 if (gimple_call_flags (stmt
) & ECF_CONST
)
7529 memset (pt
, 0, sizeof (struct pt_solution
));
7530 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
7532 *pt
= find_what_var_points_to (cfun
->decl
, vi
);
7533 /* Escaped (and thus nonlocal) variables are always
7534 implicitly used by calls. */
7535 /* ??? ESCAPED can be empty even though NONLOCAL
7542 /* If there is nothing special about this call then
7543 we have made everything that is used also escape. */
7544 *pt
= cfun
->gimple_df
->escaped
;
7548 pt
= gimple_call_clobber_set (stmt
);
7549 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
7550 memset (pt
, 0, sizeof (struct pt_solution
));
7551 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
7553 *pt
= find_what_var_points_to (cfun
->decl
, vi
);
7554 /* Escaped (and thus nonlocal) variables are always
7555 implicitly clobbered by calls. */
7556 /* ??? ESCAPED can be empty even though NONLOCAL
7563 /* If there is nothing special about this call then
7564 we have made everything that is used also escape. */
7565 *pt
= cfun
->gimple_df
->escaped
;
7571 timevar_pop (TV_TREE_PTA
);
7575 /* Delete created points-to sets. */
7578 delete_points_to_sets (void)
7582 delete shared_bitmap_table
;
7583 shared_bitmap_table
= NULL
;
7584 if (dump_file
&& (dump_flags
& TDF_STATS
))
7585 fprintf (dump_file
, "Points to sets created:%d\n",
7586 stats
.points_to_sets_created
);
7589 delete call_stmt_vars
;
7590 bitmap_obstack_release (&pta_obstack
);
7591 constraints
.release ();
7593 for (i
= 0; i
< graph
->size
; i
++)
7594 graph
->complex[i
].release ();
7595 free (graph
->complex);
7598 free (graph
->succs
);
7600 free (graph
->pe_rep
);
7601 free (graph
->indirect_cycles
);
7605 variable_info_pool
.release ();
7606 constraint_pool
.release ();
7608 obstack_free (&fake_var_decl_obstack
, NULL
);
7610 delete final_solutions
;
7611 obstack_free (&final_solutions_obstack
, NULL
);
7616 unsigned short clique
;
7621 /* Mark "other" loads and stores as belonging to CLIQUE and with
7625 visit_loadstore (gimple
*, tree base
, tree ref
, void *data
)
7627 unsigned short clique
= ((vls_data
*) data
)->clique
;
7628 bitmap rvars
= ((vls_data
*) data
)->rvars
;
7629 bool escaped_p
= ((vls_data
*) data
)->escaped_p
;
7630 if (TREE_CODE (base
) == MEM_REF
7631 || TREE_CODE (base
) == TARGET_MEM_REF
)
7633 tree ptr
= TREE_OPERAND (base
, 0);
7634 if (TREE_CODE (ptr
) == SSA_NAME
)
7636 /* For parameters, get at the points-to set for the actual parm
7638 if (SSA_NAME_IS_DEFAULT_DEF (ptr
)
7639 && (TREE_CODE (SSA_NAME_VAR (ptr
)) == PARM_DECL
7640 || TREE_CODE (SSA_NAME_VAR (ptr
)) == RESULT_DECL
))
7641 ptr
= SSA_NAME_VAR (ptr
);
7643 /* We need to make sure 'ptr' doesn't include any of
7644 the restrict tags we added bases for in its points-to set. */
7645 varinfo_t vi
= lookup_vi_for_tree (ptr
);
7649 vi
= get_varinfo (find (vi
->id
));
7650 if (bitmap_intersect_p (rvars
, vi
->solution
)
7651 || (escaped_p
&& bitmap_bit_p (vi
->solution
, escaped_id
)))
7655 /* Do not overwrite existing cliques (that includes clique, base
7656 pairs we just set). */
7657 if (MR_DEPENDENCE_CLIQUE (base
) == 0)
7659 MR_DEPENDENCE_CLIQUE (base
) = clique
;
7660 MR_DEPENDENCE_BASE (base
) = 0;
7664 /* For plain decl accesses see whether they are accesses to globals
7665 and rewrite them to MEM_REFs with { clique, 0 }. */
7667 && is_global_var (base
)
7668 /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
7673 while (handled_component_p (*basep
))
7674 basep
= &TREE_OPERAND (*basep
, 0);
7675 gcc_assert (VAR_P (*basep
));
7676 tree ptr
= build_fold_addr_expr (*basep
);
7677 tree zero
= build_int_cst (TREE_TYPE (ptr
), 0);
7678 *basep
= build2 (MEM_REF
, TREE_TYPE (*basep
), ptr
, zero
);
7679 MR_DEPENDENCE_CLIQUE (*basep
) = clique
;
7680 MR_DEPENDENCE_BASE (*basep
) = 0;
7688 unsigned short *clique
;
7689 unsigned short *last_ruid
;
7690 varinfo_t restrict_var
;
7693 /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7694 CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7695 Return whether dependence info was assigned to BASE. */
7698 maybe_set_dependence_info (gimple
*, tree base
, tree
, void *data
)
7700 tree ptr
= ((msdi_data
*)data
)->ptr
;
7701 unsigned short &clique
= *((msdi_data
*)data
)->clique
;
7702 unsigned short &last_ruid
= *((msdi_data
*)data
)->last_ruid
;
7703 varinfo_t restrict_var
= ((msdi_data
*)data
)->restrict_var
;
7704 if ((TREE_CODE (base
) == MEM_REF
7705 || TREE_CODE (base
) == TARGET_MEM_REF
)
7706 && TREE_OPERAND (base
, 0) == ptr
)
7708 /* Do not overwrite existing cliques. This avoids overwriting dependence
7709 info inlined from a function with restrict parameters inlined
7710 into a function with restrict parameters. This usually means we
7711 prefer to be precise in innermost loops. */
7712 if (MR_DEPENDENCE_CLIQUE (base
) == 0)
7716 if (cfun
->last_clique
== 0)
7717 cfun
->last_clique
= 1;
7720 if (restrict_var
->ruid
== 0)
7721 restrict_var
->ruid
= ++last_ruid
;
7722 MR_DEPENDENCE_CLIQUE (base
) = clique
;
7723 MR_DEPENDENCE_BASE (base
) = restrict_var
->ruid
;
7730 /* Clear dependence info for the clique DATA. */
7733 clear_dependence_clique (gimple
*, tree base
, tree
, void *data
)
7735 unsigned short clique
= (uintptr_t)data
;
7736 if ((TREE_CODE (base
) == MEM_REF
7737 || TREE_CODE (base
) == TARGET_MEM_REF
)
7738 && MR_DEPENDENCE_CLIQUE (base
) == clique
)
7740 MR_DEPENDENCE_CLIQUE (base
) = 0;
7741 MR_DEPENDENCE_BASE (base
) = 0;
7747 /* Compute the set of independend memory references based on restrict
7748 tags and their conservative propagation to the points-to sets. */
7751 compute_dependence_clique (void)
7753 /* First clear the special "local" clique. */
7755 if (cfun
->last_clique
!= 0)
7756 FOR_EACH_BB_FN (bb
, cfun
)
7757 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
7758 !gsi_end_p (gsi
); gsi_next (&gsi
))
7760 gimple
*stmt
= gsi_stmt (gsi
);
7761 walk_stmt_load_store_ops (stmt
, (void *)(uintptr_t) 1,
7762 clear_dependence_clique
,
7763 clear_dependence_clique
);
7766 unsigned short clique
= 0;
7767 unsigned short last_ruid
= 0;
7768 bitmap rvars
= BITMAP_ALLOC (NULL
);
7769 bool escaped_p
= false;
7770 for (unsigned i
= 0; i
< num_ssa_names
; ++i
)
7772 tree ptr
= ssa_name (i
);
7773 if (!ptr
|| !POINTER_TYPE_P (TREE_TYPE (ptr
)))
7776 /* Avoid all this when ptr is not dereferenced? */
7778 if (SSA_NAME_IS_DEFAULT_DEF (ptr
)
7779 && (TREE_CODE (SSA_NAME_VAR (ptr
)) == PARM_DECL
7780 || TREE_CODE (SSA_NAME_VAR (ptr
)) == RESULT_DECL
))
7781 p
= SSA_NAME_VAR (ptr
);
7782 varinfo_t vi
= lookup_vi_for_tree (p
);
7785 vi
= get_varinfo (find (vi
->id
));
7788 varinfo_t restrict_var
= NULL
;
7789 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, j
, bi
)
7791 varinfo_t oi
= get_varinfo (j
);
7793 oi
= get_varinfo (oi
->head
);
7794 if (oi
->is_restrict_var
)
7797 && restrict_var
!= oi
)
7799 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7801 fprintf (dump_file
, "found restrict pointed-to "
7803 print_generic_expr (dump_file
, ptr
);
7804 fprintf (dump_file
, " but not exclusively\n");
7806 restrict_var
= NULL
;
7811 /* NULL is the only other valid points-to entry. */
7812 else if (oi
->id
!= nothing_id
)
7814 restrict_var
= NULL
;
7818 /* Ok, found that ptr must(!) point to a single(!) restrict
7820 /* ??? PTA isn't really a proper propagation engine to compute
7822 ??? We could handle merging of two restricts by unifying them. */
7825 /* Now look at possible dereferences of ptr. */
7826 imm_use_iterator ui
;
7829 msdi_data data
= { ptr
, &clique
, &last_ruid
, restrict_var
};
7830 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, ptr
)
7831 used
|= walk_stmt_load_store_ops (use_stmt
, &data
,
7832 maybe_set_dependence_info
,
7833 maybe_set_dependence_info
);
7836 /* Add all subvars to the set of restrict pointed-to set. */
7837 for (unsigned sv
= restrict_var
->head
; sv
!= 0;
7838 sv
= get_varinfo (sv
)->next
)
7839 bitmap_set_bit (rvars
, sv
);
7840 varinfo_t escaped
= get_varinfo (find (escaped_id
));
7841 if (bitmap_bit_p (escaped
->solution
, restrict_var
->id
))
7849 /* Assign the BASE id zero to all accesses not based on a restrict
7850 pointer. That way they get disambiguated against restrict
7851 accesses but not against each other. */
7852 /* ??? For restricts derived from globals (thus not incoming
7853 parameters) we can't restrict scoping properly thus the following
7854 is too aggressive there. For now we have excluded those globals from
7855 getting into the MR_DEPENDENCE machinery. */
7856 vls_data data
= { clique
, escaped_p
, rvars
};
7858 FOR_EACH_BB_FN (bb
, cfun
)
7859 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
7860 !gsi_end_p (gsi
); gsi_next (&gsi
))
7862 gimple
*stmt
= gsi_stmt (gsi
);
7863 walk_stmt_load_store_ops (stmt
, &data
,
7864 visit_loadstore
, visit_loadstore
);
7868 BITMAP_FREE (rvars
);
7871 /* Compute points-to information for every SSA_NAME pointer in the
7872 current function and compute the transitive closure of escaped
7873 variables to re-initialize the call-clobber states of local variables. */
7876 compute_may_aliases (void)
7878 if (cfun
->gimple_df
->ipa_pta
)
7882 fprintf (dump_file
, "\nNot re-computing points-to information "
7883 "because IPA points-to information is available.\n\n");
7885 /* But still dump what we have remaining it. */
7886 dump_alias_info (dump_file
);
7892 /* For each pointer P_i, determine the sets of variables that P_i may
7893 point-to. Compute the reachability set of escaped and call-used
7895 compute_points_to_sets ();
7897 /* Debugging dumps. */
7899 dump_alias_info (dump_file
);
7901 /* Compute restrict-based memory disambiguations. */
7902 compute_dependence_clique ();
7904 /* Deallocate memory used by aliasing data structures and the internal
7905 points-to solution. */
7906 delete_points_to_sets ();
7908 gcc_assert (!need_ssa_update_p (cfun
));
7913 /* A dummy pass to cause points-to information to be computed via
7914 TODO_rebuild_alias. */
7918 const pass_data pass_data_build_alias
=
7920 GIMPLE_PASS
, /* type */
7922 OPTGROUP_NONE
, /* optinfo_flags */
7923 TV_NONE
, /* tv_id */
7924 ( PROP_cfg
| PROP_ssa
), /* properties_required */
7925 0, /* properties_provided */
7926 0, /* properties_destroyed */
7927 0, /* todo_flags_start */
7928 TODO_rebuild_alias
, /* todo_flags_finish */
7931 class pass_build_alias
: public gimple_opt_pass
7934 pass_build_alias (gcc::context
*ctxt
)
7935 : gimple_opt_pass (pass_data_build_alias
, ctxt
)
7938 /* opt_pass methods: */
7939 virtual bool gate (function
*) { return flag_tree_pta
; }
7941 }; // class pass_build_alias
7946 make_pass_build_alias (gcc::context
*ctxt
)
7948 return new pass_build_alias (ctxt
);
7951 /* A dummy pass to cause points-to information to be computed via
7952 TODO_rebuild_alias. */
7956 const pass_data pass_data_build_ealias
=
7958 GIMPLE_PASS
, /* type */
7959 "ealias", /* name */
7960 OPTGROUP_NONE
, /* optinfo_flags */
7961 TV_NONE
, /* tv_id */
7962 ( PROP_cfg
| PROP_ssa
), /* properties_required */
7963 0, /* properties_provided */
7964 0, /* properties_destroyed */
7965 0, /* todo_flags_start */
7966 TODO_rebuild_alias
, /* todo_flags_finish */
7969 class pass_build_ealias
: public gimple_opt_pass
7972 pass_build_ealias (gcc::context
*ctxt
)
7973 : gimple_opt_pass (pass_data_build_ealias
, ctxt
)
7976 /* opt_pass methods: */
7977 virtual bool gate (function
*) { return flag_tree_pta
; }
7979 }; // class pass_build_ealias
7984 make_pass_build_ealias (gcc::context
*ctxt
)
7986 return new pass_build_ealias (ctxt
);
7990 /* IPA PTA solutions for ESCAPED. */
7991 struct pt_solution ipa_escaped_pt
7992 = { true, false, false, false, false,
7993 false, false, false, false, false, NULL
};
7995 /* Associate node with varinfo DATA. Worker for
7996 cgraph_for_symbol_thunks_and_aliases. */
7998 associate_varinfo_to_alias (struct cgraph_node
*node
, void *data
)
8002 && ! node
->inlined_to
))
8004 && !node
->ifunc_resolver
)
8005 insert_vi_for_tree (node
->decl
, (varinfo_t
)data
);
8009 /* Dump varinfo VI to FILE. */
8012 dump_varinfo (FILE *file
, varinfo_t vi
)
8017 fprintf (file
, "%u: %s\n", vi
->id
, vi
->name
);
8019 const char *sep
= " ";
8020 if (vi
->is_artificial_var
)
8021 fprintf (file
, "%sartificial", sep
);
8022 if (vi
->is_special_var
)
8023 fprintf (file
, "%sspecial", sep
);
8024 if (vi
->is_unknown_size_var
)
8025 fprintf (file
, "%sunknown-size", sep
);
8026 if (vi
->is_full_var
)
8027 fprintf (file
, "%sfull", sep
);
8028 if (vi
->is_heap_var
)
8029 fprintf (file
, "%sheap", sep
);
8030 if (vi
->may_have_pointers
)
8031 fprintf (file
, "%smay-have-pointers", sep
);
8032 if (vi
->only_restrict_pointers
)
8033 fprintf (file
, "%sonly-restrict-pointers", sep
);
8034 if (vi
->is_restrict_var
)
8035 fprintf (file
, "%sis-restrict-var", sep
);
8036 if (vi
->is_global_var
)
8037 fprintf (file
, "%sglobal", sep
);
8038 if (vi
->is_ipa_escape_point
)
8039 fprintf (file
, "%sipa-escape-point", sep
);
8041 fprintf (file
, "%sfn-info", sep
);
8043 fprintf (file
, "%srestrict-uid:%u", sep
, vi
->ruid
);
8045 fprintf (file
, "%snext:%u", sep
, vi
->next
);
8046 if (vi
->head
!= vi
->id
)
8047 fprintf (file
, "%shead:%u", sep
, vi
->head
);
8049 fprintf (file
, "%soffset:" HOST_WIDE_INT_PRINT_DEC
, sep
, vi
->offset
);
8050 if (vi
->size
!= ~(unsigned HOST_WIDE_INT
)0)
8051 fprintf (file
, "%ssize:" HOST_WIDE_INT_PRINT_DEC
, sep
, vi
->size
);
8052 if (vi
->fullsize
!= ~(unsigned HOST_WIDE_INT
)0
8053 && vi
->fullsize
!= vi
->size
)
8054 fprintf (file
, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC
, sep
,
8056 fprintf (file
, "\n");
8058 if (vi
->solution
&& !bitmap_empty_p (vi
->solution
))
8062 fprintf (file
, " solution: {");
8063 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, i
, bi
)
8064 fprintf (file
, " %u", i
);
8065 fprintf (file
, " }\n");
8068 if (vi
->oldsolution
&& !bitmap_empty_p (vi
->oldsolution
)
8069 && !bitmap_equal_p (vi
->solution
, vi
->oldsolution
))
8073 fprintf (file
, " oldsolution: {");
8074 EXECUTE_IF_SET_IN_BITMAP (vi
->oldsolution
, 0, i
, bi
)
8075 fprintf (file
, " %u", i
);
8076 fprintf (file
, " }\n");
8080 /* Dump varinfo VI to stderr. */
8083 debug_varinfo (varinfo_t vi
)
8085 dump_varinfo (stderr
, vi
);
8088 /* Dump varmap to FILE. */
8091 dump_varmap (FILE *file
)
8093 if (varmap
.length () == 0)
8096 fprintf (file
, "variables:\n");
8098 for (unsigned int i
= 0; i
< varmap
.length (); ++i
)
8100 varinfo_t vi
= get_varinfo (i
);
8101 dump_varinfo (file
, vi
);
8104 fprintf (file
, "\n");
8107 /* Dump varmap to stderr. */
8112 dump_varmap (stderr
);
8115 /* Compute whether node is refered to non-locally. Worker for
8116 cgraph_for_symbol_thunks_and_aliases. */
8118 refered_from_nonlocal_fn (struct cgraph_node
*node
, void *data
)
8120 bool *nonlocal_p
= (bool *)data
;
8121 *nonlocal_p
|= (node
->used_from_other_partition
8122 || DECL_EXTERNAL (node
->decl
)
8123 || TREE_PUBLIC (node
->decl
)
8124 || node
->force_output
8125 || lookup_attribute ("noipa", DECL_ATTRIBUTES (node
->decl
)));
8129 /* Same for varpool nodes. */
8131 refered_from_nonlocal_var (struct varpool_node
*node
, void *data
)
8133 bool *nonlocal_p
= (bool *)data
;
8134 *nonlocal_p
|= (node
->used_from_other_partition
8135 || DECL_EXTERNAL (node
->decl
)
8136 || TREE_PUBLIC (node
->decl
)
8137 || node
->force_output
);
8141 /* Execute the driver for IPA PTA. */
8143 ipa_pta_execute (void)
8145 struct cgraph_node
*node
;
8147 unsigned int from
= 0;
8153 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8155 symtab
->dump (dump_file
);
8156 fprintf (dump_file
, "\n");
8161 fprintf (dump_file
, "Generating generic constraints\n\n");
8162 dump_constraints (dump_file
, from
);
8163 fprintf (dump_file
, "\n");
8164 from
= constraints
.length ();
8167 /* Build the constraints. */
8168 FOR_EACH_DEFINED_FUNCTION (node
)
8171 /* Nodes without a body are not interesting. Especially do not
8172 visit clones at this point for now - we get duplicate decls
8173 there for inline clones at least. */
8174 if (!node
->has_gimple_body_p () || node
->inlined_to
)
8178 gcc_assert (!node
->clone_of
);
8180 /* For externally visible or attribute used annotated functions use
8181 local constraints for their arguments.
8182 For local functions we see all callers and thus do not need initial
8183 constraints for parameters. */
8184 bool nonlocal_p
= (node
->used_from_other_partition
8185 || DECL_EXTERNAL (node
->decl
)
8186 || TREE_PUBLIC (node
->decl
)
8187 || node
->force_output
8188 || lookup_attribute ("noipa",
8189 DECL_ATTRIBUTES (node
->decl
)));
8190 node
->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn
,
8193 vi
= create_function_info_for (node
->decl
,
8194 alias_get_name (node
->decl
), false,
8197 && from
!= constraints
.length ())
8200 "Generating initial constraints for %s",
8201 node
->dump_name ());
8202 if (DECL_ASSEMBLER_NAME_SET_P (node
->decl
))
8203 fprintf (dump_file
, " (%s)",
8205 (DECL_ASSEMBLER_NAME (node
->decl
)));
8206 fprintf (dump_file
, "\n\n");
8207 dump_constraints (dump_file
, from
);
8208 fprintf (dump_file
, "\n");
8210 from
= constraints
.length ();
8213 node
->call_for_symbol_thunks_and_aliases
8214 (associate_varinfo_to_alias
, vi
, true);
8217 /* Create constraints for global variables and their initializers. */
8218 FOR_EACH_VARIABLE (var
)
8220 if (var
->alias
&& var
->analyzed
)
8223 varinfo_t vi
= get_vi_for_tree (var
->decl
);
8225 /* For the purpose of IPA PTA unit-local globals are not
8227 bool nonlocal_p
= (DECL_EXTERNAL (var
->decl
)
8228 || TREE_PUBLIC (var
->decl
)
8229 || var
->used_from_other_partition
8230 || var
->force_output
);
8231 var
->call_for_symbol_and_aliases (refered_from_nonlocal_var
,
8234 vi
->is_ipa_escape_point
= true;
8238 && from
!= constraints
.length ())
8241 "Generating constraints for global initializers\n\n");
8242 dump_constraints (dump_file
, from
);
8243 fprintf (dump_file
, "\n");
8244 from
= constraints
.length ();
8247 FOR_EACH_DEFINED_FUNCTION (node
)
8249 struct function
*func
;
8252 /* Nodes without a body are not interesting. */
8253 if (!node
->has_gimple_body_p () || node
->clone_of
)
8259 "Generating constraints for %s", node
->dump_name ());
8260 if (DECL_ASSEMBLER_NAME_SET_P (node
->decl
))
8261 fprintf (dump_file
, " (%s)",
8263 (DECL_ASSEMBLER_NAME (node
->decl
)));
8264 fprintf (dump_file
, "\n");
8267 func
= DECL_STRUCT_FUNCTION (node
->decl
);
8268 gcc_assert (cfun
== NULL
);
8270 /* Build constriants for the function body. */
8271 FOR_EACH_BB_FN (bb
, func
)
8273 for (gphi_iterator gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
);
8276 gphi
*phi
= gsi
.phi ();
8278 if (! virtual_operand_p (gimple_phi_result (phi
)))
8279 find_func_aliases (func
, phi
);
8282 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
8285 gimple
*stmt
= gsi_stmt (gsi
);
8287 find_func_aliases (func
, stmt
);
8288 find_func_clobbers (func
, stmt
);
8294 fprintf (dump_file
, "\n");
8295 dump_constraints (dump_file
, from
);
8296 fprintf (dump_file
, "\n");
8297 from
= constraints
.length ();
8301 /* From the constraints compute the points-to sets. */
8302 solve_constraints ();
8305 dump_sa_points_to_info (dump_file
);
8307 /* Now post-process solutions to handle locals from different
8308 runtime instantiations coming in through recursive invocations. */
8309 unsigned shadow_var_cnt
= 0;
8310 for (unsigned i
= 1; i
< varmap
.length (); ++i
)
8312 varinfo_t fi
= get_varinfo (i
);
8315 /* Automatic variables pointed to by their containing functions
8316 parameters need this treatment. */
8317 for (varinfo_t ai
= first_vi_for_offset (fi
, fi_parm_base
);
8318 ai
; ai
= vi_next (ai
))
8320 varinfo_t vi
= get_varinfo (find (ai
->id
));
8323 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, j
, bi
)
8325 varinfo_t pt
= get_varinfo (j
);
8326 if (pt
->shadow_var_uid
== 0
8328 && auto_var_in_fn_p (pt
->decl
, fi
->decl
))
8330 pt
->shadow_var_uid
= allocate_decl_uid ();
8335 /* As well as global variables which are another way of passing
8336 arguments to recursive invocations. */
8337 else if (fi
->is_global_var
)
8339 for (varinfo_t ai
= fi
; ai
; ai
= vi_next (ai
))
8341 varinfo_t vi
= get_varinfo (find (ai
->id
));
8344 EXECUTE_IF_SET_IN_BITMAP (vi
->solution
, 0, j
, bi
)
8346 varinfo_t pt
= get_varinfo (j
);
8347 if (pt
->shadow_var_uid
== 0
8349 && auto_var_p (pt
->decl
))
8351 pt
->shadow_var_uid
= allocate_decl_uid ();
8358 if (shadow_var_cnt
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
8359 fprintf (dump_file
, "Allocated %u shadow variables for locals "
8360 "maybe leaking into recursive invocations of their containing "
8361 "functions\n", shadow_var_cnt
);
8363 /* Compute the global points-to sets for ESCAPED.
8364 ??? Note that the computed escape set is not correct
8365 for the whole unit as we fail to consider graph edges to
8366 externally visible functions. */
8367 ipa_escaped_pt
= find_what_var_points_to (NULL
, get_varinfo (escaped_id
));
8369 /* Make sure the ESCAPED solution (which is used as placeholder in
8370 other solutions) does not reference itself. This simplifies
8371 points-to solution queries. */
8372 ipa_escaped_pt
.ipa_escaped
= 0;
8374 /* Assign the points-to sets to the SSA names in the unit. */
8375 FOR_EACH_DEFINED_FUNCTION (node
)
8378 struct function
*fn
;
8382 /* Nodes without a body are not interesting. */
8383 if (!node
->has_gimple_body_p () || node
->clone_of
)
8386 fn
= DECL_STRUCT_FUNCTION (node
->decl
);
8388 /* Compute the points-to sets for pointer SSA_NAMEs. */
8389 FOR_EACH_VEC_ELT (*fn
->gimple_df
->ssa_names
, i
, ptr
)
8392 && POINTER_TYPE_P (TREE_TYPE (ptr
)))
8393 find_what_p_points_to (node
->decl
, ptr
);
8396 /* Compute the call-use and call-clobber sets for indirect calls
8397 and calls to external functions. */
8398 FOR_EACH_BB_FN (bb
, fn
)
8400 gimple_stmt_iterator gsi
;
8402 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
8405 struct pt_solution
*pt
;
8409 stmt
= dyn_cast
<gcall
*> (gsi_stmt (gsi
));
8413 /* Handle direct calls to functions with body. */
8414 decl
= gimple_call_fndecl (stmt
);
8417 tree called_decl
= NULL_TREE
;
8418 if (gimple_call_builtin_p (stmt
, BUILT_IN_GOMP_PARALLEL
))
8419 called_decl
= TREE_OPERAND (gimple_call_arg (stmt
, 0), 0);
8420 else if (gimple_call_builtin_p (stmt
, BUILT_IN_GOACC_PARALLEL
))
8421 called_decl
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
8423 if (called_decl
!= NULL_TREE
8424 && !fndecl_maybe_in_other_partition (called_decl
))
8429 && (fi
= lookup_vi_for_tree (decl
))
8432 *gimple_call_clobber_set (stmt
)
8433 = find_what_var_points_to
8434 (node
->decl
, first_vi_for_offset (fi
, fi_clobbers
));
8435 *gimple_call_use_set (stmt
)
8436 = find_what_var_points_to
8437 (node
->decl
, first_vi_for_offset (fi
, fi_uses
));
8439 /* Handle direct calls to external functions. */
8440 else if (decl
&& (!fi
|| fi
->decl
))
8442 pt
= gimple_call_use_set (stmt
);
8443 if (gimple_call_flags (stmt
) & ECF_CONST
)
8444 memset (pt
, 0, sizeof (struct pt_solution
));
8445 else if ((vi
= lookup_call_use_vi (stmt
)) != NULL
)
8447 *pt
= find_what_var_points_to (node
->decl
, vi
);
8448 /* Escaped (and thus nonlocal) variables are always
8449 implicitly used by calls. */
8450 /* ??? ESCAPED can be empty even though NONLOCAL
8453 pt
->ipa_escaped
= 1;
8457 /* If there is nothing special about this call then
8458 we have made everything that is used also escape. */
8459 *pt
= ipa_escaped_pt
;
8463 pt
= gimple_call_clobber_set (stmt
);
8464 if (gimple_call_flags (stmt
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
8465 memset (pt
, 0, sizeof (struct pt_solution
));
8466 else if ((vi
= lookup_call_clobber_vi (stmt
)) != NULL
)
8468 *pt
= find_what_var_points_to (node
->decl
, vi
);
8469 /* Escaped (and thus nonlocal) variables are always
8470 implicitly clobbered by calls. */
8471 /* ??? ESCAPED can be empty even though NONLOCAL
8474 pt
->ipa_escaped
= 1;
8478 /* If there is nothing special about this call then
8479 we have made everything that is used also escape. */
8480 *pt
= ipa_escaped_pt
;
8484 /* Handle indirect calls. */
8485 else if ((fi
= get_fi_for_callee (stmt
)))
8487 /* We need to accumulate all clobbers/uses of all possible
8489 fi
= get_varinfo (find (fi
->id
));
8490 /* If we cannot constrain the set of functions we'll end up
8491 calling we end up using/clobbering everything. */
8492 if (bitmap_bit_p (fi
->solution
, anything_id
)
8493 || bitmap_bit_p (fi
->solution
, nonlocal_id
)
8494 || bitmap_bit_p (fi
->solution
, escaped_id
))
8496 pt_solution_reset (gimple_call_clobber_set (stmt
));
8497 pt_solution_reset (gimple_call_use_set (stmt
));
8503 struct pt_solution
*uses
, *clobbers
;
8505 uses
= gimple_call_use_set (stmt
);
8506 clobbers
= gimple_call_clobber_set (stmt
);
8507 memset (uses
, 0, sizeof (struct pt_solution
));
8508 memset (clobbers
, 0, sizeof (struct pt_solution
));
8509 EXECUTE_IF_SET_IN_BITMAP (fi
->solution
, 0, i
, bi
)
8511 struct pt_solution sol
;
8513 vi
= get_varinfo (i
);
8514 if (!vi
->is_fn_info
)
8516 /* ??? We could be more precise here? */
8518 uses
->ipa_escaped
= 1;
8519 clobbers
->nonlocal
= 1;
8520 clobbers
->ipa_escaped
= 1;
8524 if (!uses
->anything
)
8526 sol
= find_what_var_points_to
8528 first_vi_for_offset (vi
, fi_uses
));
8529 pt_solution_ior_into (uses
, &sol
);
8531 if (!clobbers
->anything
)
8533 sol
= find_what_var_points_to
8535 first_vi_for_offset (vi
, fi_clobbers
));
8536 pt_solution_ior_into (clobbers
, &sol
);
8546 fn
->gimple_df
->ipa_pta
= true;
8548 /* We have to re-set the final-solution cache after each function
8549 because what is a "global" is dependent on function context. */
8550 final_solutions
->empty ();
8551 obstack_free (&final_solutions_obstack
, NULL
);
8552 gcc_obstack_init (&final_solutions_obstack
);
8555 delete_points_to_sets ();
8564 const pass_data pass_data_ipa_pta
=
8566 SIMPLE_IPA_PASS
, /* type */
8568 OPTGROUP_NONE
, /* optinfo_flags */
8569 TV_IPA_PTA
, /* tv_id */
8570 0, /* properties_required */
8571 0, /* properties_provided */
8572 0, /* properties_destroyed */
8573 0, /* todo_flags_start */
8574 0, /* todo_flags_finish */
8577 class pass_ipa_pta
: public simple_ipa_opt_pass
8580 pass_ipa_pta (gcc::context
*ctxt
)
8581 : simple_ipa_opt_pass (pass_data_ipa_pta
, ctxt
)
8584 /* opt_pass methods: */
8585 virtual bool gate (function
*)
8589 /* Don't bother doing anything if the program has errors. */
8593 opt_pass
* clone () { return new pass_ipa_pta (m_ctxt
); }
8595 virtual unsigned int execute (function
*) { return ipa_pta_execute (); }
8597 }; // class pass_ipa_pta
8601 simple_ipa_opt_pass
*
8602 make_pass_ipa_pta (gcc::context
*ctxt
)
8604 return new pass_ipa_pta (ctxt
);