1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2021 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
40 #include "fold-const.h"
45 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
56 #include "omp-general.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
64 #include "stringpool.h"
68 #include "omp-offload.h"
70 #include "tree-nested.h"
72 /* Hash set of poisoned variables in a bind expr. */
73 static hash_set
<tree
> *asan_poisoned_variables
= NULL
;
75 enum gimplify_omp_var_data
78 GOVD_EXPLICIT
= 0x000002,
79 GOVD_SHARED
= 0x000004,
80 GOVD_PRIVATE
= 0x000008,
81 GOVD_FIRSTPRIVATE
= 0x000010,
82 GOVD_LASTPRIVATE
= 0x000020,
83 GOVD_REDUCTION
= 0x000040,
86 GOVD_DEBUG_PRIVATE
= 0x000200,
87 GOVD_PRIVATE_OUTER_REF
= 0x000400,
88 GOVD_LINEAR
= 0x000800,
89 GOVD_ALIGNED
= 0x001000,
91 /* Flag for GOVD_MAP: don't copy back. */
92 GOVD_MAP_TO_ONLY
= 0x002000,
94 /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference. */
95 GOVD_LINEAR_LASTPRIVATE_NO_OUTER
= 0x004000,
97 GOVD_MAP_0LEN_ARRAY
= 0x008000,
99 /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping. */
100 GOVD_MAP_ALWAYS_TO
= 0x010000,
102 /* Flag for shared vars that are or might be stored to in the region. */
103 GOVD_WRITTEN
= 0x020000,
105 /* Flag for GOVD_MAP, if it is a forced mapping. */
106 GOVD_MAP_FORCE
= 0x040000,
108 /* Flag for GOVD_MAP: must be present already. */
109 GOVD_MAP_FORCE_PRESENT
= 0x080000,
111 /* Flag for GOVD_MAP: only allocate. */
112 GOVD_MAP_ALLOC_ONLY
= 0x100000,
114 /* Flag for GOVD_MAP: only copy back. */
115 GOVD_MAP_FROM_ONLY
= 0x200000,
117 GOVD_NONTEMPORAL
= 0x400000,
119 /* Flag for GOVD_LASTPRIVATE: conditional modifier. */
120 GOVD_LASTPRIVATE_CONDITIONAL
= 0x800000,
122 GOVD_CONDTEMP
= 0x1000000,
124 /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause. */
125 GOVD_REDUCTION_INSCAN
= 0x2000000,
127 /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
129 GOVD_MAP_HAS_ATTACHMENTS
= 8388608,
131 GOVD_DATA_SHARE_CLASS
= (GOVD_SHARED
| GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
132 | GOVD_LASTPRIVATE
| GOVD_REDUCTION
| GOVD_LINEAR
139 ORT_WORKSHARE
= 0x00,
140 ORT_TASKGROUP
= 0x01,
144 ORT_COMBINED_PARALLEL
= ORT_PARALLEL
| 1,
147 ORT_UNTIED_TASK
= ORT_TASK
| 1,
148 ORT_TASKLOOP
= ORT_TASK
| 2,
149 ORT_UNTIED_TASKLOOP
= ORT_UNTIED_TASK
| 2,
152 ORT_COMBINED_TEAMS
= ORT_TEAMS
| 1,
153 ORT_HOST_TEAMS
= ORT_TEAMS
| 2,
154 ORT_COMBINED_HOST_TEAMS
= ORT_COMBINED_TEAMS
| 2,
157 ORT_TARGET_DATA
= 0x40,
159 /* Data region with offloading. */
161 ORT_COMBINED_TARGET
= ORT_TARGET
| 1,
162 ORT_IMPLICIT_TARGET
= ORT_TARGET
| 2,
164 /* OpenACC variants. */
165 ORT_ACC
= 0x100, /* A generic OpenACC region. */
166 ORT_ACC_DATA
= ORT_ACC
| ORT_TARGET_DATA
, /* Data construct. */
167 ORT_ACC_PARALLEL
= ORT_ACC
| ORT_TARGET
, /* Parallel construct */
168 ORT_ACC_KERNELS
= ORT_ACC
| ORT_TARGET
| 2, /* Kernels construct. */
169 ORT_ACC_SERIAL
= ORT_ACC
| ORT_TARGET
| 4, /* Serial construct. */
170 ORT_ACC_HOST_DATA
= ORT_ACC
| ORT_TARGET_DATA
| 2, /* Host data. */
172 /* Dummy OpenMP region, used to disable expansion of
173 DECL_VALUE_EXPRs in taskloop pre body. */
177 /* Gimplify hashtable helper. */
179 struct gimplify_hasher
: free_ptr_hash
<elt_t
>
181 static inline hashval_t
hash (const elt_t
*);
182 static inline bool equal (const elt_t
*, const elt_t
*);
187 struct gimplify_ctx
*prev_context
;
189 vec
<gbind
*> bind_expr_stack
;
191 gimple_seq conditional_cleanups
;
195 vec
<tree
> case_labels
;
196 hash_set
<tree
> *live_switch_vars
;
197 /* The formal temporary table. Should this be persistent? */
198 hash_table
<gimplify_hasher
> *temp_htab
;
201 unsigned into_ssa
: 1;
202 unsigned allow_rhs_cond_expr
: 1;
203 unsigned in_cleanup_point_expr
: 1;
204 unsigned keep_stack
: 1;
205 unsigned save_stack
: 1;
206 unsigned in_switch_expr
: 1;
209 enum gimplify_defaultmap_kind
217 struct gimplify_omp_ctx
219 struct gimplify_omp_ctx
*outer_context
;
220 splay_tree variables
;
221 hash_set
<tree
> *privatized_types
;
223 /* Iteration variables in an OMP_FOR. */
224 vec
<tree
> loop_iter_var
;
226 enum omp_clause_default_kind default_kind
;
227 enum omp_region_type region_type
;
231 bool target_firstprivatize_array_bases
;
233 bool order_concurrent
;
239 static struct gimplify_ctx
*gimplify_ctxp
;
240 static struct gimplify_omp_ctx
*gimplify_omp_ctxp
;
241 static bool in_omp_construct
;
243 /* Forward declaration. */
244 static enum gimplify_status
gimplify_compound_expr (tree
*, gimple_seq
*, bool);
245 static hash_map
<tree
, tree
> *oacc_declare_returns
;
246 static enum gimplify_status
gimplify_expr (tree
*, gimple_seq
*, gimple_seq
*,
247 bool (*) (tree
), fallback_t
, bool);
249 /* Shorter alias name for the above function for use in gimplify.c
253 gimplify_seq_add_stmt (gimple_seq
*seq_p
, gimple
*gs
)
255 gimple_seq_add_stmt_without_update (seq_p
, gs
);
258 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
259 NULL, a new sequence is allocated. This function is
260 similar to gimple_seq_add_seq, but does not scan the operands.
261 During gimplification, we need to manipulate statement sequences
262 before the def/use vectors have been constructed. */
265 gimplify_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
267 gimple_stmt_iterator si
;
272 si
= gsi_last (*dst_p
);
273 gsi_insert_seq_after_without_update (&si
, src
, GSI_NEW_STMT
);
277 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
278 and popping gimplify contexts. */
280 static struct gimplify_ctx
*ctx_pool
= NULL
;
282 /* Return a gimplify context struct from the pool. */
284 static inline struct gimplify_ctx
*
287 struct gimplify_ctx
* c
= ctx_pool
;
290 ctx_pool
= c
->prev_context
;
292 c
= XNEW (struct gimplify_ctx
);
294 memset (c
, '\0', sizeof (*c
));
298 /* Put gimplify context C back into the pool. */
301 ctx_free (struct gimplify_ctx
*c
)
303 c
->prev_context
= ctx_pool
;
307 /* Free allocated ctx stack memory. */
310 free_gimplify_stack (void)
312 struct gimplify_ctx
*c
;
314 while ((c
= ctx_pool
))
316 ctx_pool
= c
->prev_context
;
322 /* Set up a context for the gimplifier. */
325 push_gimplify_context (bool in_ssa
, bool rhs_cond_ok
)
327 struct gimplify_ctx
*c
= ctx_alloc ();
329 c
->prev_context
= gimplify_ctxp
;
331 gimplify_ctxp
->into_ssa
= in_ssa
;
332 gimplify_ctxp
->allow_rhs_cond_expr
= rhs_cond_ok
;
335 /* Tear down a context for the gimplifier. If BODY is non-null, then
336 put the temporaries into the outer BIND_EXPR. Otherwise, put them
339 BODY is not a sequence, but the first tuple in a sequence. */
342 pop_gimplify_context (gimple
*body
)
344 struct gimplify_ctx
*c
= gimplify_ctxp
;
347 && (!c
->bind_expr_stack
.exists ()
348 || c
->bind_expr_stack
.is_empty ()));
349 c
->bind_expr_stack
.release ();
350 gimplify_ctxp
= c
->prev_context
;
353 declare_vars (c
->temps
, body
, false);
355 record_vars (c
->temps
);
362 /* Push a GIMPLE_BIND tuple onto the stack of bindings. */
365 gimple_push_bind_expr (gbind
*bind_stmt
)
367 gimplify_ctxp
->bind_expr_stack
.reserve (8);
368 gimplify_ctxp
->bind_expr_stack
.safe_push (bind_stmt
);
371 /* Pop the first element off the stack of bindings. */
374 gimple_pop_bind_expr (void)
376 gimplify_ctxp
->bind_expr_stack
.pop ();
379 /* Return the first element of the stack of bindings. */
382 gimple_current_bind_expr (void)
384 return gimplify_ctxp
->bind_expr_stack
.last ();
387 /* Return the stack of bindings created during gimplification. */
390 gimple_bind_expr_stack (void)
392 return gimplify_ctxp
->bind_expr_stack
;
395 /* Return true iff there is a COND_EXPR between us and the innermost
396 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
399 gimple_conditional_context (void)
401 return gimplify_ctxp
->conditions
> 0;
404 /* Note that we've entered a COND_EXPR. */
407 gimple_push_condition (void)
409 #ifdef ENABLE_GIMPLE_CHECKING
410 if (gimplify_ctxp
->conditions
== 0)
411 gcc_assert (gimple_seq_empty_p (gimplify_ctxp
->conditional_cleanups
));
413 ++(gimplify_ctxp
->conditions
);
416 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
417 now, add any conditional cleanups we've seen to the prequeue. */
420 gimple_pop_condition (gimple_seq
*pre_p
)
422 int conds
= --(gimplify_ctxp
->conditions
);
424 gcc_assert (conds
>= 0);
427 gimplify_seq_add_seq (pre_p
, gimplify_ctxp
->conditional_cleanups
);
428 gimplify_ctxp
->conditional_cleanups
= NULL
;
432 /* A stable comparison routine for use with splay trees and DECLs. */
435 splay_tree_compare_decl_uid (splay_tree_key xa
, splay_tree_key xb
)
440 return DECL_UID (a
) - DECL_UID (b
);
443 /* Create a new omp construct that deals with variable remapping. */
445 static struct gimplify_omp_ctx
*
446 new_omp_context (enum omp_region_type region_type
)
448 struct gimplify_omp_ctx
*c
;
450 c
= XCNEW (struct gimplify_omp_ctx
);
451 c
->outer_context
= gimplify_omp_ctxp
;
452 c
->variables
= splay_tree_new (splay_tree_compare_decl_uid
, 0, 0);
453 c
->privatized_types
= new hash_set
<tree
>;
454 c
->location
= input_location
;
455 c
->region_type
= region_type
;
456 if ((region_type
& ORT_TASK
) == 0)
457 c
->default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
459 c
->default_kind
= OMP_CLAUSE_DEFAULT_UNSPECIFIED
;
460 c
->defaultmap
[GDMK_SCALAR
] = GOVD_MAP
;
461 c
->defaultmap
[GDMK_AGGREGATE
] = GOVD_MAP
;
462 c
->defaultmap
[GDMK_ALLOCATABLE
] = GOVD_MAP
;
463 c
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
;
468 /* Destroy an omp construct that deals with variable remapping. */
471 delete_omp_context (struct gimplify_omp_ctx
*c
)
473 splay_tree_delete (c
->variables
);
474 delete c
->privatized_types
;
475 c
->loop_iter_var
.release ();
479 static void omp_add_variable (struct gimplify_omp_ctx
*, tree
, unsigned int);
480 static bool omp_notice_variable (struct gimplify_omp_ctx
*, tree
, bool);
482 /* Both gimplify the statement T and append it to *SEQ_P. This function
483 behaves exactly as gimplify_stmt, but you don't have to pass T as a
487 gimplify_and_add (tree t
, gimple_seq
*seq_p
)
489 gimplify_stmt (&t
, seq_p
);
492 /* Gimplify statement T into sequence *SEQ_P, and return the first
493 tuple in the sequence of generated tuples for this statement.
494 Return NULL if gimplifying T produced no tuples. */
497 gimplify_and_return_first (tree t
, gimple_seq
*seq_p
)
499 gimple_stmt_iterator last
= gsi_last (*seq_p
);
501 gimplify_and_add (t
, seq_p
);
503 if (!gsi_end_p (last
))
506 return gsi_stmt (last
);
509 return gimple_seq_first_stmt (*seq_p
);
512 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
513 LHS, or for a call argument. */
516 is_gimple_mem_rhs (tree t
)
518 /* If we're dealing with a renamable type, either source or dest must be
519 a renamed variable. */
520 if (is_gimple_reg_type (TREE_TYPE (t
)))
521 return is_gimple_val (t
);
523 return is_gimple_val (t
) || is_gimple_lvalue (t
);
526 /* Return true if T is a CALL_EXPR or an expression that can be
527 assigned to a temporary. Note that this predicate should only be
528 used during gimplification. See the rationale for this in
529 gimplify_modify_expr. */
532 is_gimple_reg_rhs_or_call (tree t
)
534 return (get_gimple_rhs_class (TREE_CODE (t
)) != GIMPLE_INVALID_RHS
535 || TREE_CODE (t
) == CALL_EXPR
);
538 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
539 this predicate should only be used during gimplification. See the
540 rationale for this in gimplify_modify_expr. */
543 is_gimple_mem_rhs_or_call (tree t
)
545 /* If we're dealing with a renamable type, either source or dest must be
546 a renamed variable. */
547 if (is_gimple_reg_type (TREE_TYPE (t
)))
548 return is_gimple_val (t
);
550 return (is_gimple_val (t
)
551 || is_gimple_lvalue (t
)
552 || TREE_CLOBBER_P (t
)
553 || TREE_CODE (t
) == CALL_EXPR
);
556 /* Create a temporary with a name derived from VAL. Subroutine of
557 lookup_tmp_var; nobody else should call this function. */
560 create_tmp_from_val (tree val
)
562 /* Drop all qualifiers and address-space information from the value type. */
563 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (val
));
564 tree var
= create_tmp_var (type
, get_name (val
));
568 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
569 an existing expression temporary. */
572 lookup_tmp_var (tree val
, bool is_formal
)
576 /* If not optimizing, never really reuse a temporary. local-alloc
577 won't allocate any variable that is used in more than one basic
578 block, which means it will go into memory, causing much extra
579 work in reload and final and poorer code generation, outweighing
580 the extra memory allocation here. */
581 if (!optimize
|| !is_formal
|| TREE_SIDE_EFFECTS (val
))
582 ret
= create_tmp_from_val (val
);
589 if (!gimplify_ctxp
->temp_htab
)
590 gimplify_ctxp
->temp_htab
= new hash_table
<gimplify_hasher
> (1000);
591 slot
= gimplify_ctxp
->temp_htab
->find_slot (&elt
, INSERT
);
594 elt_p
= XNEW (elt_t
);
596 elt_p
->temp
= ret
= create_tmp_from_val (val
);
609 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
612 internal_get_tmp_var (tree val
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
613 bool is_formal
, bool allow_ssa
)
617 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
618 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
619 gimplify_expr (&val
, pre_p
, post_p
, is_gimple_reg_rhs_or_call
,
623 && gimplify_ctxp
->into_ssa
624 && is_gimple_reg_type (TREE_TYPE (val
)))
626 t
= make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val
)));
627 if (! gimple_in_ssa_p (cfun
))
629 const char *name
= get_name (val
);
631 SET_SSA_NAME_VAR_OR_IDENTIFIER (t
, create_tmp_var_name (name
));
635 t
= lookup_tmp_var (val
, is_formal
);
637 mod
= build2 (INIT_EXPR
, TREE_TYPE (t
), t
, unshare_expr (val
));
639 SET_EXPR_LOCATION (mod
, EXPR_LOC_OR_LOC (val
, input_location
));
641 /* gimplify_modify_expr might want to reduce this further. */
642 gimplify_and_add (mod
, pre_p
);
648 /* Return a formal temporary variable initialized with VAL. PRE_P is as
649 in gimplify_expr. Only use this function if:
651 1) The value of the unfactored expression represented by VAL will not
652 change between the initialization and use of the temporary, and
653 2) The temporary will not be otherwise modified.
655 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
656 and #2 means it is inappropriate for && temps.
658 For other cases, use get_initialized_tmp_var instead. */
661 get_formal_tmp_var (tree val
, gimple_seq
*pre_p
)
663 return internal_get_tmp_var (val
, pre_p
, NULL
, true, true);
666 /* Return a temporary variable initialized with VAL. PRE_P and POST_P
667 are as in gimplify_expr. */
670 get_initialized_tmp_var (tree val
, gimple_seq
*pre_p
,
671 gimple_seq
*post_p
/* = NULL */,
672 bool allow_ssa
/* = true */)
674 return internal_get_tmp_var (val
, pre_p
, post_p
, false, allow_ssa
);
677 /* Declare all the variables in VARS in SCOPE. If DEBUG_INFO is true,
678 generate debug info for them; otherwise don't. */
681 declare_vars (tree vars
, gimple
*gs
, bool debug_info
)
688 gbind
*scope
= as_a
<gbind
*> (gs
);
690 temps
= nreverse (last
);
692 block
= gimple_bind_block (scope
);
693 gcc_assert (!block
|| TREE_CODE (block
) == BLOCK
);
694 if (!block
|| !debug_info
)
696 DECL_CHAIN (last
) = gimple_bind_vars (scope
);
697 gimple_bind_set_vars (scope
, temps
);
701 /* We need to attach the nodes both to the BIND_EXPR and to its
702 associated BLOCK for debugging purposes. The key point here
703 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
704 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
705 if (BLOCK_VARS (block
))
706 BLOCK_VARS (block
) = chainon (BLOCK_VARS (block
), temps
);
709 gimple_bind_set_vars (scope
,
710 chainon (gimple_bind_vars (scope
), temps
));
711 BLOCK_VARS (block
) = temps
;
717 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
718 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
719 no such upper bound can be obtained. */
722 force_constant_size (tree var
)
724 /* The only attempt we make is by querying the maximum size of objects
725 of the variable's type. */
727 HOST_WIDE_INT max_size
;
729 gcc_assert (VAR_P (var
));
731 max_size
= max_int_size_in_bytes (TREE_TYPE (var
));
733 gcc_assert (max_size
>= 0);
736 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var
)), max_size
);
738 = build_int_cst (TREE_TYPE (DECL_SIZE (var
)), max_size
* BITS_PER_UNIT
);
741 /* Push the temporary variable TMP into the current binding. */
744 gimple_add_tmp_var_fn (struct function
*fn
, tree tmp
)
746 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
748 /* Later processing assumes that the object size is constant, which might
749 not be true at this point. Force the use of a constant upper bound in
751 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
752 force_constant_size (tmp
);
754 DECL_CONTEXT (tmp
) = fn
->decl
;
755 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
757 record_vars_into (tmp
, fn
->decl
);
760 /* Push the temporary variable TMP into the current binding. */
763 gimple_add_tmp_var (tree tmp
)
765 gcc_assert (!DECL_CHAIN (tmp
) && !DECL_SEEN_IN_BIND_EXPR_P (tmp
));
767 /* Later processing assumes that the object size is constant, which might
768 not be true at this point. Force the use of a constant upper bound in
770 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp
)))
771 force_constant_size (tmp
);
773 DECL_CONTEXT (tmp
) = current_function_decl
;
774 DECL_SEEN_IN_BIND_EXPR_P (tmp
) = 1;
778 DECL_CHAIN (tmp
) = gimplify_ctxp
->temps
;
779 gimplify_ctxp
->temps
= tmp
;
781 /* Mark temporaries local within the nearest enclosing parallel. */
782 if (gimplify_omp_ctxp
)
784 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
785 int flag
= GOVD_LOCAL
| GOVD_SEEN
;
787 && (ctx
->region_type
== ORT_WORKSHARE
788 || ctx
->region_type
== ORT_TASKGROUP
789 || ctx
->region_type
== ORT_SIMD
790 || ctx
->region_type
== ORT_ACC
))
792 if (ctx
->region_type
== ORT_SIMD
793 && TREE_ADDRESSABLE (tmp
)
794 && !TREE_STATIC (tmp
))
796 if (TREE_CODE (DECL_SIZE_UNIT (tmp
)) != INTEGER_CST
)
797 ctx
->add_safelen1
= true;
798 else if (ctx
->in_for_exprs
)
801 flag
= GOVD_PRIVATE
| GOVD_SEEN
;
804 ctx
= ctx
->outer_context
;
807 omp_add_variable (ctx
, tmp
, flag
);
816 /* This case is for nested functions. We need to expose the locals
818 body_seq
= gimple_body (current_function_decl
);
819 declare_vars (tmp
, gimple_seq_first_stmt (body_seq
), false);
825 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
826 nodes that are referenced more than once in GENERIC functions. This is
827 necessary because gimplification (translation into GIMPLE) is performed
828 by modifying tree nodes in-place, so gimplication of a shared node in a
829 first context could generate an invalid GIMPLE form in a second context.
831 This is achieved with a simple mark/copy/unmark algorithm that walks the
832 GENERIC representation top-down, marks nodes with TREE_VISITED the first
833 time it encounters them, duplicates them if they already have TREE_VISITED
834 set, and finally removes the TREE_VISITED marks it has set.
836 The algorithm works only at the function level, i.e. it generates a GENERIC
837 representation of a function with no nodes shared within the function when
838 passed a GENERIC function (except for nodes that are allowed to be shared).
840 At the global level, it is also necessary to unshare tree nodes that are
841 referenced in more than one function, for the same aforementioned reason.
842 This requires some cooperation from the front-end. There are 2 strategies:
844 1. Manual unsharing. The front-end needs to call unshare_expr on every
845 expression that might end up being shared across functions.
847 2. Deep unsharing. This is an extension of regular unsharing. Instead
848 of calling unshare_expr on expressions that might be shared across
849 functions, the front-end pre-marks them with TREE_VISITED. This will
850 ensure that they are unshared on the first reference within functions
851 when the regular unsharing algorithm runs. The counterpart is that
852 this algorithm must look deeper than for manual unsharing, which is
853 specified by LANG_HOOKS_DEEP_UNSHARING.
855 If there are only few specific cases of node sharing across functions, it is
856 probably easier for a front-end to unshare the expressions manually. On the
857 contrary, if the expressions generated at the global level are as widespread
858 as expressions generated within functions, deep unsharing is very likely the
861 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
862 These nodes model computations that must be done once. If we were to
863 unshare something like SAVE_EXPR(i++), the gimplification process would
864 create wrong code. However, if DATA is non-null, it must hold a pointer
865 set that is used to unshare the subtrees of these nodes. */
868 mostly_copy_tree_r (tree
*tp
, int *walk_subtrees
, void *data
)
871 enum tree_code code
= TREE_CODE (t
);
873 /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
874 copy their subtrees if we can make sure to do it only once. */
875 if (code
== SAVE_EXPR
|| code
== TARGET_EXPR
|| code
== BIND_EXPR
)
877 if (data
&& !((hash_set
<tree
> *)data
)->add (t
))
883 /* Stop at types, decls, constants like copy_tree_r. */
884 else if (TREE_CODE_CLASS (code
) == tcc_type
885 || TREE_CODE_CLASS (code
) == tcc_declaration
886 || TREE_CODE_CLASS (code
) == tcc_constant
)
889 /* Cope with the statement expression extension. */
890 else if (code
== STATEMENT_LIST
)
893 /* Leave the bulk of the work to copy_tree_r itself. */
895 copy_tree_r (tp
, walk_subtrees
, NULL
);
900 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
901 If *TP has been visited already, then *TP is deeply copied by calling
902 mostly_copy_tree_r. DATA is passed to mostly_copy_tree_r unmodified. */
905 copy_if_shared_r (tree
*tp
, int *walk_subtrees
, void *data
)
908 enum tree_code code
= TREE_CODE (t
);
910 /* Skip types, decls, and constants. But we do want to look at their
911 types and the bounds of types. Mark them as visited so we properly
912 unmark their subtrees on the unmark pass. If we've already seen them,
913 don't look down further. */
914 if (TREE_CODE_CLASS (code
) == tcc_type
915 || TREE_CODE_CLASS (code
) == tcc_declaration
916 || TREE_CODE_CLASS (code
) == tcc_constant
)
918 if (TREE_VISITED (t
))
921 TREE_VISITED (t
) = 1;
924 /* If this node has been visited already, unshare it and don't look
926 else if (TREE_VISITED (t
))
928 walk_tree (tp
, mostly_copy_tree_r
, data
, NULL
);
932 /* Otherwise, mark the node as visited and keep looking. */
934 TREE_VISITED (t
) = 1;
939 /* Unshare most of the shared trees rooted at *TP. DATA is passed to the
940 copy_if_shared_r callback unmodified. */
943 copy_if_shared (tree
*tp
, void *data
)
945 walk_tree (tp
, copy_if_shared_r
, data
, NULL
);
948 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
949 any nested functions. */
952 unshare_body (tree fndecl
)
954 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
955 /* If the language requires deep unsharing, we need a pointer set to make
956 sure we don't repeatedly unshare subtrees of unshareable nodes. */
957 hash_set
<tree
> *visited
958 = lang_hooks
.deep_unsharing
? new hash_set
<tree
> : NULL
;
960 copy_if_shared (&DECL_SAVED_TREE (fndecl
), visited
);
961 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl
)), visited
);
962 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)), visited
);
967 for (cgn
= first_nested_function (cgn
); cgn
;
968 cgn
= next_nested_function (cgn
))
969 unshare_body (cgn
->decl
);
972 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
973 Subtrees are walked until the first unvisited node is encountered. */
976 unmark_visited_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
980 /* If this node has been visited, unmark it and keep looking. */
981 if (TREE_VISITED (t
))
982 TREE_VISITED (t
) = 0;
984 /* Otherwise, don't look any deeper. */
991 /* Unmark the visited trees rooted at *TP. */
994 unmark_visited (tree
*tp
)
996 walk_tree (tp
, unmark_visited_r
, NULL
, NULL
);
999 /* Likewise, but mark all trees as not visited. */
1002 unvisit_body (tree fndecl
)
1004 struct cgraph_node
*cgn
= cgraph_node::get (fndecl
);
1006 unmark_visited (&DECL_SAVED_TREE (fndecl
));
1007 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl
)));
1008 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl
)));
1011 for (cgn
= first_nested_function (cgn
);
1012 cgn
; cgn
= next_nested_function (cgn
))
1013 unvisit_body (cgn
->decl
);
1016 /* Unconditionally make an unshared copy of EXPR. This is used when using
1017 stored expressions which span multiple functions, such as BINFO_VTABLE,
1018 as the normal unsharing process can't tell that they're shared. */
1021 unshare_expr (tree expr
)
1023 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1027 /* Worker for unshare_expr_without_location. */
1030 prune_expr_location (tree
*tp
, int *walk_subtrees
, void *)
1033 SET_EXPR_LOCATION (*tp
, UNKNOWN_LOCATION
);
1039 /* Similar to unshare_expr but also prune all expression locations
1043 unshare_expr_without_location (tree expr
)
1045 walk_tree (&expr
, mostly_copy_tree_r
, NULL
, NULL
);
1047 walk_tree (&expr
, prune_expr_location
, NULL
, NULL
);
1051 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1052 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
1053 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1054 EXPR is the location of the EXPR. */
1057 rexpr_location (tree expr
, location_t or_else
= UNKNOWN_LOCATION
)
1062 if (EXPR_HAS_LOCATION (expr
))
1063 return EXPR_LOCATION (expr
);
1065 if (TREE_CODE (expr
) != STATEMENT_LIST
)
1068 tree_stmt_iterator i
= tsi_start (expr
);
1071 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
1077 if (!found
|| !tsi_one_before_end_p (i
))
1080 return rexpr_location (tsi_stmt (i
), or_else
);
1083 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1084 rexpr_location for the potential recursion. */
1087 rexpr_has_location (tree expr
)
1089 return rexpr_location (expr
) != UNKNOWN_LOCATION
;
1093 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1094 contain statements and have a value. Assign its value to a temporary
1095 and give it void_type_node. Return the temporary, or NULL_TREE if
1096 WRAPPER was already void. */
1099 voidify_wrapper_expr (tree wrapper
, tree temp
)
1101 tree type
= TREE_TYPE (wrapper
);
1102 if (type
&& !VOID_TYPE_P (type
))
1106 /* Set p to point to the body of the wrapper. Loop until we find
1107 something that isn't a wrapper. */
1108 for (p
= &wrapper
; p
&& *p
; )
1110 switch (TREE_CODE (*p
))
1113 TREE_SIDE_EFFECTS (*p
) = 1;
1114 TREE_TYPE (*p
) = void_type_node
;
1115 /* For a BIND_EXPR, the body is operand 1. */
1116 p
= &BIND_EXPR_BODY (*p
);
1119 case CLEANUP_POINT_EXPR
:
1120 case TRY_FINALLY_EXPR
:
1121 case TRY_CATCH_EXPR
:
1122 TREE_SIDE_EFFECTS (*p
) = 1;
1123 TREE_TYPE (*p
) = void_type_node
;
1124 p
= &TREE_OPERAND (*p
, 0);
1127 case STATEMENT_LIST
:
1129 tree_stmt_iterator i
= tsi_last (*p
);
1130 TREE_SIDE_EFFECTS (*p
) = 1;
1131 TREE_TYPE (*p
) = void_type_node
;
1132 p
= tsi_end_p (i
) ? NULL
: tsi_stmt_ptr (i
);
1137 /* Advance to the last statement. Set all container types to
1139 for (; TREE_CODE (*p
) == COMPOUND_EXPR
; p
= &TREE_OPERAND (*p
, 1))
1141 TREE_SIDE_EFFECTS (*p
) = 1;
1142 TREE_TYPE (*p
) = void_type_node
;
1146 case TRANSACTION_EXPR
:
1147 TREE_SIDE_EFFECTS (*p
) = 1;
1148 TREE_TYPE (*p
) = void_type_node
;
1149 p
= &TRANSACTION_EXPR_BODY (*p
);
1153 /* Assume that any tree upon which voidify_wrapper_expr is
1154 directly called is a wrapper, and that its body is op0. */
1157 TREE_SIDE_EFFECTS (*p
) = 1;
1158 TREE_TYPE (*p
) = void_type_node
;
1159 p
= &TREE_OPERAND (*p
, 0);
1167 if (p
== NULL
|| IS_EMPTY_STMT (*p
))
1171 /* The wrapper is on the RHS of an assignment that we're pushing
1173 gcc_assert (TREE_CODE (temp
) == INIT_EXPR
1174 || TREE_CODE (temp
) == MODIFY_EXPR
);
1175 TREE_OPERAND (temp
, 1) = *p
;
1180 temp
= create_tmp_var (type
, "retval");
1181 *p
= build2 (INIT_EXPR
, type
, temp
, *p
);
1190 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1191 a temporary through which they communicate. */
1194 build_stack_save_restore (gcall
**save
, gcall
**restore
)
1198 *save
= gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE
), 0);
1199 tmp_var
= create_tmp_var (ptr_type_node
, "saved_stack");
1200 gimple_call_set_lhs (*save
, tmp_var
);
1203 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE
),
1207 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1210 build_asan_poison_call_expr (tree decl
)
1212 /* Do not poison variables that have size equal to zero. */
1213 tree unit_size
= DECL_SIZE_UNIT (decl
);
1214 if (zerop (unit_size
))
1217 tree base
= build_fold_addr_expr (decl
);
1219 return build_call_expr_internal_loc (UNKNOWN_LOCATION
, IFN_ASAN_MARK
,
1221 build_int_cst (integer_type_node
,
1226 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1227 on POISON flag, shadow memory of a DECL variable. The call will be
1228 put on location identified by IT iterator, where BEFORE flag drives
1229 position where the stmt will be put. */
1232 asan_poison_variable (tree decl
, bool poison
, gimple_stmt_iterator
*it
,
1235 tree unit_size
= DECL_SIZE_UNIT (decl
);
1236 tree base
= build_fold_addr_expr (decl
);
1238 /* Do not poison variables that have size equal to zero. */
1239 if (zerop (unit_size
))
1242 /* It's necessary to have all stack variables aligned to ASAN granularity
1244 gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
1245 unsigned shadow_granularity
1246 = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE
: ASAN_SHADOW_GRANULARITY
;
1247 if (DECL_ALIGN_UNIT (decl
) <= shadow_granularity
)
1248 SET_DECL_ALIGN (decl
, BITS_PER_UNIT
* shadow_granularity
);
1250 HOST_WIDE_INT flags
= poison
? ASAN_MARK_POISON
: ASAN_MARK_UNPOISON
;
1253 = gimple_build_call_internal (IFN_ASAN_MARK
, 3,
1254 build_int_cst (integer_type_node
, flags
),
1258 gsi_insert_before (it
, g
, GSI_NEW_STMT
);
1260 gsi_insert_after (it
, g
, GSI_NEW_STMT
);
1263 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1264 either poisons or unpoisons a DECL. Created statement is appended
1265 to SEQ_P gimple sequence. */
1268 asan_poison_variable (tree decl
, bool poison
, gimple_seq
*seq_p
)
1270 gimple_stmt_iterator it
= gsi_last (*seq_p
);
1271 bool before
= false;
1276 asan_poison_variable (decl
, poison
, &it
, before
);
1279 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1282 sort_by_decl_uid (const void *a
, const void *b
)
1284 const tree
*t1
= (const tree
*)a
;
1285 const tree
*t2
= (const tree
*)b
;
1287 int uid1
= DECL_UID (*t1
);
1288 int uid2
= DECL_UID (*t2
);
1292 else if (uid1
> uid2
)
1298 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1299 depending on POISON flag. Created statement is appended
1300 to SEQ_P gimple sequence. */
1303 asan_poison_variables (hash_set
<tree
> *variables
, bool poison
, gimple_seq
*seq_p
)
1305 unsigned c
= variables
->elements ();
1309 auto_vec
<tree
> sorted_variables (c
);
1311 for (hash_set
<tree
>::iterator it
= variables
->begin ();
1312 it
!= variables
->end (); ++it
)
1313 sorted_variables
.safe_push (*it
);
1315 sorted_variables
.qsort (sort_by_decl_uid
);
1319 FOR_EACH_VEC_ELT (sorted_variables
, i
, var
)
1321 asan_poison_variable (var
, poison
, seq_p
);
1323 /* Add use_after_scope_memory attribute for the variable in order
1324 to prevent re-written into SSA. */
1325 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
,
1326 DECL_ATTRIBUTES (var
)))
1327 DECL_ATTRIBUTES (var
)
1328 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE
),
1330 DECL_ATTRIBUTES (var
));
1334 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1336 static enum gimplify_status
1337 gimplify_bind_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1339 tree bind_expr
= *expr_p
;
1340 bool old_keep_stack
= gimplify_ctxp
->keep_stack
;
1341 bool old_save_stack
= gimplify_ctxp
->save_stack
;
1344 gimple_seq body
, cleanup
;
1346 location_t start_locus
= 0, end_locus
= 0;
1347 tree ret_clauses
= NULL
;
1349 tree temp
= voidify_wrapper_expr (bind_expr
, NULL
);
1351 /* Mark variables seen in this bind expr. */
1352 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1356 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
1358 /* Mark variable as local. */
1359 if (ctx
&& ctx
->region_type
!= ORT_NONE
&& !DECL_EXTERNAL (t
))
1361 if (! DECL_SEEN_IN_BIND_EXPR_P (t
)
1362 || splay_tree_lookup (ctx
->variables
,
1363 (splay_tree_key
) t
) == NULL
)
1365 int flag
= GOVD_LOCAL
;
1366 if (ctx
->region_type
== ORT_SIMD
1367 && TREE_ADDRESSABLE (t
)
1368 && !TREE_STATIC (t
))
1370 if (TREE_CODE (DECL_SIZE_UNIT (t
)) != INTEGER_CST
)
1371 ctx
->add_safelen1
= true;
1373 flag
= GOVD_PRIVATE
;
1375 omp_add_variable (ctx
, t
, flag
| GOVD_SEEN
);
1377 /* Static locals inside of target construct or offloaded
1378 routines need to be "omp declare target". */
1379 if (TREE_STATIC (t
))
1380 for (; ctx
; ctx
= ctx
->outer_context
)
1381 if ((ctx
->region_type
& ORT_TARGET
) != 0)
1383 if (!lookup_attribute ("omp declare target",
1384 DECL_ATTRIBUTES (t
)))
1386 tree id
= get_identifier ("omp declare target");
1388 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (t
));
1389 varpool_node
*node
= varpool_node::get (t
);
1392 node
->offloadable
= 1;
1393 if (ENABLE_OFFLOADING
&& !DECL_EXTERNAL (t
))
1395 g
->have_offload
= true;
1397 vec_safe_push (offload_vars
, t
);
1405 DECL_SEEN_IN_BIND_EXPR_P (t
) = 1;
1407 if (DECL_HARD_REGISTER (t
) && !is_global_var (t
) && cfun
)
1408 cfun
->has_local_explicit_reg_vars
= true;
1412 bind_stmt
= gimple_build_bind (BIND_EXPR_VARS (bind_expr
), NULL
,
1413 BIND_EXPR_BLOCK (bind_expr
));
1414 gimple_push_bind_expr (bind_stmt
);
1416 gimplify_ctxp
->keep_stack
= false;
1417 gimplify_ctxp
->save_stack
= false;
1419 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1421 gimplify_stmt (&BIND_EXPR_BODY (bind_expr
), &body
);
1422 gimple_bind_set_body (bind_stmt
, body
);
1424 /* Source location wise, the cleanup code (stack_restore and clobbers)
1425 belongs to the end of the block, so propagate what we have. The
1426 stack_save operation belongs to the beginning of block, which we can
1427 infer from the bind_expr directly if the block has no explicit
1429 if (BIND_EXPR_BLOCK (bind_expr
))
1431 end_locus
= BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1432 start_locus
= BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr
));
1434 if (start_locus
== 0)
1435 start_locus
= EXPR_LOCATION (bind_expr
);
1440 /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1441 the stack space allocated to the VLAs. */
1442 if (gimplify_ctxp
->save_stack
&& !gimplify_ctxp
->keep_stack
)
1444 gcall
*stack_restore
;
1446 /* Save stack on entry and restore it on exit. Add a try_finally
1447 block to achieve this. */
1448 build_stack_save_restore (&stack_save
, &stack_restore
);
1450 gimple_set_location (stack_save
, start_locus
);
1451 gimple_set_location (stack_restore
, end_locus
);
1453 gimplify_seq_add_stmt (&cleanup
, stack_restore
);
1456 /* Add clobbers for all variables that go out of scope. */
1457 for (t
= BIND_EXPR_VARS (bind_expr
); t
; t
= DECL_CHAIN (t
))
1460 && !is_global_var (t
)
1461 && DECL_CONTEXT (t
) == current_function_decl
)
1463 if (!DECL_HARD_REGISTER (t
)
1464 && !TREE_THIS_VOLATILE (t
)
1465 && !DECL_HAS_VALUE_EXPR_P (t
)
1466 /* Only care for variables that have to be in memory. Others
1467 will be rewritten into SSA names, hence moved to the
1469 && !is_gimple_reg (t
)
1470 && flag_stack_reuse
!= SR_NONE
)
1472 tree clobber
= build_clobber (TREE_TYPE (t
));
1473 gimple
*clobber_stmt
;
1474 clobber_stmt
= gimple_build_assign (t
, clobber
);
1475 gimple_set_location (clobber_stmt
, end_locus
);
1476 gimplify_seq_add_stmt (&cleanup
, clobber_stmt
);
1479 if (flag_openacc
&& oacc_declare_returns
!= NULL
)
1482 if (DECL_HAS_VALUE_EXPR_P (key
))
1484 key
= DECL_VALUE_EXPR (key
);
1485 if (TREE_CODE (key
) == INDIRECT_REF
)
1486 key
= TREE_OPERAND (key
, 0);
1488 tree
*c
= oacc_declare_returns
->get (key
);
1492 OMP_CLAUSE_CHAIN (*c
) = ret_clauses
;
1494 ret_clauses
= unshare_expr (*c
);
1496 oacc_declare_returns
->remove (key
);
1498 if (oacc_declare_returns
->is_empty ())
1500 delete oacc_declare_returns
;
1501 oacc_declare_returns
= NULL
;
1507 if (asan_poisoned_variables
!= NULL
1508 && asan_poisoned_variables
->contains (t
))
1510 asan_poisoned_variables
->remove (t
);
1511 asan_poison_variable (t
, true, &cleanup
);
1514 if (gimplify_ctxp
->live_switch_vars
!= NULL
1515 && gimplify_ctxp
->live_switch_vars
->contains (t
))
1516 gimplify_ctxp
->live_switch_vars
->remove (t
);
1522 gimple_stmt_iterator si
= gsi_start (cleanup
);
1524 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
1526 gsi_insert_seq_before_without_update (&si
, stmt
, GSI_NEW_STMT
);
1532 gimple_seq new_body
;
1535 gs
= gimple_build_try (gimple_bind_body (bind_stmt
), cleanup
,
1536 GIMPLE_TRY_FINALLY
);
1539 gimplify_seq_add_stmt (&new_body
, stack_save
);
1540 gimplify_seq_add_stmt (&new_body
, gs
);
1541 gimple_bind_set_body (bind_stmt
, new_body
);
1544 /* keep_stack propagates all the way up to the outermost BIND_EXPR. */
1545 if (!gimplify_ctxp
->keep_stack
)
1546 gimplify_ctxp
->keep_stack
= old_keep_stack
;
1547 gimplify_ctxp
->save_stack
= old_save_stack
;
1549 gimple_pop_bind_expr ();
1551 gimplify_seq_add_stmt (pre_p
, bind_stmt
);
1559 *expr_p
= NULL_TREE
;
1563 /* Maybe add early return predict statement to PRE_P sequence. */
1566 maybe_add_early_return_predict_stmt (gimple_seq
*pre_p
)
1568 /* If we are not in a conditional context, add PREDICT statement. */
1569 if (gimple_conditional_context ())
1571 gimple
*predict
= gimple_build_predict (PRED_TREE_EARLY_RETURN
,
1573 gimplify_seq_add_stmt (pre_p
, predict
);
1577 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1578 GIMPLE value, it is assigned to a new temporary and the statement is
1579 re-written to return the temporary.
1581 PRE_P points to the sequence where side effects that must happen before
1582 STMT should be stored. */
1584 static enum gimplify_status
1585 gimplify_return_expr (tree stmt
, gimple_seq
*pre_p
)
1588 tree ret_expr
= TREE_OPERAND (stmt
, 0);
1589 tree result_decl
, result
;
1591 if (ret_expr
== error_mark_node
)
1595 || TREE_CODE (ret_expr
) == RESULT_DECL
)
1597 maybe_add_early_return_predict_stmt (pre_p
);
1598 greturn
*ret
= gimple_build_return (ret_expr
);
1599 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1600 gimplify_seq_add_stmt (pre_p
, ret
);
1604 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
1605 result_decl
= NULL_TREE
;
1606 else if (TREE_CODE (ret_expr
) == COMPOUND_EXPR
)
1608 /* Used in C++ for handling EH cleanup of the return value if a local
1609 cleanup throws. Assume the front-end knows what it's doing. */
1610 result_decl
= DECL_RESULT (current_function_decl
);
1611 /* But crash if we end up trying to modify ret_expr below. */
1612 ret_expr
= NULL_TREE
;
1616 result_decl
= TREE_OPERAND (ret_expr
, 0);
1618 /* See through a return by reference. */
1619 if (TREE_CODE (result_decl
) == INDIRECT_REF
)
1620 result_decl
= TREE_OPERAND (result_decl
, 0);
1622 gcc_assert ((TREE_CODE (ret_expr
) == MODIFY_EXPR
1623 || TREE_CODE (ret_expr
) == INIT_EXPR
)
1624 && TREE_CODE (result_decl
) == RESULT_DECL
);
1627 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1628 Recall that aggregate_value_p is FALSE for any aggregate type that is
1629 returned in registers. If we're returning values in registers, then
1630 we don't want to extend the lifetime of the RESULT_DECL, particularly
1631 across another call. In addition, for those aggregates for which
1632 hard_function_value generates a PARALLEL, we'll die during normal
1633 expansion of structure assignments; there's special code in expand_return
1634 to handle this case that does not exist in expand_expr. */
1637 else if (aggregate_value_p (result_decl
, TREE_TYPE (current_function_decl
)))
1639 if (!poly_int_tree_p (DECL_SIZE (result_decl
)))
1641 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl
)))
1642 gimplify_type_sizes (TREE_TYPE (result_decl
), pre_p
);
1643 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1644 should be effectively allocated by the caller, i.e. all calls to
1645 this function must be subject to the Return Slot Optimization. */
1646 gimplify_one_sizepos (&DECL_SIZE (result_decl
), pre_p
);
1647 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl
), pre_p
);
1649 result
= result_decl
;
1651 else if (gimplify_ctxp
->return_temp
)
1652 result
= gimplify_ctxp
->return_temp
;
1655 result
= create_tmp_reg (TREE_TYPE (result_decl
));
1657 /* ??? With complex control flow (usually involving abnormal edges),
1658 we can wind up warning about an uninitialized value for this. Due
1659 to how this variable is constructed and initialized, this is never
1660 true. Give up and never warn. */
1661 TREE_NO_WARNING (result
) = 1;
1663 gimplify_ctxp
->return_temp
= result
;
1666 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1667 Then gimplify the whole thing. */
1668 if (result
!= result_decl
)
1669 TREE_OPERAND (ret_expr
, 0) = result
;
1671 gimplify_and_add (TREE_OPERAND (stmt
, 0), pre_p
);
1673 maybe_add_early_return_predict_stmt (pre_p
);
1674 ret
= gimple_build_return (result
);
1675 gimple_set_no_warning (ret
, TREE_NO_WARNING (stmt
));
1676 gimplify_seq_add_stmt (pre_p
, ret
);
1681 /* Gimplify a variable-length array DECL. */
1684 gimplify_vla_decl (tree decl
, gimple_seq
*seq_p
)
1686 /* This is a variable-sized decl. Simplify its size and mark it
1687 for deferred expansion. */
1688 tree t
, addr
, ptr_type
;
1690 gimplify_one_sizepos (&DECL_SIZE (decl
), seq_p
);
1691 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl
), seq_p
);
1693 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1694 if (DECL_HAS_VALUE_EXPR_P (decl
))
1697 /* All occurrences of this decl in final gimplified code will be
1698 replaced by indirection. Setting DECL_VALUE_EXPR does two
1699 things: First, it lets the rest of the gimplifier know what
1700 replacement to use. Second, it lets the debug info know
1701 where to find the value. */
1702 ptr_type
= build_pointer_type (TREE_TYPE (decl
));
1703 addr
= create_tmp_var (ptr_type
, get_name (decl
));
1704 DECL_IGNORED_P (addr
) = 0;
1705 t
= build_fold_indirect_ref (addr
);
1706 TREE_THIS_NOTRAP (t
) = 1;
1707 SET_DECL_VALUE_EXPR (decl
, t
);
1708 DECL_HAS_VALUE_EXPR_P (decl
) = 1;
1710 t
= build_alloca_call_expr (DECL_SIZE_UNIT (decl
), DECL_ALIGN (decl
),
1711 max_int_size_in_bytes (TREE_TYPE (decl
)));
1712 /* The call has been built for a variable-sized object. */
1713 CALL_ALLOCA_FOR_VAR_P (t
) = 1;
1714 t
= fold_convert (ptr_type
, t
);
1715 t
= build2 (MODIFY_EXPR
, TREE_TYPE (addr
), addr
, t
);
1717 gimplify_and_add (t
, seq_p
);
1719 /* Record the dynamic allocation associated with DECL if requested. */
1720 if (flag_callgraph_info
& CALLGRAPH_INFO_DYNAMIC_ALLOC
)
1721 record_dynamic_alloc (decl
);
1724 /* A helper function to be called via walk_tree. Mark all labels under *TP
1725 as being forced. To be called for DECL_INITIAL of static variables. */
1728 force_labels_r (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
1732 if (TREE_CODE (*tp
) == LABEL_DECL
)
1734 FORCED_LABEL (*tp
) = 1;
1735 cfun
->has_forced_label_in_static
= 1;
1741 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1742 and initialization explicit. */
1744 static enum gimplify_status
1745 gimplify_decl_expr (tree
*stmt_p
, gimple_seq
*seq_p
)
1747 tree stmt
= *stmt_p
;
1748 tree decl
= DECL_EXPR_DECL (stmt
);
1750 *stmt_p
= NULL_TREE
;
1752 if (TREE_TYPE (decl
) == error_mark_node
)
1755 if ((TREE_CODE (decl
) == TYPE_DECL
1757 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl
)))
1759 gimplify_type_sizes (TREE_TYPE (decl
), seq_p
);
1760 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
)
1761 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl
)), seq_p
);
1764 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1765 in case its size expressions contain problematic nodes like CALL_EXPR. */
1766 if (TREE_CODE (decl
) == TYPE_DECL
1767 && DECL_ORIGINAL_TYPE (decl
)
1768 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl
)))
1770 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl
), seq_p
);
1771 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl
)) == REFERENCE_TYPE
)
1772 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl
)), seq_p
);
1775 if (VAR_P (decl
) && !DECL_EXTERNAL (decl
))
1777 tree init
= DECL_INITIAL (decl
);
1778 bool is_vla
= false;
1781 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl
), &size
)
1782 || (!TREE_STATIC (decl
)
1783 && flag_stack_check
== GENERIC_STACK_CHECK
1785 (unsigned HOST_WIDE_INT
) STACK_CHECK_MAX_VAR_SIZE
)))
1787 gimplify_vla_decl (decl
, seq_p
);
1791 if (asan_poisoned_variables
1793 && TREE_ADDRESSABLE (decl
)
1794 && !TREE_STATIC (decl
)
1795 && !DECL_HAS_VALUE_EXPR_P (decl
)
1796 && DECL_ALIGN (decl
) <= MAX_SUPPORTED_STACK_ALIGNMENT
1797 && dbg_cnt (asan_use_after_scope
)
1798 && !gimplify_omp_ctxp
)
1800 asan_poisoned_variables
->add (decl
);
1801 asan_poison_variable (decl
, false, seq_p
);
1802 if (!DECL_ARTIFICIAL (decl
) && gimplify_ctxp
->live_switch_vars
)
1803 gimplify_ctxp
->live_switch_vars
->add (decl
);
1806 /* Some front ends do not explicitly declare all anonymous
1807 artificial variables. We compensate here by declaring the
1808 variables, though it would be better if the front ends would
1809 explicitly declare them. */
1810 if (!DECL_SEEN_IN_BIND_EXPR_P (decl
)
1811 && DECL_ARTIFICIAL (decl
) && DECL_NAME (decl
) == NULL_TREE
)
1812 gimple_add_tmp_var (decl
);
1814 if (init
&& init
!= error_mark_node
)
1816 if (!TREE_STATIC (decl
))
1818 DECL_INITIAL (decl
) = NULL_TREE
;
1819 init
= build2 (INIT_EXPR
, void_type_node
, decl
, init
);
1820 gimplify_and_add (init
, seq_p
);
1824 /* We must still examine initializers for static variables
1825 as they may contain a label address. */
1826 walk_tree (&init
, force_labels_r
, NULL
, NULL
);
1833 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1834 and replacing the LOOP_EXPR with goto, but if the loop contains an
1835 EXIT_EXPR, we need to append a label for it to jump to. */
1837 static enum gimplify_status
1838 gimplify_loop_expr (tree
*expr_p
, gimple_seq
*pre_p
)
1840 tree saved_label
= gimplify_ctxp
->exit_label
;
1841 tree start_label
= create_artificial_label (UNKNOWN_LOCATION
);
1843 gimplify_seq_add_stmt (pre_p
, gimple_build_label (start_label
));
1845 gimplify_ctxp
->exit_label
= NULL_TREE
;
1847 gimplify_and_add (LOOP_EXPR_BODY (*expr_p
), pre_p
);
1849 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (start_label
));
1851 if (gimplify_ctxp
->exit_label
)
1852 gimplify_seq_add_stmt (pre_p
,
1853 gimple_build_label (gimplify_ctxp
->exit_label
));
1855 gimplify_ctxp
->exit_label
= saved_label
;
1861 /* Gimplify a statement list onto a sequence. These may be created either
1862 by an enlightened front-end, or by shortcut_cond_expr. */
1864 static enum gimplify_status
1865 gimplify_statement_list (tree
*expr_p
, gimple_seq
*pre_p
)
1867 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
1869 tree_stmt_iterator i
= tsi_start (*expr_p
);
1871 while (!tsi_end_p (i
))
1873 gimplify_stmt (tsi_stmt_ptr (i
), pre_p
);
1886 /* Callback for walk_gimple_seq. */
1889 warn_switch_unreachable_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
1890 struct walk_stmt_info
*wi
)
1892 gimple
*stmt
= gsi_stmt (*gsi_p
);
1894 *handled_ops_p
= true;
1895 switch (gimple_code (stmt
))
1898 /* A compiler-generated cleanup or a user-written try block.
1899 If it's empty, don't dive into it--that would result in
1900 worse location info. */
1901 if (gimple_try_eval (stmt
) == NULL
)
1904 return integer_zero_node
;
1909 case GIMPLE_EH_FILTER
:
1910 case GIMPLE_TRANSACTION
:
1911 /* Walk the sub-statements. */
1912 *handled_ops_p
= false;
1916 /* Ignore these. We may generate them before declarations that
1917 are never executed. If there's something to warn about,
1918 there will be non-debug stmts too, and we'll catch those. */
1922 if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
1924 *handled_ops_p
= false;
1929 /* Save the first "real" statement (not a decl/lexical scope/...). */
1931 return integer_zero_node
;
1936 /* Possibly warn about unreachable statements between switch's controlling
1937 expression and the first case. SEQ is the body of a switch expression. */
1940 maybe_warn_switch_unreachable (gimple_seq seq
)
1942 if (!warn_switch_unreachable
1943 /* This warning doesn't play well with Fortran when optimizations
1945 || lang_GNU_Fortran ()
1949 struct walk_stmt_info wi
;
1950 memset (&wi
, 0, sizeof (wi
));
1951 walk_gimple_seq (seq
, warn_switch_unreachable_r
, NULL
, &wi
);
1952 gimple
*stmt
= (gimple
*) wi
.info
;
1954 if (stmt
&& gimple_code (stmt
) != GIMPLE_LABEL
)
1956 if (gimple_code (stmt
) == GIMPLE_GOTO
1957 && TREE_CODE (gimple_goto_dest (stmt
)) == LABEL_DECL
1958 && DECL_ARTIFICIAL (gimple_goto_dest (stmt
)))
1959 /* Don't warn for compiler-generated gotos. These occur
1960 in Duff's devices, for example. */;
1962 warning_at (gimple_location (stmt
), OPT_Wswitch_unreachable
,
1963 "statement will never be executed");
1968 /* A label entry that pairs label and a location. */
1975 /* Find LABEL in vector of label entries VEC. */
1977 static struct label_entry
*
1978 find_label_entry (const auto_vec
<struct label_entry
> *vec
, tree label
)
1981 struct label_entry
*l
;
1983 FOR_EACH_VEC_ELT (*vec
, i
, l
)
1984 if (l
->label
== label
)
1989 /* Return true if LABEL, a LABEL_DECL, represents a case label
1990 in a vector of labels CASES. */
1993 case_label_p (const vec
<tree
> *cases
, tree label
)
1998 FOR_EACH_VEC_ELT (*cases
, i
, l
)
1999 if (CASE_LABEL (l
) == label
)
2004 /* Find the last nondebug statement in a scope STMT. */
2007 last_stmt_in_scope (gimple
*stmt
)
2012 switch (gimple_code (stmt
))
2016 gbind
*bind
= as_a
<gbind
*> (stmt
);
2017 stmt
= gimple_seq_last_nondebug_stmt (gimple_bind_body (bind
));
2018 return last_stmt_in_scope (stmt
);
2023 gtry
*try_stmt
= as_a
<gtry
*> (stmt
);
2024 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt
));
2025 gimple
*last_eval
= last_stmt_in_scope (stmt
);
2026 if (gimple_stmt_may_fallthru (last_eval
)
2027 && (last_eval
== NULL
2028 || !gimple_call_internal_p (last_eval
, IFN_FALLTHROUGH
))
2029 && gimple_try_kind (try_stmt
) == GIMPLE_TRY_FINALLY
)
2031 stmt
= gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt
));
2032 return last_stmt_in_scope (stmt
);
2046 /* Collect interesting labels in LABELS and return the statement preceding
2047 another case label, or a user-defined label. Store a location useful
2048 to give warnings at *PREVLOC (usually the location of the returned
2049 statement or of its surrounding scope). */
2052 collect_fallthrough_labels (gimple_stmt_iterator
*gsi_p
,
2053 auto_vec
<struct label_entry
> *labels
,
2054 location_t
*prevloc
)
2056 gimple
*prev
= NULL
;
2058 *prevloc
= UNKNOWN_LOCATION
;
2061 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
)
2063 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2064 which starts on a GIMPLE_SWITCH and ends with a break label.
2065 Handle that as a single statement that can fall through. */
2066 gbind
*bind
= as_a
<gbind
*> (gsi_stmt (*gsi_p
));
2067 gimple
*first
= gimple_seq_first_stmt (gimple_bind_body (bind
));
2068 gimple
*last
= gimple_seq_last_stmt (gimple_bind_body (bind
));
2070 && gimple_code (first
) == GIMPLE_SWITCH
2071 && gimple_code (last
) == GIMPLE_LABEL
)
2073 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2074 if (SWITCH_BREAK_LABEL_P (label
))
2082 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_BIND
2083 || gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_TRY
)
2085 /* Nested scope. Only look at the last statement of
2086 the innermost scope. */
2087 location_t bind_loc
= gimple_location (gsi_stmt (*gsi_p
));
2088 gimple
*last
= last_stmt_in_scope (gsi_stmt (*gsi_p
));
2092 /* It might be a label without a location. Use the
2093 location of the scope then. */
2094 if (!gimple_has_location (prev
))
2095 *prevloc
= bind_loc
;
2101 /* Ifs are tricky. */
2102 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_COND
)
2104 gcond
*cond_stmt
= as_a
<gcond
*> (gsi_stmt (*gsi_p
));
2105 tree false_lab
= gimple_cond_false_label (cond_stmt
);
2106 location_t if_loc
= gimple_location (cond_stmt
);
2109 if (i > 1) goto <D.2259>; else goto D;
2110 we can't do much with the else-branch. */
2111 if (!DECL_ARTIFICIAL (false_lab
))
2114 /* Go on until the false label, then one step back. */
2115 for (; !gsi_end_p (*gsi_p
); gsi_next (gsi_p
))
2117 gimple
*stmt
= gsi_stmt (*gsi_p
);
2118 if (gimple_code (stmt
) == GIMPLE_LABEL
2119 && gimple_label_label (as_a
<glabel
*> (stmt
)) == false_lab
)
2123 /* Not found? Oops. */
2124 if (gsi_end_p (*gsi_p
))
2127 struct label_entry l
= { false_lab
, if_loc
};
2128 labels
->safe_push (l
);
2130 /* Go to the last statement of the then branch. */
2133 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2139 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_GOTO
2140 && !gimple_has_location (gsi_stmt (*gsi_p
)))
2142 /* Look at the statement before, it might be
2143 attribute fallthrough, in which case don't warn. */
2145 bool fallthru_before_dest
2146 = gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_FALLTHROUGH
);
2148 tree goto_dest
= gimple_goto_dest (gsi_stmt (*gsi_p
));
2149 if (!fallthru_before_dest
)
2151 struct label_entry l
= { goto_dest
, if_loc
};
2152 labels
->safe_push (l
);
2155 /* And move back. */
2159 /* Remember the last statement. Skip labels that are of no interest
2161 if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2163 tree label
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (*gsi_p
)));
2164 if (find_label_entry (labels
, label
))
2165 prev
= gsi_stmt (*gsi_p
);
2167 else if (gimple_call_internal_p (gsi_stmt (*gsi_p
), IFN_ASAN_MARK
))
2169 else if (gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_PREDICT
)
2171 else if (!is_gimple_debug (gsi_stmt (*gsi_p
)))
2172 prev
= gsi_stmt (*gsi_p
);
2175 while (!gsi_end_p (*gsi_p
)
2176 /* Stop if we find a case or a user-defined label. */
2177 && (gimple_code (gsi_stmt (*gsi_p
)) != GIMPLE_LABEL
2178 || !gimple_has_location (gsi_stmt (*gsi_p
))));
2180 if (prev
&& gimple_has_location (prev
))
2181 *prevloc
= gimple_location (prev
);
2185 /* Return true if the switch fallthough warning should occur. LABEL is
2186 the label statement that we're falling through to. */
2189 should_warn_for_implicit_fallthrough (gimple_stmt_iterator
*gsi_p
, tree label
)
2191 gimple_stmt_iterator gsi
= *gsi_p
;
2193 /* Don't warn if the label is marked with a "falls through" comment. */
2194 if (FALLTHROUGH_LABEL_P (label
))
2197 /* Don't warn for non-case labels followed by a statement:
2202 as these are likely intentional. */
2203 if (!case_label_p (&gimplify_ctxp
->case_labels
, label
))
2206 while (!gsi_end_p (gsi
)
2207 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2208 && (l
= gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi
))))
2209 && !case_label_p (&gimplify_ctxp
->case_labels
, l
))
2210 gsi_next_nondebug (&gsi
);
2211 if (gsi_end_p (gsi
) || gimple_code (gsi_stmt (gsi
)) != GIMPLE_LABEL
)
2215 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2216 immediately breaks. */
2219 /* Skip all immediately following labels. */
2220 while (!gsi_end_p (gsi
)
2221 && (gimple_code (gsi_stmt (gsi
)) == GIMPLE_LABEL
2222 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_PREDICT
))
2223 gsi_next_nondebug (&gsi
);
2225 /* { ... something; default:; } */
2227 /* { ... something; default: break; } or
2228 { ... something; default: goto L; } */
2229 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_GOTO
2230 /* { ... something; default: return; } */
2231 || gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
2237 /* Callback for walk_gimple_seq. */
2240 warn_implicit_fallthrough_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2241 struct walk_stmt_info
*)
2243 gimple
*stmt
= gsi_stmt (*gsi_p
);
2245 *handled_ops_p
= true;
2246 switch (gimple_code (stmt
))
2251 case GIMPLE_EH_FILTER
:
2252 case GIMPLE_TRANSACTION
:
2253 /* Walk the sub-statements. */
2254 *handled_ops_p
= false;
2257 /* Find a sequence of form:
2264 and possibly warn. */
2267 /* Found a label. Skip all immediately following labels. */
2268 while (!gsi_end_p (*gsi_p
)
2269 && gimple_code (gsi_stmt (*gsi_p
)) == GIMPLE_LABEL
)
2270 gsi_next_nondebug (gsi_p
);
2272 /* There might be no more statements. */
2273 if (gsi_end_p (*gsi_p
))
2274 return integer_zero_node
;
2276 /* Vector of labels that fall through. */
2277 auto_vec
<struct label_entry
> labels
;
2279 gimple
*prev
= collect_fallthrough_labels (gsi_p
, &labels
, &prevloc
);
2281 /* There might be no more statements. */
2282 if (gsi_end_p (*gsi_p
))
2283 return integer_zero_node
;
2285 gimple
*next
= gsi_stmt (*gsi_p
);
2287 /* If what follows is a label, then we may have a fallthrough. */
2288 if (gimple_code (next
) == GIMPLE_LABEL
2289 && gimple_has_location (next
)
2290 && (label
= gimple_label_label (as_a
<glabel
*> (next
)))
2293 struct label_entry
*l
;
2294 bool warned_p
= false;
2295 auto_diagnostic_group d
;
2296 if (!should_warn_for_implicit_fallthrough (gsi_p
, label
))
2298 else if (gimple_code (prev
) == GIMPLE_LABEL
2299 && (label
= gimple_label_label (as_a
<glabel
*> (prev
)))
2300 && (l
= find_label_entry (&labels
, label
)))
2301 warned_p
= warning_at (l
->loc
, OPT_Wimplicit_fallthrough_
,
2302 "this statement may fall through");
2303 else if (!gimple_call_internal_p (prev
, IFN_FALLTHROUGH
)
2304 /* Try to be clever and don't warn when the statement
2305 can't actually fall through. */
2306 && gimple_stmt_may_fallthru (prev
)
2307 && prevloc
!= UNKNOWN_LOCATION
)
2308 warned_p
= warning_at (prevloc
,
2309 OPT_Wimplicit_fallthrough_
,
2310 "this statement may fall through");
2312 inform (gimple_location (next
), "here");
2314 /* Mark this label as processed so as to prevent multiple
2315 warnings in nested switches. */
2316 FALLTHROUGH_LABEL_P (label
) = true;
2318 /* So that next warn_implicit_fallthrough_r will start looking for
2319 a new sequence starting with this label. */
2330 /* Warn when a switch case falls through. */
2333 maybe_warn_implicit_fallthrough (gimple_seq seq
)
2335 if (!warn_implicit_fallthrough
)
2338 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2341 || lang_GNU_OBJC ()))
2344 struct walk_stmt_info wi
;
2345 memset (&wi
, 0, sizeof (wi
));
2346 walk_gimple_seq (seq
, warn_implicit_fallthrough_r
, NULL
, &wi
);
2349 /* Callback for walk_gimple_seq. */
2352 expand_FALLTHROUGH_r (gimple_stmt_iterator
*gsi_p
, bool *handled_ops_p
,
2353 struct walk_stmt_info
*wi
)
2355 gimple
*stmt
= gsi_stmt (*gsi_p
);
2357 *handled_ops_p
= true;
2358 switch (gimple_code (stmt
))
2363 case GIMPLE_EH_FILTER
:
2364 case GIMPLE_TRANSACTION
:
2365 /* Walk the sub-statements. */
2366 *handled_ops_p
= false;
2369 if (gimple_call_internal_p (stmt
, IFN_FALLTHROUGH
))
2371 gsi_remove (gsi_p
, true);
2372 if (gsi_end_p (*gsi_p
))
2374 *static_cast<location_t
*>(wi
->info
) = gimple_location (stmt
);
2375 return integer_zero_node
;
2379 location_t loc
= gimple_location (stmt
);
2381 gimple_stmt_iterator gsi2
= *gsi_p
;
2382 stmt
= gsi_stmt (gsi2
);
2383 if (gimple_code (stmt
) == GIMPLE_GOTO
&& !gimple_has_location (stmt
))
2385 /* Go on until the artificial label. */
2386 tree goto_dest
= gimple_goto_dest (stmt
);
2387 for (; !gsi_end_p (gsi2
); gsi_next (&gsi2
))
2389 if (gimple_code (gsi_stmt (gsi2
)) == GIMPLE_LABEL
2390 && gimple_label_label (as_a
<glabel
*> (gsi_stmt (gsi2
)))
2395 /* Not found? Stop. */
2396 if (gsi_end_p (gsi2
))
2399 /* Look one past it. */
2403 /* We're looking for a case label or default label here. */
2404 while (!gsi_end_p (gsi2
))
2406 stmt
= gsi_stmt (gsi2
);
2407 if (gimple_code (stmt
) == GIMPLE_LABEL
)
2409 tree label
= gimple_label_label (as_a
<glabel
*> (stmt
));
2410 if (gimple_has_location (stmt
) && DECL_ARTIFICIAL (label
))
2416 else if (gimple_call_internal_p (stmt
, IFN_ASAN_MARK
))
2418 else if (!is_gimple_debug (stmt
))
2419 /* Anything else is not expected. */
2424 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2425 "a case label or default label");
2434 /* Expand all FALLTHROUGH () calls in SEQ. */
2437 expand_FALLTHROUGH (gimple_seq
*seq_p
)
2439 struct walk_stmt_info wi
;
2441 memset (&wi
, 0, sizeof (wi
));
2442 wi
.info
= (void *) &loc
;
2443 walk_gimple_seq_mod (seq_p
, expand_FALLTHROUGH_r
, NULL
, &wi
);
2444 if (wi
.callback_result
== integer_zero_node
)
2445 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2446 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2447 pedwarn (loc
, 0, "attribute %<fallthrough%> not preceding "
2448 "a case label or default label");
2452 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2455 static enum gimplify_status
2456 gimplify_switch_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2458 tree switch_expr
= *expr_p
;
2459 gimple_seq switch_body_seq
= NULL
;
2460 enum gimplify_status ret
;
2461 tree index_type
= TREE_TYPE (switch_expr
);
2462 if (index_type
== NULL_TREE
)
2463 index_type
= TREE_TYPE (SWITCH_COND (switch_expr
));
2465 ret
= gimplify_expr (&SWITCH_COND (switch_expr
), pre_p
, NULL
, is_gimple_val
,
2467 if (ret
== GS_ERROR
|| ret
== GS_UNHANDLED
)
2470 if (SWITCH_BODY (switch_expr
))
2473 vec
<tree
> saved_labels
;
2474 hash_set
<tree
> *saved_live_switch_vars
= NULL
;
2475 tree default_case
= NULL_TREE
;
2476 gswitch
*switch_stmt
;
2478 /* Save old labels, get new ones from body, then restore the old
2479 labels. Save all the things from the switch body to append after. */
2480 saved_labels
= gimplify_ctxp
->case_labels
;
2481 gimplify_ctxp
->case_labels
.create (8);
2483 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2484 saved_live_switch_vars
= gimplify_ctxp
->live_switch_vars
;
2485 tree_code body_type
= TREE_CODE (SWITCH_BODY (switch_expr
));
2486 if (body_type
== BIND_EXPR
|| body_type
== STATEMENT_LIST
)
2487 gimplify_ctxp
->live_switch_vars
= new hash_set
<tree
> (4);
2489 gimplify_ctxp
->live_switch_vars
= NULL
;
2491 bool old_in_switch_expr
= gimplify_ctxp
->in_switch_expr
;
2492 gimplify_ctxp
->in_switch_expr
= true;
2494 gimplify_stmt (&SWITCH_BODY (switch_expr
), &switch_body_seq
);
2496 gimplify_ctxp
->in_switch_expr
= old_in_switch_expr
;
2497 maybe_warn_switch_unreachable (switch_body_seq
);
2498 maybe_warn_implicit_fallthrough (switch_body_seq
);
2499 /* Only do this for the outermost GIMPLE_SWITCH. */
2500 if (!gimplify_ctxp
->in_switch_expr
)
2501 expand_FALLTHROUGH (&switch_body_seq
);
2503 labels
= gimplify_ctxp
->case_labels
;
2504 gimplify_ctxp
->case_labels
= saved_labels
;
2506 if (gimplify_ctxp
->live_switch_vars
)
2508 gcc_assert (gimplify_ctxp
->live_switch_vars
->is_empty ());
2509 delete gimplify_ctxp
->live_switch_vars
;
2511 gimplify_ctxp
->live_switch_vars
= saved_live_switch_vars
;
2513 preprocess_case_label_vec_for_gimple (labels
, index_type
,
2516 bool add_bind
= false;
2519 glabel
*new_default
;
2522 = build_case_label (NULL_TREE
, NULL_TREE
,
2523 create_artificial_label (UNKNOWN_LOCATION
));
2524 if (old_in_switch_expr
)
2526 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case
)) = 1;
2529 new_default
= gimple_build_label (CASE_LABEL (default_case
));
2530 gimplify_seq_add_stmt (&switch_body_seq
, new_default
);
2532 else if (old_in_switch_expr
)
2534 gimple
*last
= gimple_seq_last_stmt (switch_body_seq
);
2535 if (last
&& gimple_code (last
) == GIMPLE_LABEL
)
2537 tree label
= gimple_label_label (as_a
<glabel
*> (last
));
2538 if (SWITCH_BREAK_LABEL_P (label
))
2543 switch_stmt
= gimple_build_switch (SWITCH_COND (switch_expr
),
2544 default_case
, labels
);
2545 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2546 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2547 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2548 so that we can easily find the start and end of the switch
2552 gimple_seq bind_body
= NULL
;
2553 gimplify_seq_add_stmt (&bind_body
, switch_stmt
);
2554 gimple_seq_add_seq (&bind_body
, switch_body_seq
);
2555 gbind
*bind
= gimple_build_bind (NULL_TREE
, bind_body
, NULL_TREE
);
2556 gimple_set_location (bind
, EXPR_LOCATION (switch_expr
));
2557 gimplify_seq_add_stmt (pre_p
, bind
);
2561 gimplify_seq_add_stmt (pre_p
, switch_stmt
);
2562 gimplify_seq_add_seq (pre_p
, switch_body_seq
);
2572 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2574 static enum gimplify_status
2575 gimplify_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2577 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p
))
2578 == current_function_decl
);
2580 tree label
= LABEL_EXPR_LABEL (*expr_p
);
2581 glabel
*label_stmt
= gimple_build_label (label
);
2582 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2583 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2585 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2586 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2588 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2589 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2595 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2597 static enum gimplify_status
2598 gimplify_case_label_expr (tree
*expr_p
, gimple_seq
*pre_p
)
2600 struct gimplify_ctx
*ctxp
;
2603 /* Invalid programs can play Duff's Device type games with, for example,
2604 #pragma omp parallel. At least in the C front end, we don't
2605 detect such invalid branches until after gimplification, in the
2606 diagnose_omp_blocks pass. */
2607 for (ctxp
= gimplify_ctxp
; ; ctxp
= ctxp
->prev_context
)
2608 if (ctxp
->case_labels
.exists ())
2611 tree label
= CASE_LABEL (*expr_p
);
2612 label_stmt
= gimple_build_label (label
);
2613 gimple_set_location (label_stmt
, EXPR_LOCATION (*expr_p
));
2614 ctxp
->case_labels
.safe_push (*expr_p
);
2615 gimplify_seq_add_stmt (pre_p
, label_stmt
);
2617 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label
)))
2618 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_COLD_LABEL
,
2620 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label
)))
2621 gimple_seq_add_stmt (pre_p
, gimple_build_predict (PRED_HOT_LABEL
,
2627 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2631 build_and_jump (tree
*label_p
)
2633 if (label_p
== NULL
)
2634 /* If there's nowhere to jump, just fall through. */
2637 if (*label_p
== NULL_TREE
)
2639 tree label
= create_artificial_label (UNKNOWN_LOCATION
);
2643 return build1 (GOTO_EXPR
, void_type_node
, *label_p
);
2646 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2647 This also involves building a label to jump to and communicating it to
2648 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2650 static enum gimplify_status
2651 gimplify_exit_expr (tree
*expr_p
)
2653 tree cond
= TREE_OPERAND (*expr_p
, 0);
2656 expr
= build_and_jump (&gimplify_ctxp
->exit_label
);
2657 expr
= build3 (COND_EXPR
, void_type_node
, cond
, expr
, NULL_TREE
);
2663 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2664 different from its canonical type, wrap the whole thing inside a
2665 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2668 The canonical type of a COMPONENT_REF is the type of the field being
2669 referenced--unless the field is a bit-field which can be read directly
2670 in a smaller mode, in which case the canonical type is the
2671 sign-appropriate type corresponding to that mode. */
2674 canonicalize_component_ref (tree
*expr_p
)
2676 tree expr
= *expr_p
;
2679 gcc_assert (TREE_CODE (expr
) == COMPONENT_REF
);
2681 if (INTEGRAL_TYPE_P (TREE_TYPE (expr
)))
2682 type
= TREE_TYPE (get_unwidened (expr
, NULL_TREE
));
2684 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
2686 /* One could argue that all the stuff below is not necessary for
2687 the non-bitfield case and declare it a FE error if type
2688 adjustment would be needed. */
2689 if (TREE_TYPE (expr
) != type
)
2691 #ifdef ENABLE_TYPES_CHECKING
2692 tree old_type
= TREE_TYPE (expr
);
2696 /* We need to preserve qualifiers and propagate them from
2698 type_quals
= TYPE_QUALS (type
)
2699 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr
, 0)));
2700 if (TYPE_QUALS (type
) != type_quals
)
2701 type
= build_qualified_type (TYPE_MAIN_VARIANT (type
), type_quals
);
2703 /* Set the type of the COMPONENT_REF to the underlying type. */
2704 TREE_TYPE (expr
) = type
;
2706 #ifdef ENABLE_TYPES_CHECKING
2707 /* It is now a FE error, if the conversion from the canonical
2708 type to the original expression type is not useless. */
2709 gcc_assert (useless_type_conversion_p (old_type
, type
));
2714 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2715 to foo, embed that change in the ADDR_EXPR by converting
2720 where L is the lower bound. For simplicity, only do this for constant
2722 The constraint is that the type of &array[L] is trivially convertible
2726 canonicalize_addr_expr (tree
*expr_p
)
2728 tree expr
= *expr_p
;
2729 tree addr_expr
= TREE_OPERAND (expr
, 0);
2730 tree datype
, ddatype
, pddatype
;
2732 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2733 if (!POINTER_TYPE_P (TREE_TYPE (expr
))
2734 || TREE_CODE (addr_expr
) != ADDR_EXPR
)
2737 /* The addr_expr type should be a pointer to an array. */
2738 datype
= TREE_TYPE (TREE_TYPE (addr_expr
));
2739 if (TREE_CODE (datype
) != ARRAY_TYPE
)
2742 /* The pointer to element type shall be trivially convertible to
2743 the expression pointer type. */
2744 ddatype
= TREE_TYPE (datype
);
2745 pddatype
= build_pointer_type (ddatype
);
2746 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr
)),
2750 /* The lower bound and element sizes must be constant. */
2751 if (!TYPE_SIZE_UNIT (ddatype
)
2752 || TREE_CODE (TYPE_SIZE_UNIT (ddatype
)) != INTEGER_CST
2753 || !TYPE_DOMAIN (datype
) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))
2754 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype
))) != INTEGER_CST
)
2757 /* All checks succeeded. Build a new node to merge the cast. */
2758 *expr_p
= build4 (ARRAY_REF
, ddatype
, TREE_OPERAND (addr_expr
, 0),
2759 TYPE_MIN_VALUE (TYPE_DOMAIN (datype
)),
2760 NULL_TREE
, NULL_TREE
);
2761 *expr_p
= build1 (ADDR_EXPR
, pddatype
, *expr_p
);
2763 /* We can have stripped a required restrict qualifier above. */
2764 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
2765 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
2768 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2769 underneath as appropriate. */
2771 static enum gimplify_status
2772 gimplify_conversion (tree
*expr_p
)
2774 location_t loc
= EXPR_LOCATION (*expr_p
);
2775 gcc_assert (CONVERT_EXPR_P (*expr_p
));
2777 /* Then strip away all but the outermost conversion. */
2778 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p
, 0));
2780 /* And remove the outermost conversion if it's useless. */
2781 if (tree_ssa_useless_type_conversion (*expr_p
))
2782 *expr_p
= TREE_OPERAND (*expr_p
, 0);
2784 /* If we still have a conversion at the toplevel,
2785 then canonicalize some constructs. */
2786 if (CONVERT_EXPR_P (*expr_p
))
2788 tree sub
= TREE_OPERAND (*expr_p
, 0);
2790 /* If a NOP conversion is changing the type of a COMPONENT_REF
2791 expression, then canonicalize its type now in order to expose more
2792 redundant conversions. */
2793 if (TREE_CODE (sub
) == COMPONENT_REF
)
2794 canonicalize_component_ref (&TREE_OPERAND (*expr_p
, 0));
2796 /* If a NOP conversion is changing a pointer to array of foo
2797 to a pointer to foo, embed that change in the ADDR_EXPR. */
2798 else if (TREE_CODE (sub
) == ADDR_EXPR
)
2799 canonicalize_addr_expr (expr_p
);
2802 /* If we have a conversion to a non-register type force the
2803 use of a VIEW_CONVERT_EXPR instead. */
2804 if (CONVERT_EXPR_P (*expr_p
) && !is_gimple_reg_type (TREE_TYPE (*expr_p
)))
2805 *expr_p
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, TREE_TYPE (*expr_p
),
2806 TREE_OPERAND (*expr_p
, 0));
2808 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2809 if (TREE_CODE (*expr_p
) == CONVERT_EXPR
)
2810 TREE_SET_CODE (*expr_p
, NOP_EXPR
);
2815 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2816 DECL_VALUE_EXPR, and it's worth re-examining things. */
2818 static enum gimplify_status
2819 gimplify_var_or_parm_decl (tree
*expr_p
)
2821 tree decl
= *expr_p
;
2823 /* ??? If this is a local variable, and it has not been seen in any
2824 outer BIND_EXPR, then it's probably the result of a duplicate
2825 declaration, for which we've already issued an error. It would
2826 be really nice if the front end wouldn't leak these at all.
2827 Currently the only known culprit is C++ destructors, as seen
2828 in g++.old-deja/g++.jason/binding.C. */
2830 && !DECL_SEEN_IN_BIND_EXPR_P (decl
)
2831 && !TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
)
2832 && decl_function_context (decl
) == current_function_decl
)
2834 gcc_assert (seen_error ());
2838 /* When within an OMP context, notice uses of variables. */
2839 if (gimplify_omp_ctxp
&& omp_notice_variable (gimplify_omp_ctxp
, decl
, true))
2842 /* If the decl is an alias for another expression, substitute it now. */
2843 if (DECL_HAS_VALUE_EXPR_P (decl
))
2845 *expr_p
= unshare_expr (DECL_VALUE_EXPR (decl
));
2852 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2855 recalculate_side_effects (tree t
)
2857 enum tree_code code
= TREE_CODE (t
);
2858 int len
= TREE_OPERAND_LENGTH (t
);
2861 switch (TREE_CODE_CLASS (code
))
2863 case tcc_expression
:
2869 case PREDECREMENT_EXPR
:
2870 case PREINCREMENT_EXPR
:
2871 case POSTDECREMENT_EXPR
:
2872 case POSTINCREMENT_EXPR
:
2873 /* All of these have side-effects, no matter what their
2882 case tcc_comparison
: /* a comparison expression */
2883 case tcc_unary
: /* a unary arithmetic expression */
2884 case tcc_binary
: /* a binary arithmetic expression */
2885 case tcc_reference
: /* a reference */
2886 case tcc_vl_exp
: /* a function call */
2887 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2888 for (i
= 0; i
< len
; ++i
)
2890 tree op
= TREE_OPERAND (t
, i
);
2891 if (op
&& TREE_SIDE_EFFECTS (op
))
2892 TREE_SIDE_EFFECTS (t
) = 1;
2897 /* No side-effects. */
2905 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2909 : min_lval '[' val ']'
2911 | compound_lval '[' val ']'
2912 | compound_lval '.' ID
2914 This is not part of the original SIMPLE definition, which separates
2915 array and member references, but it seems reasonable to handle them
2916 together. Also, this way we don't run into problems with union
2917 aliasing; gcc requires that for accesses through a union to alias, the
2918 union reference must be explicit, which was not always the case when we
2919 were splitting up array and member refs.
2921 PRE_P points to the sequence where side effects that must happen before
2922 *EXPR_P should be stored.
2924 POST_P points to the sequence where side effects that must happen after
2925 *EXPR_P should be stored. */
2927 static enum gimplify_status
2928 gimplify_compound_lval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
2929 fallback_t fallback
)
2932 enum gimplify_status ret
= GS_ALL_DONE
, tret
;
2934 location_t loc
= EXPR_LOCATION (*expr_p
);
2935 tree expr
= *expr_p
;
2937 /* Create a stack of the subexpressions so later we can walk them in
2938 order from inner to outer. */
2939 auto_vec
<tree
, 10> expr_stack
;
2941 /* We can handle anything that get_inner_reference can deal with. */
2942 for (p
= expr_p
; ; p
= &TREE_OPERAND (*p
, 0))
2945 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2946 if (TREE_CODE (*p
) == INDIRECT_REF
)
2947 *p
= fold_indirect_ref_loc (loc
, *p
);
2949 if (handled_component_p (*p
))
2951 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2952 additional COMPONENT_REFs. */
2953 else if ((VAR_P (*p
) || TREE_CODE (*p
) == PARM_DECL
)
2954 && gimplify_var_or_parm_decl (p
) == GS_OK
)
2959 expr_stack
.safe_push (*p
);
2962 gcc_assert (expr_stack
.length ());
2964 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2965 walked through and P points to the innermost expression.
2967 Java requires that we elaborated nodes in source order. That
2968 means we must gimplify the inner expression followed by each of
2969 the indices, in order. But we can't gimplify the inner
2970 expression until we deal with any variable bounds, sizes, or
2971 positions in order to deal with PLACEHOLDER_EXPRs.
2973 So we do this in three steps. First we deal with the annotations
2974 for any variables in the components, then we gimplify the base,
2975 then we gimplify any indices, from left to right. */
2976 for (i
= expr_stack
.length () - 1; i
>= 0; i
--)
2978 tree t
= expr_stack
[i
];
2980 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
2982 /* Gimplify the low bound and element type size and put them into
2983 the ARRAY_REF. If these values are set, they have already been
2985 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
2987 tree low
= unshare_expr (array_ref_low_bound (t
));
2988 if (!is_gimple_min_invariant (low
))
2990 TREE_OPERAND (t
, 2) = low
;
2991 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
2992 post_p
, is_gimple_reg
,
2994 ret
= MIN (ret
, tret
);
2999 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3000 is_gimple_reg
, fb_rvalue
);
3001 ret
= MIN (ret
, tret
);
3004 if (TREE_OPERAND (t
, 3) == NULL_TREE
)
3006 tree elmt_size
= array_ref_element_size (t
);
3007 if (!is_gimple_min_invariant (elmt_size
))
3009 elmt_size
= unshare_expr (elmt_size
);
3010 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (t
, 0)));
3011 tree factor
= size_int (TYPE_ALIGN_UNIT (elmt_type
));
3013 /* Divide the element size by the alignment of the element
3015 elmt_size
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3018 TREE_OPERAND (t
, 3) = elmt_size
;
3019 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
,
3020 post_p
, is_gimple_reg
,
3022 ret
= MIN (ret
, tret
);
3027 tret
= gimplify_expr (&TREE_OPERAND (t
, 3), pre_p
, post_p
,
3028 is_gimple_reg
, fb_rvalue
);
3029 ret
= MIN (ret
, tret
);
3032 else if (TREE_CODE (t
) == COMPONENT_REF
)
3034 /* Set the field offset into T and gimplify it. */
3035 if (TREE_OPERAND (t
, 2) == NULL_TREE
)
3037 tree offset
= component_ref_field_offset (t
);
3038 if (!is_gimple_min_invariant (offset
))
3040 offset
= unshare_expr (offset
);
3041 tree field
= TREE_OPERAND (t
, 1);
3043 = size_int (DECL_OFFSET_ALIGN (field
) / BITS_PER_UNIT
);
3045 /* Divide the offset by its alignment. */
3046 offset
= size_binop_loc (loc
, EXACT_DIV_EXPR
,
3049 TREE_OPERAND (t
, 2) = offset
;
3050 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
,
3051 post_p
, is_gimple_reg
,
3053 ret
= MIN (ret
, tret
);
3058 tret
= gimplify_expr (&TREE_OPERAND (t
, 2), pre_p
, post_p
,
3059 is_gimple_reg
, fb_rvalue
);
3060 ret
= MIN (ret
, tret
);
3065 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3066 so as to match the min_lval predicate. Failure to do so may result
3067 in the creation of large aggregate temporaries. */
3068 tret
= gimplify_expr (p
, pre_p
, post_p
, is_gimple_min_lval
,
3069 fallback
| fb_lvalue
);
3070 ret
= MIN (ret
, tret
);
3072 /* And finally, the indices and operands of ARRAY_REF. During this
3073 loop we also remove any useless conversions. */
3074 for (; expr_stack
.length () > 0; )
3076 tree t
= expr_stack
.pop ();
3078 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3080 /* Gimplify the dimension. */
3081 if (!is_gimple_min_invariant (TREE_OPERAND (t
, 1)))
3083 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), pre_p
, post_p
,
3084 is_gimple_val
, fb_rvalue
);
3085 ret
= MIN (ret
, tret
);
3089 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t
, 0));
3091 /* The innermost expression P may have originally had
3092 TREE_SIDE_EFFECTS set which would have caused all the outer
3093 expressions in *EXPR_P leading to P to also have had
3094 TREE_SIDE_EFFECTS set. */
3095 recalculate_side_effects (t
);
3098 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3099 if ((fallback
& fb_rvalue
) && TREE_CODE (*expr_p
) == COMPONENT_REF
)
3101 canonicalize_component_ref (expr_p
);
3104 expr_stack
.release ();
3106 gcc_assert (*expr_p
== expr
|| ret
!= GS_ALL_DONE
);
3111 /* Gimplify the self modifying expression pointed to by EXPR_P
3114 PRE_P points to the list where side effects that must happen before
3115 *EXPR_P should be stored.
3117 POST_P points to the list where side effects that must happen after
3118 *EXPR_P should be stored.
3120 WANT_VALUE is nonzero iff we want to use the value of this expression
3121 in another expression.
3123 ARITH_TYPE is the type the computation should be performed in. */
3125 enum gimplify_status
3126 gimplify_self_mod_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
3127 bool want_value
, tree arith_type
)
3129 enum tree_code code
;
3130 tree lhs
, lvalue
, rhs
, t1
;
3131 gimple_seq post
= NULL
, *orig_post_p
= post_p
;
3133 enum tree_code arith_code
;
3134 enum gimplify_status ret
;
3135 location_t loc
= EXPR_LOCATION (*expr_p
);
3137 code
= TREE_CODE (*expr_p
);
3139 gcc_assert (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
3140 || code
== PREINCREMENT_EXPR
|| code
== PREDECREMENT_EXPR
);
3142 /* Prefix or postfix? */
3143 if (code
== POSTINCREMENT_EXPR
|| code
== POSTDECREMENT_EXPR
)
3144 /* Faster to treat as prefix if result is not used. */
3145 postfix
= want_value
;
3149 /* For postfix, make sure the inner expression's post side effects
3150 are executed after side effects from this expression. */
3154 /* Add or subtract? */
3155 if (code
== PREINCREMENT_EXPR
|| code
== POSTINCREMENT_EXPR
)
3156 arith_code
= PLUS_EXPR
;
3158 arith_code
= MINUS_EXPR
;
3160 /* Gimplify the LHS into a GIMPLE lvalue. */
3161 lvalue
= TREE_OPERAND (*expr_p
, 0);
3162 ret
= gimplify_expr (&lvalue
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
3163 if (ret
== GS_ERROR
)
3166 /* Extract the operands to the arithmetic operation. */
3168 rhs
= TREE_OPERAND (*expr_p
, 1);
3170 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3171 that as the result value and in the postqueue operation. */
3174 ret
= gimplify_expr (&lhs
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
3175 if (ret
== GS_ERROR
)
3178 lhs
= get_initialized_tmp_var (lhs
, pre_p
);
3181 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
3182 if (POINTER_TYPE_P (TREE_TYPE (lhs
)))
3184 rhs
= convert_to_ptrofftype_loc (loc
, rhs
);
3185 if (arith_code
== MINUS_EXPR
)
3186 rhs
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (rhs
), rhs
);
3187 t1
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (*expr_p
), lhs
, rhs
);
3190 t1
= fold_convert (TREE_TYPE (*expr_p
),
3191 fold_build2 (arith_code
, arith_type
,
3192 fold_convert (arith_type
, lhs
),
3193 fold_convert (arith_type
, rhs
)));
3197 gimplify_assign (lvalue
, t1
, pre_p
);
3198 gimplify_seq_add_seq (orig_post_p
, post
);
3204 *expr_p
= build2 (MODIFY_EXPR
, TREE_TYPE (lvalue
), lvalue
, t1
);
3209 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3212 maybe_with_size_expr (tree
*expr_p
)
3214 tree expr
= *expr_p
;
3215 tree type
= TREE_TYPE (expr
);
3218 /* If we've already wrapped this or the type is error_mark_node, we can't do
3220 if (TREE_CODE (expr
) == WITH_SIZE_EXPR
3221 || type
== error_mark_node
)
3224 /* If the size isn't known or is a constant, we have nothing to do. */
3225 size
= TYPE_SIZE_UNIT (type
);
3226 if (!size
|| poly_int_tree_p (size
))
3229 /* Otherwise, make a WITH_SIZE_EXPR. */
3230 size
= unshare_expr (size
);
3231 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, expr
);
3232 *expr_p
= build2 (WITH_SIZE_EXPR
, type
, expr
, size
);
3235 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3236 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3237 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3238 gimplified to an SSA name. */
3240 enum gimplify_status
3241 gimplify_arg (tree
*arg_p
, gimple_seq
*pre_p
, location_t call_location
,
3244 bool (*test
) (tree
);
3247 /* In general, we allow lvalues for function arguments to avoid
3248 extra overhead of copying large aggregates out of even larger
3249 aggregates into temporaries only to copy the temporaries to
3250 the argument list. Make optimizers happy by pulling out to
3251 temporaries those types that fit in registers. */
3252 if (is_gimple_reg_type (TREE_TYPE (*arg_p
)))
3253 test
= is_gimple_val
, fb
= fb_rvalue
;
3256 test
= is_gimple_lvalue
, fb
= fb_either
;
3257 /* Also strip a TARGET_EXPR that would force an extra copy. */
3258 if (TREE_CODE (*arg_p
) == TARGET_EXPR
)
3260 tree init
= TARGET_EXPR_INITIAL (*arg_p
);
3262 && !VOID_TYPE_P (TREE_TYPE (init
)))
3267 /* If this is a variable sized type, we must remember the size. */
3268 maybe_with_size_expr (arg_p
);
3270 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3271 /* Make sure arguments have the same location as the function call
3273 protected_set_expr_location (*arg_p
, call_location
);
3275 /* There is a sequence point before a function call. Side effects in
3276 the argument list must occur before the actual call. So, when
3277 gimplifying arguments, force gimplify_expr to use an internal
3278 post queue which is then appended to the end of PRE_P. */
3279 return gimplify_expr (arg_p
, pre_p
, NULL
, test
, fb
, allow_ssa
);
3282 /* Don't fold inside offloading or taskreg regions: it can break code by
3283 adding decl references that weren't in the source. We'll do it during
3284 omplower pass instead. */
3287 maybe_fold_stmt (gimple_stmt_iterator
*gsi
)
3289 struct gimplify_omp_ctx
*ctx
;
3290 for (ctx
= gimplify_omp_ctxp
; ctx
; ctx
= ctx
->outer_context
)
3291 if ((ctx
->region_type
& (ORT_TARGET
| ORT_PARALLEL
| ORT_TASK
)) != 0)
3293 else if ((ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
3295 /* Delay folding of builtins until the IL is in consistent state
3296 so the diagnostic machinery can do a better job. */
3297 if (gimple_call_builtin_p (gsi_stmt (*gsi
)))
3299 return fold_stmt (gsi
);
3302 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3303 WANT_VALUE is true if the result of the call is desired. */
3305 static enum gimplify_status
3306 gimplify_call_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
3308 tree fndecl
, parms
, p
, fnptrtype
;
3309 enum gimplify_status ret
;
3312 bool builtin_va_start_p
= false;
3313 location_t loc
= EXPR_LOCATION (*expr_p
);
3315 gcc_assert (TREE_CODE (*expr_p
) == CALL_EXPR
);
3317 /* For reliable diagnostics during inlining, it is necessary that
3318 every call_expr be annotated with file and line. */
3319 if (! EXPR_HAS_LOCATION (*expr_p
))
3320 SET_EXPR_LOCATION (*expr_p
, input_location
);
3322 /* Gimplify internal functions created in the FEs. */
3323 if (CALL_EXPR_FN (*expr_p
) == NULL_TREE
)
3328 nargs
= call_expr_nargs (*expr_p
);
3329 enum internal_fn ifn
= CALL_EXPR_IFN (*expr_p
);
3330 auto_vec
<tree
> vargs (nargs
);
3332 for (i
= 0; i
< nargs
; i
++)
3334 gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3335 EXPR_LOCATION (*expr_p
));
3336 vargs
.quick_push (CALL_EXPR_ARG (*expr_p
, i
));
3339 gcall
*call
= gimple_build_call_internal_vec (ifn
, vargs
);
3340 gimple_call_set_nothrow (call
, TREE_NOTHROW (*expr_p
));
3341 gimplify_seq_add_stmt (pre_p
, call
);
3345 /* This may be a call to a builtin function.
3347 Builtin function calls may be transformed into different
3348 (and more efficient) builtin function calls under certain
3349 circumstances. Unfortunately, gimplification can muck things
3350 up enough that the builtin expanders are not aware that certain
3351 transformations are still valid.
3353 So we attempt transformation/gimplification of the call before
3354 we gimplify the CALL_EXPR. At this time we do not manage to
3355 transform all calls in the same manner as the expanders do, but
3356 we do transform most of them. */
3357 fndecl
= get_callee_fndecl (*expr_p
);
3358 if (fndecl
&& fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
))
3359 switch (DECL_FUNCTION_CODE (fndecl
))
3361 CASE_BUILT_IN_ALLOCA
:
3362 /* If the call has been built for a variable-sized object, then we
3363 want to restore the stack level when the enclosing BIND_EXPR is
3364 exited to reclaim the allocated space; otherwise, we precisely
3365 need to do the opposite and preserve the latest stack level. */
3366 if (CALL_ALLOCA_FOR_VAR_P (*expr_p
))
3367 gimplify_ctxp
->save_stack
= true;
3369 gimplify_ctxp
->keep_stack
= true;
3372 case BUILT_IN_VA_START
:
3374 builtin_va_start_p
= TRUE
;
3375 if (call_expr_nargs (*expr_p
) < 2)
3377 error ("too few arguments to function %<va_start%>");
3378 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3382 if (fold_builtin_next_arg (*expr_p
, true))
3384 *expr_p
= build_empty_stmt (EXPR_LOCATION (*expr_p
));
3390 case BUILT_IN_EH_RETURN
:
3391 cfun
->calls_eh_return
= true;
3394 case BUILT_IN_CLEAR_PADDING
:
3395 if (call_expr_nargs (*expr_p
) == 1)
3397 /* Remember the original type of the argument in an internal
3398 dummy second argument, as in GIMPLE pointer conversions are
3400 p
= CALL_EXPR_ARG (*expr_p
, 0);
3402 = build_call_expr_loc (EXPR_LOCATION (*expr_p
), fndecl
, 2, p
,
3403 build_zero_cst (TREE_TYPE (p
)));
3411 if (fndecl
&& fndecl_built_in_p (fndecl
))
3413 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3414 if (new_tree
&& new_tree
!= *expr_p
)
3416 /* There was a transformation of this call which computes the
3417 same value, but in a more efficient way. Return and try
3424 /* Remember the original function pointer type. */
3425 fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*expr_p
));
3430 && (cfun
->curr_properties
& PROP_gimple_any
) == 0)
3432 tree variant
= omp_resolve_declare_variant (fndecl
);
3433 if (variant
!= fndecl
)
3434 CALL_EXPR_FN (*expr_p
) = build1 (ADDR_EXPR
, fnptrtype
, variant
);
3437 /* There is a sequence point before the call, so any side effects in
3438 the calling expression must occur before the actual call. Force
3439 gimplify_expr to use an internal post queue. */
3440 ret
= gimplify_expr (&CALL_EXPR_FN (*expr_p
), pre_p
, NULL
,
3441 is_gimple_call_addr
, fb_rvalue
);
3443 nargs
= call_expr_nargs (*expr_p
);
3445 /* Get argument types for verification. */
3446 fndecl
= get_callee_fndecl (*expr_p
);
3449 parms
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3451 parms
= TYPE_ARG_TYPES (TREE_TYPE (fnptrtype
));
3453 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
3454 p
= DECL_ARGUMENTS (fndecl
);
3459 for (i
= 0; i
< nargs
&& p
; i
++, p
= TREE_CHAIN (p
))
3462 /* If the last argument is __builtin_va_arg_pack () and it is not
3463 passed as a named argument, decrease the number of CALL_EXPR
3464 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3467 && TREE_CODE (CALL_EXPR_ARG (*expr_p
, nargs
- 1)) == CALL_EXPR
)
3469 tree last_arg
= CALL_EXPR_ARG (*expr_p
, nargs
- 1);
3470 tree last_arg_fndecl
= get_callee_fndecl (last_arg
);
3473 && fndecl_built_in_p (last_arg_fndecl
, BUILT_IN_VA_ARG_PACK
))
3475 tree call
= *expr_p
;
3478 *expr_p
= build_call_array_loc (loc
, TREE_TYPE (call
),
3479 CALL_EXPR_FN (call
),
3480 nargs
, CALL_EXPR_ARGP (call
));
3482 /* Copy all CALL_EXPR flags, location and block, except
3483 CALL_EXPR_VA_ARG_PACK flag. */
3484 CALL_EXPR_STATIC_CHAIN (*expr_p
) = CALL_EXPR_STATIC_CHAIN (call
);
3485 CALL_EXPR_TAILCALL (*expr_p
) = CALL_EXPR_TAILCALL (call
);
3486 CALL_EXPR_RETURN_SLOT_OPT (*expr_p
)
3487 = CALL_EXPR_RETURN_SLOT_OPT (call
);
3488 CALL_FROM_THUNK_P (*expr_p
) = CALL_FROM_THUNK_P (call
);
3489 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (call
));
3491 /* Set CALL_EXPR_VA_ARG_PACK. */
3492 CALL_EXPR_VA_ARG_PACK (*expr_p
) = 1;
3496 /* If the call returns twice then after building the CFG the call
3497 argument computations will no longer dominate the call because
3498 we add an abnormal incoming edge to the call. So do not use SSA
3500 bool returns_twice
= call_expr_flags (*expr_p
) & ECF_RETURNS_TWICE
;
3502 /* Gimplify the function arguments. */
3505 for (i
= (PUSH_ARGS_REVERSED
? nargs
- 1 : 0);
3506 PUSH_ARGS_REVERSED
? i
>= 0 : i
< nargs
;
3507 PUSH_ARGS_REVERSED
? i
-- : i
++)
3509 enum gimplify_status t
;
3511 /* Avoid gimplifying the second argument to va_start, which needs to
3512 be the plain PARM_DECL. */
3513 if ((i
!= 1) || !builtin_va_start_p
)
3515 t
= gimplify_arg (&CALL_EXPR_ARG (*expr_p
, i
), pre_p
,
3516 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3524 /* Gimplify the static chain. */
3525 if (CALL_EXPR_STATIC_CHAIN (*expr_p
))
3527 if (fndecl
&& !DECL_STATIC_CHAIN (fndecl
))
3528 CALL_EXPR_STATIC_CHAIN (*expr_p
) = NULL
;
3531 enum gimplify_status t
;
3532 t
= gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p
), pre_p
,
3533 EXPR_LOCATION (*expr_p
), ! returns_twice
);
3539 /* Verify the function result. */
3540 if (want_value
&& fndecl
3541 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype
))))
3543 error_at (loc
, "using result of function returning %<void%>");
3547 /* Try this again in case gimplification exposed something. */
3548 if (ret
!= GS_ERROR
)
3550 tree new_tree
= fold_call_expr (input_location
, *expr_p
, !want_value
);
3552 if (new_tree
&& new_tree
!= *expr_p
)
3554 /* There was a transformation of this call which computes the
3555 same value, but in a more efficient way. Return and try
3563 *expr_p
= error_mark_node
;
3567 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3568 decl. This allows us to eliminate redundant or useless
3569 calls to "const" functions. */
3570 if (TREE_CODE (*expr_p
) == CALL_EXPR
)
3572 int flags
= call_expr_flags (*expr_p
);
3573 if (flags
& (ECF_CONST
| ECF_PURE
)
3574 /* An infinite loop is considered a side effect. */
3575 && !(flags
& (ECF_LOOPING_CONST_OR_PURE
)))
3576 TREE_SIDE_EFFECTS (*expr_p
) = 0;
3579 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3580 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3581 form and delegate the creation of a GIMPLE_CALL to
3582 gimplify_modify_expr. This is always possible because when
3583 WANT_VALUE is true, the caller wants the result of this call into
3584 a temporary, which means that we will emit an INIT_EXPR in
3585 internal_get_tmp_var which will then be handled by
3586 gimplify_modify_expr. */
3589 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3590 have to do is replicate it as a GIMPLE_CALL tuple. */
3591 gimple_stmt_iterator gsi
;
3592 call
= gimple_build_call_from_tree (*expr_p
, fnptrtype
);
3593 notice_special_calls (call
);
3594 gimplify_seq_add_stmt (pre_p
, call
);
3595 gsi
= gsi_last (*pre_p
);
3596 maybe_fold_stmt (&gsi
);
3597 *expr_p
= NULL_TREE
;
3600 /* Remember the original function type. */
3601 CALL_EXPR_FN (*expr_p
) = build1 (NOP_EXPR
, fnptrtype
,
3602 CALL_EXPR_FN (*expr_p
));
3607 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3608 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3610 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3611 condition is true or false, respectively. If null, we should generate
3612 our own to skip over the evaluation of this specific expression.
3614 LOCUS is the source location of the COND_EXPR.
3616 This function is the tree equivalent of do_jump.
3618 shortcut_cond_r should only be called by shortcut_cond_expr. */
/* Recursive worker for shortcut_cond_expr: lowers a boolean predicate
   PRED (TRUTH_ANDIF/ORIF or nested COND_EXPR) into a sequence of simple
   COND_EXPRs with gotos to *TRUE_LABEL_P / *FALSE_LABEL_P.
   NOTE(review): this chunk is a lossy extraction — gaps in the embedded
   line numbers (e.g. 3651->3653, 3705->3710) show missing original lines
   (braces, the final return).  Code below left byte-identical.  */
3621 shortcut_cond_r (tree pred
, tree
*true_label_p
, tree
*false_label_p
,
3624 tree local_label
= NULL_TREE
;
3625 tree t
, expr
= NULL
;
3627 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3628 retain the shortcut semantics. Just insert the gotos here;
3629 shortcut_cond_expr will append the real blocks later. */
3630 if (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3632 location_t new_locus
;
3634 /* Turn if (a && b) into
3636 if (a); else goto no;
3637 if (b) goto yes; else goto no;
3640 if (false_label_p
== NULL
)
3641 false_label_p
= &local_label
;
3643 /* Keep the original source location on the first 'if'. */
3644 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), NULL
, false_label_p
, locus
);
3645 append_to_statement_list (t
, &expr
);
3647 /* Set the source location of the && on the second 'if'. */
3648 new_locus
= rexpr_location (pred
, locus
);
3649 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3651 append_to_statement_list (t
, &expr
);
3653 else if (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3655 location_t new_locus
;
3657 /* Turn if (a || b) into
3660 if (b) goto yes; else goto no;
3663 if (true_label_p
== NULL
)
3664 true_label_p
= &local_label
;
3666 /* Keep the original source location on the first 'if'. */
3667 t
= shortcut_cond_r (TREE_OPERAND (pred
, 0), true_label_p
, NULL
, locus
);
3668 append_to_statement_list (t
, &expr
);
3670 /* Set the source location of the || on the second 'if'. */
3671 new_locus
= rexpr_location (pred
, locus
);
3672 t
= shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
, false_label_p
,
3674 append_to_statement_list (t
, &expr
);
3676 else if (TREE_CODE (pred
) == COND_EXPR
3677 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 1)))
3678 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred
, 2))))
3680 location_t new_locus
;
3682 /* As long as we're messing with gotos, turn if (a ? b : c) into
3684 if (b) goto yes; else goto no;
3686 if (c) goto yes; else goto no;
3688 Don't do this if one of the arms has void type, which can happen
3689 in C++ when the arm is throw. */
3691 /* Keep the original source location on the first 'if'. Set the source
3692 location of the ? on the second 'if'. */
3693 new_locus
= rexpr_location (pred
, locus
);
3694 expr
= build3 (COND_EXPR
, void_type_node
, TREE_OPERAND (pred
, 0),
3695 shortcut_cond_r (TREE_OPERAND (pred
, 1), true_label_p
,
3696 false_label_p
, locus
),
3697 shortcut_cond_r (TREE_OPERAND (pred
, 2), true_label_p
,
3698 false_label_p
, new_locus
));
/* Base case: a simple predicate becomes a COND_EXPR with jumps to the
   two target labels (build_and_jump creates the label on demand).  */
3702 expr
= build3 (COND_EXPR
, void_type_node
, pred
,
3703 build_and_jump (true_label_p
),
3704 build_and_jump (false_label_p
));
3705 SET_EXPR_LOCATION (expr
, locus
);
/* If a local fall-through label was created above, emit it here.  */
3710 t
= build1 (LABEL_EXPR
, void_type_node
, local_label
);
3711 append_to_statement_list (t
, &expr
);
3717 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3718 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3719 statement, if it is the last one. Otherwise, return NULL. */
/* Return EXPR if it is a GOTO_EXPR; for a STATEMENT_LIST, skip leading
   DEBUG_BEGIN_STMTs and recurse on the last remaining statement.
   NOTE(review): lossy extraction — the "return NULL" fall-through lines
   are missing here; code left byte-identical.  */
3722 find_goto (tree expr
)
3727 if (TREE_CODE (expr
) == GOTO_EXPR
)
3730 if (TREE_CODE (expr
) != STATEMENT_LIST
)
3733 tree_stmt_iterator i
= tsi_start (expr
);
3735 while (!tsi_end_p (i
) && TREE_CODE (tsi_stmt (i
)) == DEBUG_BEGIN_STMT
)
3738 if (!tsi_one_before_end_p (i
))
3741 return find_goto (tsi_stmt (i
));
3744 /* Same as find_goto, except that it returns NULL if the destination
3745 is not a LABEL_DECL. */
/* Like find_goto, but only accept a goto whose destination is a
   LABEL_DECL (return lines are missing from this extraction).  */
3748 find_goto_label (tree expr
)
3750 tree dest
= find_goto (expr
)
;
3751 if (dest
&& TREE_CODE (GOTO_DESTINATION (dest
)) == LABEL_DECL
)
3756 /* Given a conditional expression EXPR with short-circuit boolean
3757 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3758 predicate apart into the equivalent sequence of conditionals. */
/* Lower a COND_EXPR whose predicate uses TRUTH_ANDIF/ORIF into an
   equivalent sequence of simple conditionals and gotos.
   NOTE(review): lossy extraction — numbering gaps (e.g. 3792->3798,
   3865->3867) indicate missing original lines; code byte-identical.  */
3761 shortcut_cond_expr (tree expr
)
3763 tree pred
= TREE_OPERAND (expr
, 0);
3764 tree then_
= TREE_OPERAND (expr
, 1);
3765 tree else_
= TREE_OPERAND (expr
, 2);
3766 tree true_label
, false_label
, end_label
, t
;
3768 tree
*false_label_p
;
3769 bool emit_end
, emit_false
, jump_over_else
;
3770 bool then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3771 bool else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3773 /* First do simple transformations. */
3776 /* If there is no 'else', turn
3779 if (a) if (b) then c. */
3780 while (TREE_CODE (pred
) == TRUTH_ANDIF_EXPR
)
3782 /* Keep the original source location on the first 'if'. */
3783 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3784 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3785 /* Set the source location of the && on the second 'if'. */
3786 if (rexpr_has_location (pred
))
3787 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3788 then_
= shortcut_cond_expr (expr
);
3789 then_se
= then_
&& TREE_SIDE_EFFECTS (then_
);
3790 pred
= TREE_OPERAND (pred
, 0);
3791 expr
= build3 (COND_EXPR
, void_type_node
, pred
, then_
, NULL_TREE
);
3792 SET_EXPR_LOCATION (expr
, locus
);
3798 /* If there is no 'then', turn
3801 if (a); else if (b); else d. */
3802 while (TREE_CODE (pred
) == TRUTH_ORIF_EXPR
)
3804 /* Keep the original source location on the first 'if'. */
3805 location_t locus
= EXPR_LOC_OR_LOC (expr
, input_location
);
3806 TREE_OPERAND (expr
, 0) = TREE_OPERAND (pred
, 1);
3807 /* Set the source location of the || on the second 'if'. */
3808 if (rexpr_has_location (pred
))
3809 SET_EXPR_LOCATION (expr
, rexpr_location (pred
));
3810 else_
= shortcut_cond_expr (expr
);
3811 else_se
= else_
&& TREE_SIDE_EFFECTS (else_
);
3812 pred
= TREE_OPERAND (pred
, 0);
3813 expr
= build3 (COND_EXPR
, void_type_node
, pred
, NULL_TREE
, else_
);
3814 SET_EXPR_LOCATION (expr
, locus
);
3818 /* If we're done, great. */
3819 if (TREE_CODE (pred
) != TRUTH_ANDIF_EXPR
3820 && TREE_CODE (pred
) != TRUTH_ORIF_EXPR
)
3823 /* Otherwise we need to mess with gotos. Change
3826 if (a); else goto no;
3829 and recursively gimplify the condition. */
3831 true_label
= false_label
= end_label
= NULL_TREE
;
3833 /* If our arms just jump somewhere, hijack those labels so we don't
3834 generate jumps to jumps. */
3836 if (tree then_goto
= find_goto_label (then_
))
3838 true_label
= GOTO_DESTINATION (then_goto
);
3843 if (tree else_goto
= find_goto_label (else_
))
3845 false_label
= GOTO_DESTINATION (else_goto
);
3850 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3852 true_label_p
= &true_label
;
3854 true_label_p
= NULL
;
3856 /* The 'else' branch also needs a label if it contains interesting code. */
3857 if (false_label
|| else_se
)
3858 false_label_p
= &false_label
;
3860 false_label_p
= NULL
;
3862 /* If there was nothing else in our arms, just forward the label(s). */
3863 if (!then_se
&& !else_se
)
3864 return shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3865 EXPR_LOC_OR_LOC (expr
, input_location
));
3867 /* If our last subexpression already has a terminal label, reuse it. */
3869 t
= expr_last (else_
);
3871 t
= expr_last (then_
);
3874 if (t
&& TREE_CODE (t
) == LABEL_EXPR
)
3875 end_label
= LABEL_EXPR_LABEL (t
);
3877 /* If we don't care about jumping to the 'else' branch, jump to the end
3878 if the condition is false. */
3880 false_label_p
= &end_label
;
3882 /* We only want to emit these labels if we aren't hijacking them. */
3883 emit_end
= (end_label
== NULL_TREE
);
3884 emit_false
= (false_label
== NULL_TREE
);
3886 /* We only emit the jump over the else clause if we have to--if the
3887 then clause may fall through. Otherwise we can wind up with a
3888 useless jump and a useless label at the end of gimplified code,
3889 which will cause us to think that this conditional as a whole
3890 falls through even if it doesn't. If we then inline a function
3891 which ends with such a condition, that can cause us to issue an
3892 inappropriate warning about control reaching the end of a
3893 non-void function. */
3894 jump_over_else
= block_may_fallthru (then_
);
3896 pred
= shortcut_cond_r (pred
, true_label_p
, false_label_p
,
3897 EXPR_LOC_OR_LOC (expr
, input_location
));
3900 append_to_statement_list (pred
, &expr
);
3902 append_to_statement_list (then_
, &expr
);
/* Emit the jump over the else arm only when the then arm can fall
   through (see comment above).  */
3907 tree last
= expr_last (expr
);
3908 t
= build_and_jump (&end_label
);
3909 if (rexpr_has_location (last
))
3910 SET_EXPR_LOCATION (t
, rexpr_location (last
));
3911 append_to_statement_list (t
, &expr
);
3915 t
= build1 (LABEL_EXPR
, void_type_node
, false_label
);
3916 append_to_statement_list (t
, &expr
);
3918 append_to_statement_list (else_
, &expr
);
3920 if (emit_end
&& end_label
)
3922 t
= build1 (LABEL_EXPR
, void_type_node
, end_label
);
3923 append_to_statement_list (t
, &expr
);
3929 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
/* Coerce EXPR, used in a boolean context, to have BOOLEAN_TYPE;
   recurses into truth operands and __builtin_expect's first argument.
   NOTE(review): lossy extraction (numbering gaps); code byte-identical,
   one comment typo fixed below.  */
3932 gimple_boolify (tree expr
)
3934 tree type
= TREE_TYPE (expr
);
3935 location_t loc
= EXPR_LOCATION (expr
);
3937 if (TREE_CODE (expr
) == NE_EXPR
3938 && TREE_CODE (TREE_OPERAND (expr
, 0)) == CALL_EXPR
3939 && integer_zerop (TREE_OPERAND (expr
, 1)))
3941 tree call
= TREE_OPERAND (expr
, 0);
3942 tree fn
= get_callee_fndecl (call
);
3944 /* For __builtin_expect ((long) (x), y) recurse into x as well
3945 if x is truth_value_p. */
3947 && fndecl_built_in_p (fn
, BUILT_IN_EXPECT
)
3948 && call_expr_nargs (call
) == 2)
3950 tree arg
= CALL_EXPR_ARG (call
, 0);
3953 if (TREE_CODE (arg
) == NOP_EXPR
3954 && TREE_TYPE (arg
) == TREE_TYPE (call
))
3955 arg
= TREE_OPERAND (arg
, 0);
3956 if (truth_value_p (TREE_CODE (arg
)))
3958 arg
= gimple_boolify (arg
);
3959 CALL_EXPR_ARG (call
, 0)
3960 = fold_convert_loc (loc
, TREE_TYPE (call
), arg
);
3966 switch (TREE_CODE (expr
))
3968 case TRUTH_AND_EXPR
:
3970 case TRUTH_XOR_EXPR
:
3971 case TRUTH_ANDIF_EXPR
:
3972 case TRUTH_ORIF_EXPR
:
3973 /* Also boolify the arguments of truth exprs. */
3974 TREE_OPERAND (expr
, 1) = gimple_boolify (TREE_OPERAND (expr
, 1));
3977 case TRUTH_NOT_EXPR
:
3978 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3980 /* These expressions always produce boolean results. */
3981 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3982 TREE_TYPE (expr
) = boolean_type_node
;
3986 switch ((enum annot_expr_kind
) TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1)))
3988 case annot_expr_ivdep_kind
:
3989 case annot_expr_unroll_kind
:
3990 case annot_expr_no_vector_kind
:
3991 case annot_expr_vector_kind
:
3992 case annot_expr_parallel_kind
:
3993 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
3994 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
3995 TREE_TYPE (expr
) = boolean_type_node
;
4002 if (COMPARISON_CLASS_P (expr
))
4004 /* These expressions always produce boolean results. */
4005 if (TREE_CODE (type
) != BOOLEAN_TYPE
)
4006 TREE_TYPE (expr
) = boolean_type_node
;
4009 /* Other expressions that get here must have boolean values, but
4010 might need to be converted to the appropriate mode. */
4011 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
4013 return fold_convert_loc (loc
, boolean_type_node
, expr
);
4017 /* Given a conditional expression *EXPR_P without side effects, gimplify
4018 its operands. New statements are inserted to PRE_P. */
/* Gimplify the operands of a side-effect-free COND_EXPR *EXPR_P,
   emitting new statements to PRE_P; returns the worst status of the
   three operand gimplifications (via MIN).  */
4020 static enum gimplify_status
4021 gimplify_pure_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
)
4023 tree expr
= *expr_p
, cond
;
4024 enum gimplify_status ret
, tret
;
4025 enum tree_code code
;
4027 cond
= gimple_boolify (COND_EXPR_COND (expr
));
4029 /* We need to handle && and || specially, as their gimplification
4030 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4031 code
= TREE_CODE (cond
);
4032 if (code
== TRUTH_ANDIF_EXPR
)
4033 TREE_SET_CODE (cond
, TRUTH_AND_EXPR
);
4034 else if (code
== TRUTH_ORIF_EXPR
)
4035 TREE_SET_CODE (cond
, TRUTH_OR_EXPR
);
4036 ret
= gimplify_expr (&cond
, pre_p
, NULL
, is_gimple_condexpr
, fb_rvalue
);
4037 COND_EXPR_COND (*expr_p
) = cond
;
4039 tret
= gimplify_expr (&COND_EXPR_THEN (expr
), pre_p
, NULL
,
4040 is_gimple_val
, fb_rvalue
);
4041 ret
= MIN (ret
, tret
);
4042 tret
= gimplify_expr (&COND_EXPR_ELSE (expr
), pre_p
, NULL
,
4043 is_gimple_val
, fb_rvalue
);
4045 return MIN (ret
, tret
);
4048 /* Return true if evaluating EXPR could trap.
4049 EXPR is GENERIC, while tree_could_trap_p can be called
/* Recursively test whether GENERIC expression EXPR (or any operand)
   could trap.  NOTE(review): lossy extraction — the explicit return
   statements and the declarations of i/n are missing from this text;
   code left byte-identical.  */
4053 generic_expr_could_trap_p (tree expr
)
4057 if (!expr
|| is_gimple_val (expr
))
4060 if (!EXPR_P (expr
) || tree_could_trap_p (expr
))
4063 n
= TREE_OPERAND_LENGTH (expr
);
4064 for (i
= 0; i
< n
; i
++)
4065 if (generic_expr_could_trap_p (TREE_OPERAND (expr
, i
)))
4071 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4080 The second form is used when *EXPR_P is of type void.
4082 PRE_P points to the list where side effects that must happen before
4083 *EXPR_P should be stored. */
/* Gimplify a COND_EXPR: value-producing forms get a temporary and are
   rewritten as void conditionals; shortcut (&&/||) predicates are broken
   apart; finally a GIMPLE_COND with explicit labels is emitted to PRE_P.
   NOTE(review): lossy extraction — numbering gaps mark missing original
   lines (returns, braces, some conditions); code byte-identical.  */
4085 static enum gimplify_status
4086 gimplify_cond_expr (tree
*expr_p
, gimple_seq
*pre_p
, fallback_t fallback
)
4088 tree expr
= *expr_p
;
4089 tree type
= TREE_TYPE (expr
);
4090 location_t loc
= EXPR_LOCATION (expr
);
4091 tree tmp
, arm1
, arm2
;
4092 enum gimplify_status ret
;
4093 tree label_true
, label_false
, label_cont
;
4094 bool have_then_clause_p
, have_else_clause_p
;
4096 enum tree_code pred_code
;
4097 gimple_seq seq
= NULL
;
4099 /* If this COND_EXPR has a value, copy the values into a temporary within
4101 if (!VOID_TYPE_P (type
))
4103 tree then_
= TREE_OPERAND (expr
, 1), else_
= TREE_OPERAND (expr
, 2);
4106 /* If either an rvalue is ok or we do not require an lvalue, create the
4107 temporary. But we cannot do that if the type is addressable. */
4108 if (((fallback
& fb_rvalue
) || !(fallback
& fb_lvalue
))
4109 && !TREE_ADDRESSABLE (type
))
4111 if (gimplify_ctxp
->allow_rhs_cond_expr
4112 /* If either branch has side effects or could trap, it can't be
4113 evaluated unconditionally. */
4114 && !TREE_SIDE_EFFECTS (then_
)
4115 && !generic_expr_could_trap_p (then_
)
4116 && !TREE_SIDE_EFFECTS (else_
)
4117 && !generic_expr_could_trap_p (else_
))
4118 return gimplify_pure_cond_expr (expr_p
, pre_p
);
4120 tmp
= create_tmp_var (type
, "iftmp");
4124 /* Otherwise, only create and copy references to the values. */
4127 type
= build_pointer_type (type
);
4129 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4130 then_
= build_fold_addr_expr_loc (loc
, then_
);
4132 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4133 else_
= build_fold_addr_expr_loc (loc
, else_
);
4136 = build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0), then_
, else_
);
4138 tmp
= create_tmp_var (type
, "iftmp");
4139 result
= build_simple_mem_ref_loc (loc
, tmp
);
4142 /* Build the new then clause, `tmp = then_;'. But don't build the
4143 assignment if the value is void; in C++ it can be if it's a throw. */
4144 if (!VOID_TYPE_P (TREE_TYPE (then_
)))
4145 TREE_OPERAND (expr
, 1) = build2 (INIT_EXPR
, type
, tmp
, then_
);
4147 /* Similarly, build the new else clause, `tmp = else_;'. */
4148 if (!VOID_TYPE_P (TREE_TYPE (else_
)))
4149 TREE_OPERAND (expr
, 2) = build2 (INIT_EXPR
, type
, tmp
, else_
);
4151 TREE_TYPE (expr
) = void_type_node
;
4152 recalculate_side_effects (expr
);
4154 /* Move the COND_EXPR to the prequeue. */
4155 gimplify_stmt (&expr
, pre_p
);
4161 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4162 STRIP_TYPE_NOPS (TREE_OPERAND (expr
, 0));
4163 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPOUND_EXPR
)
4164 gimplify_compound_expr (&TREE_OPERAND (expr
, 0), pre_p
, true);
4166 /* Make sure the condition has BOOLEAN_TYPE. */
4167 TREE_OPERAND (expr
, 0) = gimple_boolify (TREE_OPERAND (expr
, 0));
4169 /* Break apart && and || conditions. */
4170 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ANDIF_EXPR
4171 || TREE_CODE (TREE_OPERAND (expr
, 0)) == TRUTH_ORIF_EXPR
)
4173 expr
= shortcut_cond_expr (expr
);
4175 if (expr
!= *expr_p
)
4179 /* We can't rely on gimplify_expr to re-gimplify the expanded
4180 form properly, as cleanups might cause the target labels to be
4181 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4182 set up a conditional context. */
4183 gimple_push_condition ();
4184 gimplify_stmt (expr_p
, &seq
);
4185 gimple_pop_condition (pre_p
);
4186 gimple_seq_add_seq (pre_p
, seq
);
4192 /* Now do the normal gimplification. */
4194 /* Gimplify condition. */
4195 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, NULL
,
4196 is_gimple_condexpr_for_cond
, fb_rvalue
);
4197 if (ret
== GS_ERROR
)
4199 gcc_assert (TREE_OPERAND (expr
, 0) != NULL_TREE
);
4201 gimple_push_condition ();
4203 have_then_clause_p
= have_else_clause_p
= false;
4204 label_true
= find_goto_label (TREE_OPERAND (expr
, 1));
4206 && DECL_CONTEXT (GOTO_DESTINATION (label_true
)) == current_function_decl
4207 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4208 have different locations, otherwise we end up with incorrect
4209 location information on the branches. */
4211 || !EXPR_HAS_LOCATION (expr
)
4212 || !rexpr_has_location (label_true
)
4213 || EXPR_LOCATION (expr
) == rexpr_location (label_true
)))
4215 have_then_clause_p
= true;
4216 label_true
= GOTO_DESTINATION (label_true
);
4219 label_true
= create_artificial_label (UNKNOWN_LOCATION
);
4220 label_false
= find_goto_label (TREE_OPERAND (expr
, 2));
4222 && DECL_CONTEXT (GOTO_DESTINATION (label_false
)) == current_function_decl
4223 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4224 have different locations, otherwise we end up with incorrect
4225 location information on the branches. */
4227 || !EXPR_HAS_LOCATION (expr
)
4228 || !rexpr_has_location (label_false
)
4229 || EXPR_LOCATION (expr
) == rexpr_location (label_false
)))
4231 have_else_clause_p
= true;
4232 label_false
= GOTO_DESTINATION (label_false
);
4235 label_false
= create_artificial_label (UNKNOWN_LOCATION
);
4237 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr
), &pred_code
, &arm1
,
4239 cond_stmt
= gimple_build_cond (pred_code
, arm1
, arm2
, label_true
,
4241 gimple_set_no_warning (cond_stmt
, TREE_NO_WARNING (COND_EXPR_COND (expr
)));
4242 gimplify_seq_add_stmt (&seq
, cond_stmt
);
4243 gimple_stmt_iterator gsi
= gsi_last (seq
);
4244 maybe_fold_stmt (&gsi
);
4246 label_cont
= NULL_TREE
;
4247 if (!have_then_clause_p
)
4249 /* For if (...) {} else { code; } put label_true after
4251 if (TREE_OPERAND (expr
, 1) == NULL_TREE
4252 && !have_else_clause_p
4253 && TREE_OPERAND (expr
, 2) != NULL_TREE
)
4254 label_cont
= label_true
;
4257 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_true
));
4258 have_then_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 1), &seq
);
4259 /* For if (...) { code; } else {} or
4260 if (...) { code; } else goto label; or
4261 if (...) { code; return; } else { ... }
4262 label_cont isn't needed. */
4263 if (!have_else_clause_p
4264 && TREE_OPERAND (expr
, 2) != NULL_TREE
4265 && gimple_seq_may_fallthru (seq
))
4268 label_cont
= create_artificial_label (UNKNOWN_LOCATION
);
4270 g
= gimple_build_goto (label_cont
);
4272 /* GIMPLE_COND's are very low level; they have embedded
4273 gotos. This particular embedded goto should not be marked
4274 with the location of the original COND_EXPR, as it would
4275 correspond to the COND_EXPR's condition, not the ELSE or the
4276 THEN arms. To avoid marking it with the wrong location, flag
4277 it as "no location". */
4278 gimple_set_do_not_emit_location (g
);
4280 gimplify_seq_add_stmt (&seq
, g
);
4284 if (!have_else_clause_p
)
4286 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_false
));
4287 have_else_clause_p
= gimplify_stmt (&TREE_OPERAND (expr
, 2), &seq
);
4290 gimplify_seq_add_stmt (&seq
, gimple_build_label (label_cont
));
4292 gimple_pop_condition (pre_p
);
4293 gimple_seq_add_seq (pre_p
, seq
);
4295 if (ret
== GS_ERROR
)
4297 else if (have_then_clause_p
|| have_else_clause_p
)
4301 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4302 expr
= TREE_OPERAND (expr
, 0);
4303 gimplify_stmt (&expr
, pre_p
);
4310 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4311 to be marked addressable.
4313 We cannot rely on such an expression being directly markable if a temporary
4314 has been created by the gimplification. In this case, we create another
4315 temporary and initialize it with a copy, which will become a store after we
4316 mark it addressable. This can happen if the front-end passed us something
4317 that it could not mark addressable yet, like a Fortran pass-by-reference
4318 parameter (int) floatvar. */
/* Strip handled components to the base of *EXPR_P; if the base is a
   gimple register, copy it into a non-SSA temporary (added to SEQ_P)
   so the object can later be marked addressable.  */
4321 prepare_gimple_addressable (tree
*expr_p
, gimple_seq
*seq_p
)
4323 while (handled_component_p (*expr_p
))
4324 expr_p
= &TREE_OPERAND (*expr_p
, 0);
4325 if (is_gimple_reg (*expr_p
))
4327 /* Do not allow an SSA name as the temporary. */
4328 tree var
= get_initialized_tmp_var (*expr_p
, seq_p
, NULL
, false);
4329 DECL_NOT_GIMPLE_REG_P (var
) = 1;
4334 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4335 a call to __builtin_memcpy. */
/* Replace the MODIFY_EXPR at *EXPR_P with a call to __builtin_memcpy
   of SIZE bytes; when WANT_VALUE the call's lhs temporary is
   dereferenced as the replacement value.
   NOTE(review): lossy extraction — return statements and some braces
   are missing from this text; code byte-identical.  */
4337 static enum gimplify_status
4338 gimplify_modify_expr_to_memcpy (tree
*expr_p
, tree size
, bool want_value
,
4341 tree t
, to
, to_ptr
, from
, from_ptr
;
4343 location_t loc
= EXPR_LOCATION (*expr_p
);
4345 to
= TREE_OPERAND (*expr_p
, 0);
4346 from
= TREE_OPERAND (*expr_p
, 1);
4348 /* Mark the RHS addressable. Beware that it may not be possible to do so
4349 directly if a temporary has been created by the gimplification. */
4350 prepare_gimple_addressable (&from
, seq_p
);
4352 mark_addressable (from
);
4353 from_ptr
= build_fold_addr_expr_loc (loc
, from
);
4354 gimplify_arg (&from_ptr
, seq_p
, loc
);
4356 mark_addressable (to
);
4357 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4358 gimplify_arg (&to_ptr
, seq_p
, loc
);
4360 t
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
4362 gs
= gimple_build_call (t
, 3, to_ptr
, from_ptr
, size
);
4363 gimple_call_set_alloca_for_var (gs
, true);
4367 /* tmp = memcpy() */
4368 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4369 gimple_call_set_lhs (gs
, t
);
4370 gimplify_seq_add_stmt (seq_p
, gs
);
4372 *expr_p
= build_simple_mem_ref (t
);
4376 gimplify_seq_add_stmt (seq_p
, gs
);
4381 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4382 a call to __builtin_memset. In this case we know that the RHS is
4383 a CONSTRUCTOR with an empty element list. */
/* Replace the MODIFY_EXPR at *EXPR_P — whose RHS is asserted to be an
   empty CONSTRUCTOR — with a call to __builtin_memset(to, 0, SIZE);
   when WANT_VALUE the lhs temporary is dereferenced as the value.  */
4385 static enum gimplify_status
4386 gimplify_modify_expr_to_memset (tree
*expr_p
, tree size
, bool want_value
,
4389 tree t
, from
, to
, to_ptr
;
4391 location_t loc
= EXPR_LOCATION (*expr_p
);
4393 /* Assert our assumptions, to abort instead of producing wrong code
4394 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4395 not be immediately exposed. */
4396 from
= TREE_OPERAND (*expr_p
, 1);
4397 if (TREE_CODE (from
) == WITH_SIZE_EXPR
)
4398 from
= TREE_OPERAND (from
, 0);
4400 gcc_assert (TREE_CODE (from
) == CONSTRUCTOR
4401 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from
)));
4404 to
= TREE_OPERAND (*expr_p
, 0);
4406 to_ptr
= build_fold_addr_expr_loc (loc
, to
);
4407 gimplify_arg (&to_ptr
, seq_p
, loc
);
4408 t
= builtin_decl_implicit (BUILT_IN_MEMSET
);
4410 gs
= gimple_build_call (t
, 3, to_ptr
, integer_zero_node
, size
);
4414 /* tmp = memset() */
4415 t
= create_tmp_var (TREE_TYPE (to_ptr
));
4416 gimple_call_set_lhs (gs
, t
);
4417 gimplify_seq_add_stmt (seq_p
, gs
);
4419 *expr_p
= build1 (INDIRECT_REF
, TREE_TYPE (to
), t
);
4423 gimplify_seq_add_stmt (seq_p
, gs
);
4428 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4429 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4430 assignment. Return non-null if we detect a potential overlap. */
/* Context passed through walk_tree by gimplify_init_ctor_preeval to
   detect constructor values that overlap the lhs being initialized.  */
4432 struct gimplify_init_ctor_preeval_data
4434 /* The base decl of the lhs object. May be NULL, in which case we
4435 have to assume the lhs is indirect. */
4438 /* The alias set of the lhs object. */
4439 alias_set_type lhs_alias_set
;
/* walk_tree callback: return non-null when *TP may overlap the lhs
   described by XDATA (a gimplify_init_ctor_preeval_data).
   NOTE(review): lossy extraction — the declaration of t and the return
   statements are missing from this text; code byte-identical.  */
4443 gimplify_init_ctor_preeval_1 (tree
*tp
, int *walk_subtrees
, void *xdata
)
4445 struct gimplify_init_ctor_preeval_data
*data
4446 = (struct gimplify_init_ctor_preeval_data
*) xdata
;
4449 /* If we find the base object, obviously we have overlap. */
4450 if (data
->lhs_base_decl
== t
)
4453 /* If the constructor component is indirect, determine if we have a
4454 potential overlap with the lhs. The only bits of information we
4455 have to go on at this point are addressability and alias sets. */
4456 if ((INDIRECT_REF_P (t
)
4457 || TREE_CODE (t
) == MEM_REF
)
4458 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4459 && alias_sets_conflict_p (data
->lhs_alias_set
, get_alias_set (t
)))
4462 /* If the constructor component is a call, determine if it can hide a
4463 potential overlap with the lhs through an INDIRECT_REF like above.
4464 ??? Ugh - this is completely broken. In fact this whole analysis
4465 doesn't look conservative. */
4466 if (TREE_CODE (t
) == CALL_EXPR
)
4468 tree type
, fntype
= TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t
)));
4470 for (type
= TYPE_ARG_TYPES (fntype
); type
; type
= TREE_CHAIN (type
))
4471 if (POINTER_TYPE_P (TREE_VALUE (type
))
4472 && (!data
->lhs_base_decl
|| TREE_ADDRESSABLE (data
->lhs_base_decl
))
4473 && alias_sets_conflict_p (data
->lhs_alias_set
,
4475 (TREE_TYPE (TREE_VALUE (type
)))))
4479 if (IS_TYPE_OR_DECL_P (t
))
4484 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4485 force values that overlap with the lhs (as described by *DATA)
4486 into temporaries. */
/* Pre-evaluate constructor element *EXPR_P, forcing values that may
   overlap the lhs (described by *DATA) into temporaries on PRE_P.
   NOTE(review): lossy extraction — early returns are missing from this
   text; code byte-identical.  */
4489 gimplify_init_ctor_preeval (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4490 struct gimplify_init_ctor_preeval_data
*data
)
4492 enum gimplify_status one
;
4494 /* If the value is constant, then there's nothing to pre-evaluate. */
4495 if (TREE_CONSTANT (*expr_p
))
4497 /* Ensure it does not have side effects, it might contain a reference to
4498 the object we're initializing. */
4499 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p
));
4503 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4504 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p
)))
4507 /* Recurse for nested constructors. */
4508 if (TREE_CODE (*expr_p
) == CONSTRUCTOR
)
4510 unsigned HOST_WIDE_INT ix
;
4511 constructor_elt
*ce
;
4512 vec
<constructor_elt
, va_gc
> *v
= CONSTRUCTOR_ELTS (*expr_p
);
4514 FOR_EACH_VEC_SAFE_ELT (v
, ix
, ce
)
4515 gimplify_init_ctor_preeval (&ce
->value
, pre_p
, post_p
, data
);
4520 /* If this is a variable sized type, we must remember the size. */
4521 maybe_with_size_expr (expr_p
);
4523 /* Gimplify the constructor element to something appropriate for the rhs
4524 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4525 the gimplifier will consider this a store to memory. Doing this
4526 gimplification now means that we won't have to deal with complicated
4527 language-specific trees, nor trees like SAVE_EXPR that can induce
4528 exponential search behavior. */
4529 one
= gimplify_expr (expr_p
, pre_p
, post_p
, is_gimple_mem_rhs
, fb_rvalue
);
4530 if (one
== GS_ERROR
)
4536 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4537 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4538 always be true for all scalars, since is_gimple_mem_rhs insists on a
4539 temporary variable for them. */
4540 if (DECL_P (*expr_p
))
4543 /* If this is of variable size, we have no choice but to assume it doesn't
4544 overlap since we can't make a temporary for it. */
4545 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p
))) != INTEGER_CST
)
4548 /* Otherwise, we must search for overlap ... */
4549 if (!walk_tree (expr_p
, gimplify_init_ctor_preeval_1
, data
, NULL
))
4552 /* ... and if found, force the value into a temporary. */
4553 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
4556 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4557 a RANGE_EXPR in a CONSTRUCTOR for an array.
4561 object[var] = value;
4568 We increment var _after_ the loop exit check because we might otherwise
4569 fail if upper == TYPE_MAX_VALUE (type for upper).
4571 Note that we never have to deal with SAVE_EXPRs here, because this has
4572 already been taken care of for us, in gimplify_init_ctor_preeval(). */
/* Forward declaration needed because gimplify_init_ctor_eval_range and
   gimplify_init_ctor_eval are mutually recursive.  */
4574 static void gimplify_init_ctor_eval (tree
, vec
<constructor_elt
, va_gc
> *,
4575 gimple_seq
*, bool);
/* Emit to PRE_P a loop assigning VALUE to OBJECT[var] for var in
   [LOWER, UPPER]; the index is incremented after the exit test so
   UPPER == TYPE_MAX_VALUE still terminates.  */
4578 gimplify_init_ctor_eval_range (tree object
, tree lower
, tree upper
,
4579 tree value
, tree array_elt_type
,
4580 gimple_seq
*pre_p
, bool cleared
)
4582 tree loop_entry_label
, loop_exit_label
, fall_thru_label
;
4583 tree var
, var_type
, cref
, tmp
;
4585 loop_entry_label
= create_artificial_label (UNKNOWN_LOCATION
);
4586 loop_exit_label
= create_artificial_label (UNKNOWN_LOCATION
);
4587 fall_thru_label
= create_artificial_label (UNKNOWN_LOCATION
);
4589 /* Create and initialize the index variable. */
4590 var_type
= TREE_TYPE (upper
);
4591 var
= create_tmp_var (var_type
);
4592 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, lower
));
4594 /* Add the loop entry label. */
4595 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_entry_label
));
4597 /* Build the reference. */
4598 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4599 var
, NULL_TREE
, NULL_TREE
);
4601 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4602 the store. Otherwise just assign value to the reference. */
4604 if (TREE_CODE (value
) == CONSTRUCTOR
)
4605 /* NB we might have to call ourself recursively through
4606 gimplify_init_ctor_eval if the value is a constructor. */
4607 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4611 if (gimplify_expr (&value
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
4613 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (cref
, value
));
4616 /* We exit the loop when the index var is equal to the upper bound. */
4617 gimplify_seq_add_stmt (pre_p
,
4618 gimple_build_cond (EQ_EXPR
, var
, upper
,
4619 loop_exit_label
, fall_thru_label
));
4621 gimplify_seq_add_stmt (pre_p
, gimple_build_label (fall_thru_label
));
4623 /* Otherwise, increment the index var... */
4624 tmp
= build2 (PLUS_EXPR
, var_type
, var
,
4625 fold_convert (var_type
, integer_one_node
));
4626 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (var
, tmp
));
4628 /* ...and jump back to the loop entry. */
4629 gimplify_seq_add_stmt (pre_p
, gimple_build_goto (loop_entry_label
));
4631 /* Add the loop exit label. */
4632 gimplify_seq_add_stmt (pre_p
, gimple_build_label (loop_exit_label
));
4635 /* Return true if FDECL is accessing a field that is zero sized. */
4638 zero_sized_field_decl (const_tree fdecl
)
4640 if (TREE_CODE (fdecl
) == FIELD_DECL
&& DECL_SIZE (fdecl
)
4641 && integer_zerop (DECL_SIZE (fdecl
)))
4646 /* Return true if TYPE is zero sized. */
4649 zero_sized_type (const_tree type
)
4651 if (AGGREGATE_TYPE_P (type
) && TYPE_SIZE (type
)
4652 && integer_zerop (TYPE_SIZE (type
)))
4657 /* A subroutine of gimplify_init_constructor. Generate individual
4658 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4659 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4660 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4664 gimplify_init_ctor_eval (tree object
, vec
<constructor_elt
, va_gc
> *elts
,
4665 gimple_seq
*pre_p
, bool cleared
)
4667 tree array_elt_type
= NULL
;
4668 unsigned HOST_WIDE_INT ix
;
4669 tree purpose
, value
;
4671 if (TREE_CODE (TREE_TYPE (object
)) == ARRAY_TYPE
)
4672 array_elt_type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object
)));
4674 FOR_EACH_CONSTRUCTOR_ELT (elts
, ix
, purpose
, value
)
4678 /* NULL values are created above for gimplification errors. */
4682 if (cleared
&& initializer_zerop (value
))
4685 /* ??? Here's to hoping the front end fills in all of the indices,
4686 so we don't have to figure out what's missing ourselves. */
4687 gcc_assert (purpose
);
4689 /* Skip zero-sized fields, unless value has side-effects. This can
4690 happen with calls to functions returning a zero-sized type, which
4691 we shouldn't discard. As a number of downstream passes don't
4692 expect sets of zero-sized fields, we rely on the gimplification of
4693 the MODIFY_EXPR we make below to drop the assignment statement. */
4694 if (! TREE_SIDE_EFFECTS (value
) && zero_sized_field_decl (purpose
))
4697 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4699 if (TREE_CODE (purpose
) == RANGE_EXPR
)
4701 tree lower
= TREE_OPERAND (purpose
, 0);
4702 tree upper
= TREE_OPERAND (purpose
, 1);
4704 /* If the lower bound is equal to upper, just treat it as if
4705 upper was the index. */
4706 if (simple_cst_equal (lower
, upper
))
4710 gimplify_init_ctor_eval_range (object
, lower
, upper
, value
,
4711 array_elt_type
, pre_p
, cleared
);
4718 /* Do not use bitsizetype for ARRAY_REF indices. */
4719 if (TYPE_DOMAIN (TREE_TYPE (object
)))
4721 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object
))),
4723 cref
= build4 (ARRAY_REF
, array_elt_type
, unshare_expr (object
),
4724 purpose
, NULL_TREE
, NULL_TREE
);
4728 gcc_assert (TREE_CODE (purpose
) == FIELD_DECL
);
4729 cref
= build3 (COMPONENT_REF
, TREE_TYPE (purpose
),
4730 unshare_expr (object
), purpose
, NULL_TREE
);
4733 if (TREE_CODE (value
) == CONSTRUCTOR
4734 && TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
)
4735 gimplify_init_ctor_eval (cref
, CONSTRUCTOR_ELTS (value
),
4739 tree init
= build2 (INIT_EXPR
, TREE_TYPE (cref
), cref
, value
);
4740 gimplify_and_add (init
, pre_p
);
4746 /* Return the appropriate RHS predicate for this LHS. */
4749 rhs_predicate_for (tree lhs
)
4751 if (is_gimple_reg (lhs
))
4752 return is_gimple_reg_rhs_or_call
;
4754 return is_gimple_mem_rhs_or_call
;
4757 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4758 before the LHS has been gimplified. */
4760 static gimple_predicate
4761 initial_rhs_predicate_for (tree lhs
)
4763 if (is_gimple_reg_type (TREE_TYPE (lhs
)))
4764 return is_gimple_reg_rhs_or_call
;
4766 return is_gimple_mem_rhs_or_call
;
4769 /* Gimplify a C99 compound literal expression. This just means adding
4770 the DECL_EXPR before the current statement and using its anonymous
4773 static enum gimplify_status
4774 gimplify_compound_literal_expr (tree
*expr_p
, gimple_seq
*pre_p
,
4775 bool (*gimple_test_f
) (tree
),
4776 fallback_t fallback
)
4778 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p
);
4779 tree decl
= DECL_EXPR_DECL (decl_s
);
4780 tree init
= DECL_INITIAL (decl
);
4781 /* Mark the decl as addressable if the compound literal
4782 expression is addressable now, otherwise it is marked too late
4783 after we gimplify the initialization expression. */
4784 if (TREE_ADDRESSABLE (*expr_p
))
4785 TREE_ADDRESSABLE (decl
) = 1;
4786 /* Otherwise, if we don't need an lvalue and have a literal directly
4787 substitute it. Check if it matches the gimple predicate, as
4788 otherwise we'd generate a new temporary, and we can as well just
4789 use the decl we already have. */
4790 else if (!TREE_ADDRESSABLE (decl
)
4791 && !TREE_THIS_VOLATILE (decl
)
4793 && (fallback
& fb_lvalue
) == 0
4794 && gimple_test_f (init
))
4800 /* If the decl is not addressable, then it is being used in some
4801 expression or on the right hand side of a statement, and it can
4802 be put into a readonly data section. */
4803 if (!TREE_ADDRESSABLE (decl
) && (fallback
& fb_lvalue
) == 0)
4804 TREE_READONLY (decl
) = 1;
4806 /* This decl isn't mentioned in the enclosing block, so add it to the
4807 list of temps. FIXME it seems a bit of a kludge to say that
4808 anonymous artificial vars aren't pushed, but everything else is. */
4809 if (DECL_NAME (decl
) == NULL_TREE
&& !DECL_SEEN_IN_BIND_EXPR_P (decl
))
4810 gimple_add_tmp_var (decl
);
4812 gimplify_and_add (decl_s
, pre_p
);
4817 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4818 return a new CONSTRUCTOR if something changed. */
4821 optimize_compound_literals_in_ctor (tree orig_ctor
)
4823 tree ctor
= orig_ctor
;
4824 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (ctor
);
4825 unsigned int idx
, num
= vec_safe_length (elts
);
4827 for (idx
= 0; idx
< num
; idx
++)
4829 tree value
= (*elts
)[idx
].value
;
4830 tree newval
= value
;
4831 if (TREE_CODE (value
) == CONSTRUCTOR
)
4832 newval
= optimize_compound_literals_in_ctor (value
);
4833 else if (TREE_CODE (value
) == COMPOUND_LITERAL_EXPR
)
4835 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (value
);
4836 tree decl
= DECL_EXPR_DECL (decl_s
);
4837 tree init
= DECL_INITIAL (decl
);
4839 if (!TREE_ADDRESSABLE (value
)
4840 && !TREE_ADDRESSABLE (decl
)
4842 && TREE_CODE (init
) == CONSTRUCTOR
)
4843 newval
= optimize_compound_literals_in_ctor (init
);
4845 if (newval
== value
)
4848 if (ctor
== orig_ctor
)
4850 ctor
= copy_node (orig_ctor
);
4851 CONSTRUCTOR_ELTS (ctor
) = vec_safe_copy (elts
);
4852 elts
= CONSTRUCTOR_ELTS (ctor
);
4854 (*elts
)[idx
].value
= newval
;
/* NOTE(review): this block is a truncated extraction -- many original
   lines (case labels, braces, labels, and several statements) are
   missing, so the visible text is preserved verbatim; the comments
   added below annotate only what is visible.  */
4859 /* A subroutine of gimplify_modify_expr. Break out elements of a
4860 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4862 Note that we still need to clear any elements that don't have explicit
4863 initializers, so if not all elements are initialized we keep the
4864 original MODIFY_EXPR, we just remove all of the constructor elements.
4866 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4867 GS_ERROR if we would have to create a temporary when gimplifying
4868 this constructor. Otherwise, return GS_OK.
4870 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4872 static enum gimplify_status
4873 gimplify_init_constructor (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
4874 bool want_value
, bool notify_temp_creation
)
4876 tree object
, ctor
, type
;
4877 enum gimplify_status ret
;
4878 vec
<constructor_elt
, va_gc
> *elts
;
4880 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == CONSTRUCTOR
);
/* Unless the caller only wants a temp-creation probe, gimplify the LHS
   first so OBJECT below is the post-gimplification operand 0.  */
4882 if (!notify_temp_creation
)
4884 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
4885 is_gimple_lvalue
, fb_lvalue
);
4886 if (ret
== GS_ERROR
)
4890 object
= TREE_OPERAND (*expr_p
, 0);
4891 ctor
= TREE_OPERAND (*expr_p
, 1)
4892 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p
, 1));
4893 type
= TREE_TYPE (ctor
);
4894 elts
= CONSTRUCTOR_ELTS (ctor
);
/* Dispatch on the static type of the constructor; aggregate, complex
   and vector types are handled in the visible branches below.  */
4897 switch (TREE_CODE (type
))
4901 case QUAL_UNION_TYPE
:
4904 /* Use readonly data for initializers of this or smaller size
4905 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4907 const HOST_WIDE_INT min_unique_size
= 64;
4908 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4909 is smaller than this, use readonly data. */
4910 const int unique_nonzero_ratio
= 8;
4911 /* True if a single access of the object must be ensured. This is the
4912 case if the target is volatile, the type is non-addressable and more
4913 than one field need to be assigned. */
4914 const bool ensure_single_access
4915 = TREE_THIS_VOLATILE (object
)
4916 && !TREE_ADDRESSABLE (type
)
4917 && vec_safe_length (elts
) > 1;
4918 struct gimplify_init_ctor_preeval_data preeval_data
;
4919 HOST_WIDE_INT num_ctor_elements
, num_nonzero_elements
;
4920 HOST_WIDE_INT num_unique_nonzero_elements
;
4921 bool cleared
, complete_p
, valid_const_initializer
;
4923 /* Aggregate types must lower constructors to initialization of
4924 individual elements. The exception is that a CONSTRUCTOR node
4925 with no elements indicates zero-initialization of the whole. */
4926 if (vec_safe_is_empty (elts
))
4928 if (notify_temp_creation
)
4933 /* Fetch information about the constructor to direct later processing.
4934 We might want to make static versions of it in various cases, and
4935 can only do so if it known to be a valid constant initializer. */
4936 valid_const_initializer
4937 = categorize_ctor_elements (ctor
, &num_nonzero_elements
,
4938 &num_unique_nonzero_elements
,
4939 &num_ctor_elements
, &complete_p
);
4941 /* If a const aggregate variable is being initialized, then it
4942 should never be a lose to promote the variable to be static. */
4943 if (valid_const_initializer
4944 && num_nonzero_elements
> 1
4945 && TREE_READONLY (object
)
4947 && !DECL_REGISTER (object
)
4948 && (flag_merge_constants
>= 2 || !TREE_ADDRESSABLE (object
))
4949 /* For ctors that have many repeated nonzero elements
4950 represented through RANGE_EXPRs, prefer initializing
4951 those through runtime loops over copies of large amounts
4952 of data from readonly data section. */
4953 && (num_unique_nonzero_elements
4954 > num_nonzero_elements
/ unique_nonzero_ratio
4955 || ((unsigned HOST_WIDE_INT
) int_size_in_bytes (type
)
4956 <= (unsigned HOST_WIDE_INT
) min_unique_size
)))
4958 if (notify_temp_creation
)
/* Promote the readonly object to a static with the ctor as its
   DECL_INITIAL, giving it a name and assembler name if needed.  */
4961 DECL_INITIAL (object
) = ctor
;
4962 TREE_STATIC (object
) = 1;
4963 if (!DECL_NAME (object
))
4964 DECL_NAME (object
) = create_tmp_var_name ("C");
4965 walk_tree (&DECL_INITIAL (object
), force_labels_r
, NULL
, NULL
);
4967 /* ??? C++ doesn't automatically append a .<number> to the
4968 assembler name, and even when it does, it looks at FE private
4969 data structures to figure out what that number should be,
4970 which are not set for this variable. I suppose this is
4971 important for local statics for inline functions, which aren't
4972 "local" in the object file sense. So in order to get a unique
4973 TU-local symbol, we must invoke the lhd version now. */
4974 lhd_set_decl_assembler_name (object
);
4976 *expr_p
= NULL_TREE
;
4980 /* If there are "lots" of initialized elements, even discounting
4981 those that are not address constants (and thus *must* be
4982 computed at runtime), then partition the constructor into
4983 constant and non-constant parts. Block copy the constant
4984 parts in, then generate code for the non-constant parts. */
4985 /* TODO. There's code in cp/typeck.c to do this. */
/* Decide CLEARED: whether the whole object gets block-cleared before
   individual element stores.  */
4987 if (int_size_in_bytes (TREE_TYPE (ctor
)) < 0)
4988 /* store_constructor will ignore the clearing of variable-sized
4989 objects. Initializers for such objects must explicitly set
4990 every field that needs to be set. */
4992 else if (!complete_p
)
4993 /* If the constructor isn't complete, clear the whole object
4994 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4996 ??? This ought not to be needed. For any element not present
4997 in the initializer, we should simply set them to zero. Except
4998 we'd need to *find* the elements that are not present, and that
4999 requires trickery to avoid quadratic compile-time behavior in
5000 large cases or excessive memory use in small cases. */
5001 cleared
= !CONSTRUCTOR_NO_CLEARING (ctor
);
5002 else if (num_ctor_elements
- num_nonzero_elements
5003 > CLEAR_RATIO (optimize_function_for_speed_p (cfun
))
5004 && num_nonzero_elements
< num_ctor_elements
/ 4)
5005 /* If there are "lots" of zeros, it's more efficient to clear
5006 the memory and then set the nonzero elements. */
5008 else if (ensure_single_access
&& num_nonzero_elements
== 0)
5009 /* If a single access to the target must be ensured and all elements
5010 are zero, then it's optimal to clear whatever their number. */
5015 /* If there are "lots" of initialized elements, and all of them
5016 are valid address constants, then the entire initializer can
5017 be dropped to memory, and then memcpy'd out. Don't do this
5018 for sparse arrays, though, as it's more efficient to follow
5019 the standard CONSTRUCTOR behavior of memset followed by
5020 individual element initialization. Also don't do this for small
5021 all-zero initializers (which aren't big enough to merit
5022 clearing), and don't try to make bitwise copies of
5023 TREE_ADDRESSABLE types. */
5024 if (valid_const_initializer
5026 && !(cleared
|| num_nonzero_elements
== 0)
5027 && !TREE_ADDRESSABLE (type
))
5029 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5032 /* ??? We can still get unbounded array types, at least
5033 from the C++ front end. This seems wrong, but attempt
5034 to work around it for now. */
5037 size
= int_size_in_bytes (TREE_TYPE (object
));
5039 TREE_TYPE (ctor
) = type
= TREE_TYPE (object
);
5042 /* Find the maximum alignment we can assume for the object. */
5043 /* ??? Make use of DECL_OFFSET_ALIGN. */
5044 if (DECL_P (object
))
5045 align
= DECL_ALIGN (object
);
5047 align
= TYPE_ALIGN (type
);
5049 /* Do a block move either if the size is so small as to make
5050 each individual move a sub-unit move on average, or if it
5051 is so large as to make individual moves inefficient. */
5053 && num_nonzero_elements
> 1
5054 /* For ctors that have many repeated nonzero elements
5055 represented through RANGE_EXPRs, prefer initializing
5056 those through runtime loops over copies of large amounts
5057 of data from readonly data section. */
5058 && (num_unique_nonzero_elements
5059 > num_nonzero_elements
/ unique_nonzero_ratio
5060 || size
<= min_unique_size
)
5061 && (size
< num_nonzero_elements
5062 || !can_move_by_pieces (size
, align
)))
5064 if (notify_temp_creation
)
/* Drop the constant ctor to the constant pool and rewrite the RHS
   to refer to it (through a VIEW_CONVERT_EXPR if types differ).  */
5067 walk_tree (&ctor
, force_labels_r
, NULL
, NULL
);
5068 ctor
= tree_output_constant_def (ctor
);
5069 if (!useless_type_conversion_p (type
, TREE_TYPE (ctor
)))
5070 ctor
= build1 (VIEW_CONVERT_EXPR
, type
, ctor
);
5071 TREE_OPERAND (*expr_p
, 1) = ctor
;
5073 /* This is no longer an assignment of a CONSTRUCTOR, but
5074 we still may have processing to do on the LHS. So
5075 pretend we didn't do anything here to let that happen. */
5076 return GS_UNHANDLED
;
5080 /* If a single access to the target must be ensured and there are
5081 nonzero elements or the zero elements are not assigned en masse,
5082 initialize the target from a temporary. */
5083 if (ensure_single_access
&& (num_nonzero_elements
> 0 || !cleared
))
5085 if (notify_temp_creation
)
5088 tree temp
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
5089 TREE_OPERAND (*expr_p
, 0) = temp
;
5090 *expr_p
= build2 (COMPOUND_EXPR
, TREE_TYPE (*expr_p
),
5092 build2 (MODIFY_EXPR
, void_type_node
,
5097 if (notify_temp_creation
)
5100 /* If there are nonzero elements and if needed, pre-evaluate to capture
5101 elements overlapping with the lhs into temporaries. We must do this
5102 before clearing to fetch the values before they are zeroed-out. */
5103 if (num_nonzero_elements
> 0 && TREE_CODE (*expr_p
) != INIT_EXPR
)
5105 preeval_data
.lhs_base_decl
= get_base_address (object
);
5106 if (!DECL_P (preeval_data
.lhs_base_decl
))
5107 preeval_data
.lhs_base_decl
= NULL
;
5108 preeval_data
.lhs_alias_set
= get_alias_set (object
);
5110 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p
, 1),
5111 pre_p
, post_p
, &preeval_data
);
5114 bool ctor_has_side_effects_p
5115 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p
, 1));
5119 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5120 Note that we still have to gimplify, in order to handle the
5121 case of variable sized types. Avoid shared tree structures. */
5122 CONSTRUCTOR_ELTS (ctor
) = NULL
;
5123 TREE_SIDE_EFFECTS (ctor
) = 0;
5124 object
= unshare_expr (object
);
5125 gimplify_stmt (expr_p
, pre_p
);
5128 /* If we have not block cleared the object, or if there are nonzero
5129 elements in the constructor, or if the constructor has side effects,
5130 add assignments to the individual scalar fields of the object. */
5132 || num_nonzero_elements
> 0
5133 || ctor_has_side_effects_p
)
5134 gimplify_init_ctor_eval (object
, elts
, pre_p
, cleared
)
;
5136 *expr_p
= NULL_TREE
;
/* NOTE(review): the case labels for the complex-type branch below
   (presumably COMPLEX_TYPE) were dropped by the extraction.  */
5144 if (notify_temp_creation
)
5147 /* Extract the real and imaginary parts out of the ctor. */
5148 gcc_assert (elts
->length () == 2);
5149 r
= (*elts
)[0].value
;
5150 i
= (*elts
)[1].value
;
5151 if (r
== NULL
|| i
== NULL
)
5153 tree zero
= build_zero_cst (TREE_TYPE (type
));
5160 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5161 represent creation of a complex value. */
5162 if (TREE_CONSTANT (r
) && TREE_CONSTANT (i
))
5164 ctor
= build_complex (type
, r
, i
);
5165 TREE_OPERAND (*expr_p
, 1) = ctor
;
5169 ctor
= build2 (COMPLEX_EXPR
, type
, r
, i
);
5170 TREE_OPERAND (*expr_p
, 1) = ctor
;
5171 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1),
5174 rhs_predicate_for (TREE_OPERAND (*expr_p
, 0)),
/* NOTE(review): vector-type branch; its case label (presumably
   VECTOR_TYPE) was dropped by the extraction.  */
5182 unsigned HOST_WIDE_INT ix
;
5183 constructor_elt
*ce
;
5185 if (notify_temp_creation
)
5188 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5189 if (TREE_CONSTANT (ctor
))
5191 bool constant_p
= true;
5194 /* Even when ctor is constant, it might contain non-*_CST
5195 elements, such as addresses or trapping values like
5196 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5197 in VECTOR_CST nodes. */
5198 FOR_EACH_CONSTRUCTOR_VALUE (elts
, ix
, value
)
5199 if (!CONSTANT_CLASS_P (value
))
5207 TREE_OPERAND (*expr_p
, 1) = build_vector_from_ctor (type
, elts
);
5211 TREE_CONSTANT (ctor
) = 0;
5214 /* Vector types use CONSTRUCTOR all the way through gimple
5215 compilation as a general initializer. */
5216 FOR_EACH_VEC_SAFE_ELT (elts
, ix
, ce
)
5218 enum gimplify_status tret
;
5219 tret
= gimplify_expr (&ce
->value
, pre_p
, post_p
, is_gimple_val
,
5221 if (tret
== GS_ERROR
)
5223 else if (TREE_STATIC (ctor
)
5224 && !initializer_constant_valid_p (ce
->value
,
5225 TREE_TYPE (ce
->value
)))
5226 TREE_STATIC (ctor
) = 0;
5228 if (!is_gimple_reg (TREE_OPERAND (*expr_p
, 0)))
5229 TREE_OPERAND (*expr_p
, 1) = get_formal_tmp_var (ctor
, pre_p
);
5234 /* So how did we get a CONSTRUCTOR for a scalar type? */
/* Common exit: emit the pending assignment if nothing above did.  */
5238 if (ret
== GS_ERROR
)
5240 /* If we have gimplified both sides of the initializer but have
5241 not emitted an assignment, do so now. */
5244 tree lhs
= TREE_OPERAND (*expr_p
, 0);
5245 tree rhs
= TREE_OPERAND (*expr_p
, 1);
5246 if (want_value
&& object
== lhs
)
5247 lhs
= unshare_expr (lhs
);
5248 gassign
*init
= gimple_build_assign (lhs
, rhs
);
5249 gimplify_seq_add_stmt (pre_p
, init
);
5263 /* Given a pointer value OP0, return a simplified version of an
5264 indirection through OP0, or NULL_TREE if no simplification is
5265 possible. This may only be applied to a rhs of an expression.
5266 Note that the resulting type may be different from the type pointed
5267 to in the sense that it is still compatible from the langhooks
5271 gimple_fold_indirect_ref_rhs (tree t
)
5273 return gimple_fold_indirect_ref (t
);
/* NOTE(review): this block is a truncated extraction -- case labels,
   braces and several statements are missing; the visible text is kept
   verbatim and only annotated.  */
5276 /* Subroutine of gimplify_modify_expr to do simplifications of
5277 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5278 something changes. */
5280 static enum gimplify_status
5281 gimplify_modify_expr_rhs (tree
*expr_p
, tree
*from_p
, tree
*to_p
,
5282 gimple_seq
*pre_p
, gimple_seq
*post_p
,
5285 enum gimplify_status ret
= GS_UNHANDLED
;
5291 switch (TREE_CODE (*from_p
))
/* NOTE(review): case label for this branch (a read-only decl RHS)
   was dropped by the extraction.  */
5294 /* If we're assigning from a read-only variable initialized with
5295 a constructor and not volatile, do the direct assignment from
5296 the constructor, but only if the target is not volatile either
5297 since this latter assignment might end up being done on a per
5298 field basis. However, if the target is volatile and the type
5299 is aggregate and non-addressable, gimplify_init_constructor
5300 knows that it needs to ensure a single access to the target
5301 and it will return GS_OK only in this case. */
5302 if (TREE_READONLY (*from_p
)
5303 && DECL_INITIAL (*from_p
)
5304 && TREE_CODE (DECL_INITIAL (*from_p
)) == CONSTRUCTOR
5305 && !TREE_THIS_VOLATILE (*from_p
)
5306 && (!TREE_THIS_VOLATILE (*to_p
)
5307 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p
))
5308 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p
)))))
5310 tree old_from
= *from_p
;
5311 enum gimplify_status subret
;
5313 /* Move the constructor into the RHS. */
5314 *from_p
= unshare_expr (DECL_INITIAL (*from_p
));
5316 /* Let's see if gimplify_init_constructor will need to put
5318 subret
= gimplify_init_constructor (expr_p
, NULL
, NULL
,
5320 if (subret
== GS_ERROR
)
5322 /* If so, revert the change. */
/* NOTE(review): case label for this branch (an indirection RHS)
   was dropped by the extraction.  */
5334 /* If we have code like
5338 where the type of "x" is a (possibly cv-qualified variant
5339 of "A"), treat the entire expression as identical to "x".
5340 This kind of code arises in C++ when an object is bound
5341 to a const reference, and if "x" is a TARGET_EXPR we want
5342 to take advantage of the optimization below. */
5343 bool volatile_p
= TREE_THIS_VOLATILE (*from_p
);
5344 tree t
= gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p
, 0));
5347 if (TREE_THIS_VOLATILE (t
) != volatile_p
)
5350 t
= build_simple_mem_ref_loc (EXPR_LOCATION (*from_p
),
5351 build_fold_addr_expr (t
));
5352 if (REFERENCE_CLASS_P (t
))
5353 TREE_THIS_VOLATILE (t
) = volatile_p
;
/* NOTE(review): TARGET_EXPR case label dropped by the extraction.  */
5364 /* If we are initializing something from a TARGET_EXPR, strip the
5365 TARGET_EXPR and initialize it directly, if possible. This can't
5366 be done if the initializer is void, since that implies that the
5367 temporary is set in some non-trivial way.
5369 ??? What about code that pulls out the temp and uses it
5370 elsewhere? I think that such code never uses the TARGET_EXPR as
5371 an initializer. If I'm wrong, we'll die because the temp won't
5372 have any RTL. In that case, I guess we'll need to replace
5373 references somehow. */
5374 tree init
= TARGET_EXPR_INITIAL (*from_p
);
5377 && (TREE_CODE (*expr_p
) != MODIFY_EXPR
5378 || !TARGET_EXPR_NO_ELIDE (*from_p
))
5379 && !VOID_TYPE_P (TREE_TYPE (init
)))
5389 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5391 gimplify_compound_expr (from_p
, pre_p
, true);
/* CONSTRUCTOR on the RHS: delegate to gimplify_init_constructor.  */
5397 /* If we already made some changes, let the front end have a
5398 crack at this before we break it down. */
5399 if (ret
!= GS_UNHANDLED
)
5401 /* If we're initializing from a CONSTRUCTOR, break this into
5402 individual MODIFY_EXPRs. */
5403 return gimplify_init_constructor (expr_p
, pre_p
, post_p
, want_value
,
/* COND_EXPR on the RHS: sink the assignment into both arms.  */
5407 /* If we're assigning to a non-register type, push the assignment
5408 down into the branches. This is mandatory for ADDRESSABLE types,
5409 since we cannot generate temporaries for such, but it saves a
5410 copy in other cases as well. */
5411 if (!is_gimple_reg_type (TREE_TYPE (*from_p
)))
5413 /* This code should mirror the code in gimplify_cond_expr. */
5414 enum tree_code code
= TREE_CODE (*expr_p
);
5415 tree cond
= *from_p
;
5416 tree result
= *to_p
;
5418 ret
= gimplify_expr (&result
, pre_p
, post_p
,
5419 is_gimple_lvalue
, fb_lvalue
);
5420 if (ret
!= GS_ERROR
)
5423 /* If we are going to write RESULT more than once, clear
5424 TREE_READONLY flag, otherwise we might incorrectly promote
5425 the variable to static const and initialize it at compile
5426 time in one of the branches. */
5428 && TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
5429 && TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5430 TREE_READONLY (result
) = 0;
5431 if (TREE_TYPE (TREE_OPERAND (cond
, 1)) != void_type_node
)
5432 TREE_OPERAND (cond
, 1)
5433 = build2 (code
, void_type_node
, result
,
5434 TREE_OPERAND (cond
, 1));
5435 if (TREE_TYPE (TREE_OPERAND (cond
, 2)) != void_type_node
)
5436 TREE_OPERAND (cond
, 2)
5437 = build2 (code
, void_type_node
, unshare_expr (result
),
5438 TREE_OPERAND (cond
, 2));
5440 TREE_TYPE (cond
) = void_type_node
;
5441 recalculate_side_effects (cond
);
5445 gimplify_and_add (cond
, pre_p
);
5446 *expr_p
= unshare_expr (result
);
/* NOTE(review): CALL_EXPR case label dropped by the extraction.  */
5455 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5456 return slot so that we don't generate a temporary. */
5457 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p
)
5458 && aggregate_value_p (*from_p
, *from_p
))
5462 if (!(rhs_predicate_for (*to_p
))(*from_p
))
5463 /* If we need a temporary, *to_p isn't accurate. */
5465 /* It's OK to use the return slot directly unless it's an NRV. */
5466 else if (TREE_CODE (*to_p
) == RESULT_DECL
5467 && DECL_NAME (*to_p
) == NULL_TREE
5468 && needs_to_live_in_memory (*to_p
))
5470 else if (is_gimple_reg_type (TREE_TYPE (*to_p
))
5471 || (DECL_P (*to_p
) && DECL_REGISTER (*to_p
)))
5472 /* Don't force regs into memory. */
5474 else if (TREE_CODE (*expr_p
) == INIT_EXPR
)
5475 /* It's OK to use the target directly if it's being
5478 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p
)))
5480 /* Always use the target and thus RSO for variable-sized types.
5481 GIMPLE cannot deal with a variable-sized assignment
5482 embedded in a call statement. */
5484 else if (TREE_CODE (*to_p
) != SSA_NAME
5485 && (!is_gimple_variable (*to_p
)
5486 || needs_to_live_in_memory (*to_p
)))
5487 /* Don't use the original target if it's already addressable;
5488 if its address escapes, and the called function uses the
5489 NRV optimization, a conforming program could see *to_p
5490 change before the called function returns; see c++/19317.
5491 When optimizing, the return_slot pass marks more functions
5492 as safe after we have escape info. */
5499 CALL_EXPR_RETURN_SLOT_OPT (*from_p
) = 1;
5500 mark_addressable (*to_p
);
5505 case WITH_SIZE_EXPR
:
5506 /* Likewise for calls that return an aggregate of non-constant size,
5507 since we would not be able to generate a temporary at all. */
5508 if (TREE_CODE (TREE_OPERAND (*from_p
, 0)) == CALL_EXPR
)
5510 *from_p
= TREE_OPERAND (*from_p
, 0);
5511 /* We don't change ret in this case because the
5512 WITH_SIZE_EXPR might have been added in
5513 gimplify_modify_expr, so returning GS_OK would lead to an
5519 /* If we're initializing from a container, push the initialization
5521 case CLEANUP_POINT_EXPR
:
5523 case STATEMENT_LIST
:
5525 tree wrap
= *from_p
;
5528 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_min_lval
,
5530 if (ret
!= GS_ERROR
)
5533 t
= voidify_wrapper_expr (wrap
, *expr_p
);
5534 gcc_assert (t
== *expr_p
);
5538 gimplify_and_add (wrap
, pre_p
);
5539 *expr_p
= unshare_expr (*to_p
);
/* NOTE(review): NOP_EXPR case label dropped by the extraction.  */
5547 /* Pull out compound literal expressions from a NOP_EXPR.
5548 Those are created in the C FE to drop qualifiers during
5549 lvalue conversion. */
5550 if ((TREE_CODE (TREE_OPERAND (*from_p
, 0)) == COMPOUND_LITERAL_EXPR
)
5551 && tree_ssa_useless_type_conversion (*from_p
))
5553 *from_p
= TREE_OPERAND (*from_p
, 0);
5559 case COMPOUND_LITERAL_EXPR
:
5561 tree complit
= TREE_OPERAND (*expr_p
, 1);
5562 tree decl_s
= COMPOUND_LITERAL_EXPR_DECL_EXPR (complit
);
5563 tree decl
= DECL_EXPR_DECL (decl_s
);
5564 tree init
= DECL_INITIAL (decl
);
5566 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5567 into struct T x = { 0, 1, 2 } if the address of the
5568 compound literal has never been taken. */
5569 if (!TREE_ADDRESSABLE (complit
)
5570 && !TREE_ADDRESSABLE (decl
)
5573 *expr_p
= copy_node (*expr_p
);
5574 TREE_OPERAND (*expr_p
, 1) = init
;
/* NOTE(review): truncated extraction -- the return type line, most
   case labels and the `return' statements of this switch were dropped;
   text kept verbatim, comments only.  */
5589 /* Return true if T looks like a valid GIMPLE statement. */
5592 is_gimple_stmt (tree t
)
5594 const enum tree_code code
= TREE_CODE (t
);
/* Classify by tree code; each visible group of cases shares the
   commented predicate.  */
5599 /* The only valid NOP_EXPR is the empty statement. */
5600 return IS_EMPTY_STMT (t
);
5604 /* These are only valid if they're void. */
5605 return TREE_TYPE (t
) == NULL
|| VOID_TYPE_P (TREE_TYPE (t
));
/* Control-flow and structured statements.  */
5611 case CASE_LABEL_EXPR
:
5612 case TRY_CATCH_EXPR
:
5613 case TRY_FINALLY_EXPR
:
5614 case EH_FILTER_EXPR
:
5617 case STATEMENT_LIST
:
/* OpenACC and OpenMP constructs (several sibling cases were dropped
   by the extraction).  */
5622 case OACC_HOST_DATA
:
5625 case OACC_ENTER_DATA
:
5626 case OACC_EXIT_DATA
:
5631 case OMP_DISTRIBUTE
:
5644 case OMP_TARGET_DATA
:
5645 case OMP_TARGET_UPDATE
:
5646 case OMP_TARGET_ENTER_DATA
:
5647 case OMP_TARGET_EXIT_DATA
:
5650 /* These are always void. */
5656 /* These are valid regardless of their type. */
5665 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5666 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5668 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5669 other, unmodified part of the complex object just before the total store.
5670 As a consequence, if the object is still uninitialized, an undefined value
5671 will be loaded into a register, which may result in a spurious exception
5672 if the register is floating-point and the value happens to be a signaling
5673 NaN for example. Then the fully-fledged complex operations lowering pass
5674 followed by a DCE pass are necessary in order to fix things up. */
5676 static enum gimplify_status
5677 gimplify_modify_expr_complex_part (tree
*expr_p
, gimple_seq
*pre_p
,
5680 enum tree_code code
, ocode
;
5681 tree lhs
, rhs
, new_rhs
, other
, realpart
, imagpart
;
5683 lhs
= TREE_OPERAND (*expr_p
, 0);
5684 rhs
= TREE_OPERAND (*expr_p
, 1);
5685 code
= TREE_CODE (lhs
);
5686 lhs
= TREE_OPERAND (lhs
, 0);
5688 ocode
= code
== REALPART_EXPR
? IMAGPART_EXPR
: REALPART_EXPR
;
5689 other
= build1 (ocode
, TREE_TYPE (rhs
), lhs
);
5690 TREE_NO_WARNING (other
) = 1;
5691 other
= get_formal_tmp_var (other
, pre_p
);
5693 realpart
= code
== REALPART_EXPR
? rhs
: other
;
5694 imagpart
= code
== REALPART_EXPR
? other
: rhs
;
5696 if (TREE_CONSTANT (realpart
) && TREE_CONSTANT (imagpart
))
5697 new_rhs
= build_complex (TREE_TYPE (lhs
), realpart
, imagpart
);
5699 new_rhs
= build2 (COMPLEX_EXPR
, TREE_TYPE (lhs
), realpart
, imagpart
);
5701 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (lhs
, new_rhs
));
5702 *expr_p
= (want_value
) ? rhs
: NULL_TREE
;
5707 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5713 PRE_P points to the list where side effects that must happen before
5714 *EXPR_P should be stored.
5716 POST_P points to the list where side effects that must happen after
5717 *EXPR_P should be stored.
5719 WANT_VALUE is nonzero iff we want to use the value of this expression
5720 in another expression. */
5722 static enum gimplify_status
5723 gimplify_modify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
5726 tree
*from_p
= &TREE_OPERAND (*expr_p
, 1);
5727 tree
*to_p
= &TREE_OPERAND (*expr_p
, 0);
5728 enum gimplify_status ret
= GS_UNHANDLED
;
5730 location_t loc
= EXPR_LOCATION (*expr_p
);
5731 gimple_stmt_iterator gsi
;
5733 gcc_assert (TREE_CODE (*expr_p
) == MODIFY_EXPR
5734 || TREE_CODE (*expr_p
) == INIT_EXPR
);
5736 /* Trying to simplify a clobber using normal logic doesn't work,
5737 so handle it here. */
5738 if (TREE_CLOBBER_P (*from_p
))
5740 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5741 if (ret
== GS_ERROR
)
5743 gcc_assert (!want_value
);
5744 if (!VAR_P (*to_p
) && TREE_CODE (*to_p
) != MEM_REF
)
5746 tree addr
= get_initialized_tmp_var (build_fold_addr_expr (*to_p
),
5748 *to_p
= build_simple_mem_ref_loc (EXPR_LOCATION (*to_p
), addr
);
5750 gimplify_seq_add_stmt (pre_p
, gimple_build_assign (*to_p
, *from_p
));
5755 /* Insert pointer conversions required by the middle-end that are not
5756 required by the frontend. This fixes middle-end type checking for
5757 for example gcc.dg/redecl-6.c. */
5758 if (POINTER_TYPE_P (TREE_TYPE (*to_p
)))
5760 STRIP_USELESS_TYPE_CONVERSION (*from_p
);
5761 if (!useless_type_conversion_p (TREE_TYPE (*to_p
), TREE_TYPE (*from_p
)))
5762 *from_p
= fold_convert_loc (loc
, TREE_TYPE (*to_p
), *from_p
);
5765 /* See if any simplifications can be done based on what the RHS is. */
5766 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5768 if (ret
!= GS_UNHANDLED
)
5771 /* For zero sized types only gimplify the left hand side and right hand
5772 side as statements and throw away the assignment. Do this after
5773 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5775 if (zero_sized_type (TREE_TYPE (*from_p
))
5777 /* Don't do this for calls that return addressable types, expand_call
5778 relies on those having a lhs. */
5779 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p
))
5780 && TREE_CODE (*from_p
) == CALL_EXPR
))
5782 gimplify_stmt (from_p
, pre_p
);
5783 gimplify_stmt (to_p
, pre_p
);
5784 *expr_p
= NULL_TREE
;
5788 /* If the value being copied is of variable width, compute the length
5789 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5790 before gimplifying any of the operands so that we can resolve any
5791 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5792 the size of the expression to be copied, not of the destination, so
5793 that is what we must do here. */
5794 maybe_with_size_expr (from_p
);
5796 /* As a special case, we have to temporarily allow for assignments
5797 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5798 a toplevel statement, when gimplifying the GENERIC expression
5799 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5800 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5802 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5803 prevent gimplify_expr from trying to create a new temporary for
5804 foo's LHS, we tell it that it should only gimplify until it
5805 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5806 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5807 and all we need to do here is set 'a' to be its LHS. */
5809 /* Gimplify the RHS first for C++17 and bug 71104. */
5810 gimple_predicate initial_pred
= initial_rhs_predicate_for (*to_p
);
5811 ret
= gimplify_expr (from_p
, pre_p
, post_p
, initial_pred
, fb_rvalue
);
5812 if (ret
== GS_ERROR
)
5815 /* Then gimplify the LHS. */
5816 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5817 twice we have to make sure to gimplify into non-SSA as otherwise
5818 the abnormal edge added later will make those defs not dominate
5820 ??? Technically this applies only to the registers used in the
5821 resulting non-register *TO_P. */
5822 bool saved_into_ssa
= gimplify_ctxp
->into_ssa
;
5824 && TREE_CODE (*from_p
) == CALL_EXPR
5825 && call_expr_flags (*from_p
) & ECF_RETURNS_TWICE
)
5826 gimplify_ctxp
->into_ssa
= false;
5827 ret
= gimplify_expr (to_p
, pre_p
, post_p
, is_gimple_lvalue
, fb_lvalue
);
5828 gimplify_ctxp
->into_ssa
= saved_into_ssa
;
5829 if (ret
== GS_ERROR
)
5832 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5833 guess for the predicate was wrong. */
5834 gimple_predicate final_pred
= rhs_predicate_for (*to_p
);
5835 if (final_pred
!= initial_pred
)
5837 ret
= gimplify_expr (from_p
, pre_p
, post_p
, final_pred
, fb_rvalue
);
5838 if (ret
== GS_ERROR
)
5842 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5843 size as argument to the call. */
5844 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5846 tree call
= TREE_OPERAND (*from_p
, 0);
5847 tree vlasize
= TREE_OPERAND (*from_p
, 1);
5849 if (TREE_CODE (call
) == CALL_EXPR
5850 && CALL_EXPR_IFN (call
) == IFN_VA_ARG
)
5852 int nargs
= call_expr_nargs (call
);
5853 tree type
= TREE_TYPE (call
);
5854 tree ap
= CALL_EXPR_ARG (call
, 0);
5855 tree tag
= CALL_EXPR_ARG (call
, 1);
5856 tree aptag
= CALL_EXPR_ARG (call
, 2);
5857 tree newcall
= build_call_expr_internal_loc (EXPR_LOCATION (call
),
5861 TREE_OPERAND (*from_p
, 0) = newcall
;
5865 /* Now see if the above changed *from_p to something we handle specially. */
5866 ret
= gimplify_modify_expr_rhs (expr_p
, from_p
, to_p
, pre_p
, post_p
,
5868 if (ret
!= GS_UNHANDLED
)
5871 /* If we've got a variable sized assignment between two lvalues (i.e. does
5872 not involve a call), then we can make things a bit more straightforward
5873 by converting the assignment to memcpy or memset. */
5874 if (TREE_CODE (*from_p
) == WITH_SIZE_EXPR
)
5876 tree from
= TREE_OPERAND (*from_p
, 0);
5877 tree size
= TREE_OPERAND (*from_p
, 1);
5879 if (TREE_CODE (from
) == CONSTRUCTOR
)
5880 return gimplify_modify_expr_to_memset (expr_p
, size
, want_value
, pre_p
);
5882 if (is_gimple_addressable (from
))
5885 return gimplify_modify_expr_to_memcpy (expr_p
, size
, want_value
,
5890 /* Transform partial stores to non-addressable complex variables into
5891 total stores. This allows us to use real instead of virtual operands
5892 for these variables, which improves optimization. */
5893 if ((TREE_CODE (*to_p
) == REALPART_EXPR
5894 || TREE_CODE (*to_p
) == IMAGPART_EXPR
)
5895 && is_gimple_reg (TREE_OPERAND (*to_p
, 0)))
5896 return gimplify_modify_expr_complex_part (expr_p
, pre_p
, want_value
);
5898 /* Try to alleviate the effects of the gimplification creating artificial
5899 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5900 make sure not to create DECL_DEBUG_EXPR links across functions. */
5901 if (!gimplify_ctxp
->into_ssa
5903 && DECL_IGNORED_P (*from_p
)
5905 && !DECL_IGNORED_P (*to_p
)
5906 && decl_function_context (*to_p
) == current_function_decl
5907 && decl_function_context (*from_p
) == current_function_decl
)
5909 if (!DECL_NAME (*from_p
) && DECL_NAME (*to_p
))
5911 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p
)));
5912 DECL_HAS_DEBUG_EXPR_P (*from_p
) = 1;
5913 SET_DECL_DEBUG_EXPR (*from_p
, *to_p
);
5916 if (want_value
&& TREE_THIS_VOLATILE (*to_p
))
5917 *from_p
= get_initialized_tmp_var (*from_p
, pre_p
, post_p
);
5919 if (TREE_CODE (*from_p
) == CALL_EXPR
)
5921 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5922 instead of a GIMPLE_ASSIGN. */
5924 if (CALL_EXPR_FN (*from_p
) == NULL_TREE
)
5926 /* Gimplify internal functions created in the FEs. */
5927 int nargs
= call_expr_nargs (*from_p
), i
;
5928 enum internal_fn ifn
= CALL_EXPR_IFN (*from_p
);
5929 auto_vec
<tree
> vargs (nargs
);
5931 for (i
= 0; i
< nargs
; i
++)
5933 gimplify_arg (&CALL_EXPR_ARG (*from_p
, i
), pre_p
,
5934 EXPR_LOCATION (*from_p
));
5935 vargs
.quick_push (CALL_EXPR_ARG (*from_p
, i
));
5937 call_stmt
= gimple_build_call_internal_vec (ifn
, vargs
);
5938 gimple_call_set_nothrow (call_stmt
, TREE_NOTHROW (*from_p
));
5939 gimple_set_location (call_stmt
, EXPR_LOCATION (*expr_p
));
5943 tree fnptrtype
= TREE_TYPE (CALL_EXPR_FN (*from_p
));
5944 CALL_EXPR_FN (*from_p
) = TREE_OPERAND (CALL_EXPR_FN (*from_p
), 0);
5945 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p
));
5946 tree fndecl
= get_callee_fndecl (*from_p
);
5948 && fndecl_built_in_p (fndecl
, BUILT_IN_EXPECT
)
5949 && call_expr_nargs (*from_p
) == 3)
5950 call_stmt
= gimple_build_call_internal (IFN_BUILTIN_EXPECT
, 3,
5951 CALL_EXPR_ARG (*from_p
, 0),
5952 CALL_EXPR_ARG (*from_p
, 1),
5953 CALL_EXPR_ARG (*from_p
, 2));
5956 call_stmt
= gimple_build_call_from_tree (*from_p
, fnptrtype
);
5959 notice_special_calls (call_stmt
);
5960 if (!gimple_call_noreturn_p (call_stmt
) || !should_remove_lhs_p (*to_p
))
5961 gimple_call_set_lhs (call_stmt
, *to_p
);
5962 else if (TREE_CODE (*to_p
) == SSA_NAME
)
5963 /* The above is somewhat premature, avoid ICEing later for a
5964 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5965 ??? This doesn't make it a default-def. */
5966 SSA_NAME_DEF_STMT (*to_p
) = gimple_build_nop ();
5972 assign
= gimple_build_assign (*to_p
, *from_p
);
5973 gimple_set_location (assign
, EXPR_LOCATION (*expr_p
));
5974 if (COMPARISON_CLASS_P (*from_p
))
5975 gimple_set_no_warning (assign
, TREE_NO_WARNING (*from_p
));
5978 if (gimplify_ctxp
->into_ssa
&& is_gimple_reg (*to_p
))
5980 /* We should have got an SSA name from the start. */
5981 gcc_assert (TREE_CODE (*to_p
) == SSA_NAME
5982 || ! gimple_in_ssa_p (cfun
));
5985 gimplify_seq_add_stmt (pre_p
, assign
);
5986 gsi
= gsi_last (*pre_p
);
5987 maybe_fold_stmt (&gsi
);
5991 *expr_p
= TREE_THIS_VOLATILE (*to_p
) ? *from_p
: unshare_expr (*to_p
);
6000 /* Gimplify a comparison between two variable-sized objects. Do this
6001 with a call to BUILT_IN_MEMCMP. */
6003 static enum gimplify_status
6004 gimplify_variable_sized_compare (tree
*expr_p
)
6006 location_t loc
= EXPR_LOCATION (*expr_p
);
6007 tree op0
= TREE_OPERAND (*expr_p
, 0);
6008 tree op1
= TREE_OPERAND (*expr_p
, 1);
6009 tree t
, arg
, dest
, src
, expr
;
6011 arg
= TYPE_SIZE_UNIT (TREE_TYPE (op0
));
6012 arg
= unshare_expr (arg
);
6013 arg
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg
, op0
);
6014 src
= build_fold_addr_expr_loc (loc
, op1
);
6015 dest
= build_fold_addr_expr_loc (loc
, op0
);
6016 t
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
6017 t
= build_call_expr_loc (loc
, t
, 3, dest
, src
, arg
);
6020 = build2 (TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), t
, integer_zero_node
);
6021 SET_EXPR_LOCATION (expr
, loc
);
6027 /* Gimplify a comparison between two aggregate objects of integral scalar
6028 mode as a comparison between the bitwise equivalent scalar values. */
6030 static enum gimplify_status
6031 gimplify_scalar_mode_aggregate_compare (tree
*expr_p
)
6033 location_t loc
= EXPR_LOCATION (*expr_p
);
6034 tree op0
= TREE_OPERAND (*expr_p
, 0);
6035 tree op1
= TREE_OPERAND (*expr_p
, 1);
6037 tree type
= TREE_TYPE (op0
);
6038 tree scalar_type
= lang_hooks
.types
.type_for_mode (TYPE_MODE (type
), 1);
6040 op0
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op0
);
6041 op1
= fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, scalar_type
, op1
);
6044 = fold_build2_loc (loc
, TREE_CODE (*expr_p
), TREE_TYPE (*expr_p
), op0
, op1
);
6049 /* Gimplify an expression sequence. This function gimplifies each
6050 expression and rewrites the original expression with the last
6051 expression of the sequence in GIMPLE form.
6053 PRE_P points to the list where the side effects for all the
6054 expressions in the sequence will be emitted.
6056 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6058 static enum gimplify_status
6059 gimplify_compound_expr (tree
*expr_p
, gimple_seq
*pre_p
, bool want_value
)
6065 tree
*sub_p
= &TREE_OPERAND (t
, 0);
6067 if (TREE_CODE (*sub_p
) == COMPOUND_EXPR
)
6068 gimplify_compound_expr (sub_p
, pre_p
, false);
6070 gimplify_stmt (sub_p
, pre_p
);
6072 t
= TREE_OPERAND (t
, 1);
6074 while (TREE_CODE (t
) == COMPOUND_EXPR
);
6081 gimplify_stmt (expr_p
, pre_p
);
6086 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6087 gimplify. After gimplification, EXPR_P will point to a new temporary
6088 that holds the original value of the SAVE_EXPR node.
6090 PRE_P points to the list where side effects that must happen before
6091 *EXPR_P should be stored. */
6093 static enum gimplify_status
6094 gimplify_save_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6096 enum gimplify_status ret
= GS_ALL_DONE
;
6099 gcc_assert (TREE_CODE (*expr_p
) == SAVE_EXPR
);
6100 val
= TREE_OPERAND (*expr_p
, 0);
6102 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6103 if (!SAVE_EXPR_RESOLVED_P (*expr_p
))
6105 /* The operand may be a void-valued expression. It is
6106 being executed only for its side-effects. */
6107 if (TREE_TYPE (val
) == void_type_node
)
6109 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
6110 is_gimple_stmt
, fb_none
);
6114 /* The temporary may not be an SSA name as later abnormal and EH
6115 control flow may invalidate use/def domination. When in SSA
6116 form then assume there are no such issues and SAVE_EXPRs only
6117 appear via GENERIC foldings. */
6118 val
= get_initialized_tmp_var (val
, pre_p
, post_p
,
6119 gimple_in_ssa_p (cfun
));
6121 TREE_OPERAND (*expr_p
, 0) = val
;
6122 SAVE_EXPR_RESOLVED_P (*expr_p
) = 1;
6130 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6137 PRE_P points to the list where side effects that must happen before
6138 *EXPR_P should be stored.
6140 POST_P points to the list where side effects that must happen after
6141 *EXPR_P should be stored. */
6143 static enum gimplify_status
6144 gimplify_addr_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6146 tree expr
= *expr_p
;
6147 tree op0
= TREE_OPERAND (expr
, 0);
6148 enum gimplify_status ret
;
6149 location_t loc
= EXPR_LOCATION (*expr_p
);
6151 switch (TREE_CODE (op0
))
6155 /* Check if we are dealing with an expression of the form '&*ptr'.
6156 While the front end folds away '&*ptr' into 'ptr', these
6157 expressions may be generated internally by the compiler (e.g.,
6158 builtins like __builtin_va_end). */
6159 /* Caution: the silent array decomposition semantics we allow for
6160 ADDR_EXPR means we can't always discard the pair. */
6161 /* Gimplification of the ADDR_EXPR operand may drop
6162 cv-qualification conversions, so make sure we add them if
6165 tree op00
= TREE_OPERAND (op0
, 0);
6166 tree t_expr
= TREE_TYPE (expr
);
6167 tree t_op00
= TREE_TYPE (op00
);
6169 if (!useless_type_conversion_p (t_expr
, t_op00
))
6170 op00
= fold_convert_loc (loc
, TREE_TYPE (expr
), op00
);
6176 case VIEW_CONVERT_EXPR
:
6177 /* Take the address of our operand and then convert it to the type of
6180 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6181 all clear. The impact of this transformation is even less clear. */
6183 /* If the operand is a useless conversion, look through it. Doing so
6184 guarantees that the ADDR_EXPR and its operand will remain of the
6186 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0
, 0)))
6187 op0
= TREE_OPERAND (op0
, 0);
6189 *expr_p
= fold_convert_loc (loc
, TREE_TYPE (expr
),
6190 build_fold_addr_expr_loc (loc
,
6191 TREE_OPERAND (op0
, 0)));
6196 if (integer_zerop (TREE_OPERAND (op0
, 1)))
6197 goto do_indirect_ref
;
6202 /* If we see a call to a declared builtin or see its address
6203 being taken (we can unify those cases here) then we can mark
6204 the builtin for implicit generation by GCC. */
6205 if (TREE_CODE (op0
) == FUNCTION_DECL
6206 && fndecl_built_in_p (op0
, BUILT_IN_NORMAL
)
6207 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0
)))
6208 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0
), true);
6210 /* We use fb_either here because the C frontend sometimes takes
6211 the address of a call that returns a struct; see
6212 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6213 the implied temporary explicit. */
6215 /* Make the operand addressable. */
6216 ret
= gimplify_expr (&TREE_OPERAND (expr
, 0), pre_p
, post_p
,
6217 is_gimple_addressable
, fb_either
);
6218 if (ret
== GS_ERROR
)
6221 /* Then mark it. Beware that it may not be possible to do so directly
6222 if a temporary has been created by the gimplification. */
6223 prepare_gimple_addressable (&TREE_OPERAND (expr
, 0), pre_p
);
6225 op0
= TREE_OPERAND (expr
, 0);
6227 /* For various reasons, the gimplification of the expression
6228 may have made a new INDIRECT_REF. */
6229 if (TREE_CODE (op0
) == INDIRECT_REF
6230 || (TREE_CODE (op0
) == MEM_REF
6231 && integer_zerop (TREE_OPERAND (op0
, 1))))
6232 goto do_indirect_ref
;
6234 mark_addressable (TREE_OPERAND (expr
, 0));
6236 /* The FEs may end up building ADDR_EXPRs early on a decl with
6237 an incomplete type. Re-build ADDR_EXPRs in canonical form
6239 if (!types_compatible_p (TREE_TYPE (op0
), TREE_TYPE (TREE_TYPE (expr
))))
6240 *expr_p
= build_fold_addr_expr (op0
);
6242 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6243 recompute_tree_invariant_for_addr_expr (*expr_p
);
6245 /* If we re-built the ADDR_EXPR add a conversion to the original type
6247 if (!useless_type_conversion_p (TREE_TYPE (expr
), TREE_TYPE (*expr_p
)))
6248 *expr_p
= fold_convert (TREE_TYPE (expr
), *expr_p
);
6256 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6257 value; output operands should be a gimple lvalue. */
6259 static enum gimplify_status
6260 gimplify_asm_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6264 const char **oconstraints
;
6267 const char *constraint
;
6268 bool allows_mem
, allows_reg
, is_inout
;
6269 enum gimplify_status ret
, tret
;
6271 vec
<tree
, va_gc
> *inputs
;
6272 vec
<tree
, va_gc
> *outputs
;
6273 vec
<tree
, va_gc
> *clobbers
;
6274 vec
<tree
, va_gc
> *labels
;
6278 noutputs
= list_length (ASM_OUTPUTS (expr
));
6279 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
6287 link_next
= NULL_TREE
;
6288 for (i
= 0, link
= ASM_OUTPUTS (expr
); link
; ++i
, link
= link_next
)
6291 size_t constraint_len
;
6293 link_next
= TREE_CHAIN (link
);
6297 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6298 constraint_len
= strlen (constraint
);
6299 if (constraint_len
== 0)
6302 ok
= parse_output_constraint (&constraint
, i
, 0, 0,
6303 &allows_mem
, &allows_reg
, &is_inout
);
6310 /* If we can't make copies, we can only accept memory.
6311 Similarly for VLAs. */
6312 tree outtype
= TREE_TYPE (TREE_VALUE (link
));
6313 if (outtype
!= error_mark_node
6314 && (TREE_ADDRESSABLE (outtype
)
6315 || !COMPLETE_TYPE_P (outtype
)
6316 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype
))))
6322 error ("impossible constraint in %<asm%>");
6323 error ("non-memory output %d must stay in memory", i
);
6328 if (!allows_reg
&& allows_mem
)
6329 mark_addressable (TREE_VALUE (link
));
6331 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6332 is_inout
? is_gimple_min_lval
: is_gimple_lvalue
,
6333 fb_lvalue
| fb_mayfail
);
6334 if (tret
== GS_ERROR
)
6336 error ("invalid lvalue in %<asm%> output %d", i
);
6340 /* If the constraint does not allow memory make sure we gimplify
6341 it to a register if it is not already but its base is. This
6342 happens for complex and vector components. */
6345 tree op
= TREE_VALUE (link
);
6346 if (! is_gimple_val (op
)
6347 && is_gimple_reg_type (TREE_TYPE (op
))
6348 && is_gimple_reg (get_base_address (op
)))
6350 tree tem
= create_tmp_reg (TREE_TYPE (op
));
6354 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
),
6355 tem
, unshare_expr (op
));
6356 gimplify_and_add (ass
, pre_p
);
6358 ass
= build2 (MODIFY_EXPR
, TREE_TYPE (tem
), op
, tem
);
6359 gimplify_and_add (ass
, post_p
);
6361 TREE_VALUE (link
) = tem
;
6366 vec_safe_push (outputs
, link
);
6367 TREE_CHAIN (link
) = NULL_TREE
;
6371 /* An input/output operand. To give the optimizers more
6372 flexibility, split it into separate input and output
6375 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6378 /* Turn the in/out constraint into an output constraint. */
6379 char *p
= xstrdup (constraint
);
6381 TREE_VALUE (TREE_PURPOSE (link
)) = build_string (constraint_len
, p
);
6383 /* And add a matching input constraint. */
6386 sprintf (buf
, "%u", i
);
6388 /* If there are multiple alternatives in the constraint,
6389 handle each of them individually. Those that allow register
6390 will be replaced with operand number, the others will stay
6392 if (strchr (p
, ',') != NULL
)
6394 size_t len
= 0, buflen
= strlen (buf
);
6395 char *beg
, *end
, *str
, *dst
;
6399 end
= strchr (beg
, ',');
6401 end
= strchr (beg
, '\0');
6402 if ((size_t) (end
- beg
) < buflen
)
6405 len
+= end
- beg
+ 1;
6412 str
= (char *) alloca (len
);
6413 for (beg
= p
+ 1, dst
= str
;;)
6416 bool mem_p
, reg_p
, inout_p
;
6418 end
= strchr (beg
, ',');
6423 parse_output_constraint (&tem
, i
, 0, 0,
6424 &mem_p
, ®_p
, &inout_p
);
6429 memcpy (dst
, buf
, buflen
);
6438 memcpy (dst
, beg
, len
);
6447 input
= build_string (dst
- str
, str
);
6450 input
= build_string (strlen (buf
), buf
);
6453 input
= build_string (constraint_len
- 1, constraint
+ 1);
6457 input
= build_tree_list (build_tree_list (NULL_TREE
, input
),
6458 unshare_expr (TREE_VALUE (link
)));
6459 ASM_INPUTS (expr
) = chainon (ASM_INPUTS (expr
), input
);
6463 link_next
= NULL_TREE
;
6464 for (link
= ASM_INPUTS (expr
); link
; ++i
, link
= link_next
)
6466 link_next
= TREE_CHAIN (link
);
6467 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
6468 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
6469 oconstraints
, &allows_mem
, &allows_reg
);
6471 /* If we can't make copies, we can only accept memory. */
6472 tree intype
= TREE_TYPE (TREE_VALUE (link
));
6473 if (intype
!= error_mark_node
6474 && (TREE_ADDRESSABLE (intype
)
6475 || !COMPLETE_TYPE_P (intype
)
6476 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype
))))
6482 error ("impossible constraint in %<asm%>");
6483 error ("non-memory input %d must stay in memory", i
);
6488 /* If the operand is a memory input, it should be an lvalue. */
6489 if (!allows_reg
&& allows_mem
)
6491 tree inputv
= TREE_VALUE (link
);
6492 STRIP_NOPS (inputv
);
6493 if (TREE_CODE (inputv
) == PREDECREMENT_EXPR
6494 || TREE_CODE (inputv
) == PREINCREMENT_EXPR
6495 || TREE_CODE (inputv
) == POSTDECREMENT_EXPR
6496 || TREE_CODE (inputv
) == POSTINCREMENT_EXPR
6497 || TREE_CODE (inputv
) == MODIFY_EXPR
)
6498 TREE_VALUE (link
) = error_mark_node
;
6499 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6500 is_gimple_lvalue
, fb_lvalue
| fb_mayfail
);
6501 if (tret
!= GS_ERROR
)
6503 /* Unlike output operands, memory inputs are not guaranteed
6504 to be lvalues by the FE, and while the expressions are
6505 marked addressable there, if it is e.g. a statement
6506 expression, temporaries in it might not end up being
6507 addressable. They might be already used in the IL and thus
6508 it is too late to make them addressable now though. */
6509 tree x
= TREE_VALUE (link
);
6510 while (handled_component_p (x
))
6511 x
= TREE_OPERAND (x
, 0);
6512 if (TREE_CODE (x
) == MEM_REF
6513 && TREE_CODE (TREE_OPERAND (x
, 0)) == ADDR_EXPR
)
6514 x
= TREE_OPERAND (TREE_OPERAND (x
, 0), 0);
6516 || TREE_CODE (x
) == PARM_DECL
6517 || TREE_CODE (x
) == RESULT_DECL
)
6518 && !TREE_ADDRESSABLE (x
)
6519 && is_gimple_reg (x
))
6521 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
),
6523 "memory input %d is not directly addressable",
6525 prepare_gimple_addressable (&TREE_VALUE (link
), pre_p
);
6528 mark_addressable (TREE_VALUE (link
));
6529 if (tret
== GS_ERROR
)
6531 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link
), input_location
),
6532 "memory input %d is not directly addressable", i
);
6538 tret
= gimplify_expr (&TREE_VALUE (link
), pre_p
, post_p
,
6539 is_gimple_asm_val
, fb_rvalue
);
6540 if (tret
== GS_ERROR
)
6544 TREE_CHAIN (link
) = NULL_TREE
;
6545 vec_safe_push (inputs
, link
);
6548 link_next
= NULL_TREE
;
6549 for (link
= ASM_CLOBBERS (expr
); link
; ++i
, link
= link_next
)
6551 link_next
= TREE_CHAIN (link
);
6552 TREE_CHAIN (link
) = NULL_TREE
;
6553 vec_safe_push (clobbers
, link
);
6556 link_next
= NULL_TREE
;
6557 for (link
= ASM_LABELS (expr
); link
; ++i
, link
= link_next
)
6559 link_next
= TREE_CHAIN (link
);
6560 TREE_CHAIN (link
) = NULL_TREE
;
6561 vec_safe_push (labels
, link
);
6564 /* Do not add ASMs with errors to the gimple IL stream. */
6565 if (ret
!= GS_ERROR
)
6567 stmt
= gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr
)),
6568 inputs
, outputs
, clobbers
, labels
);
6570 gimple_asm_set_volatile (stmt
, ASM_VOLATILE_P (expr
) || noutputs
== 0);
6571 gimple_asm_set_input (stmt
, ASM_INPUT_P (expr
));
6572 gimple_asm_set_inline (stmt
, ASM_INLINE_P (expr
));
6574 gimplify_seq_add_stmt (pre_p
, stmt
);
6580 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6581 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6582 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6583 return to this function.
6585 FIXME should we complexify the prequeue handling instead? Or use flags
6586 for all the cleanups and let the optimizer tighten them up? The current
6587 code seems pretty fragile; it will break on a cleanup within any
6588 non-conditional nesting. But any such nesting would be broken, anyway;
6589 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6590 and continues out of it. We can do that at the RTL level, though, so
6591 having an optimizer to tighten up try/finally regions would be a Good
6594 static enum gimplify_status
6595 gimplify_cleanup_point_expr (tree
*expr_p
, gimple_seq
*pre_p
)
6597 gimple_stmt_iterator iter
;
6598 gimple_seq body_sequence
= NULL
;
6600 tree temp
= voidify_wrapper_expr (*expr_p
, NULL
);
6602 /* We only care about the number of conditions between the innermost
6603 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6604 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6605 int old_conds
= gimplify_ctxp
->conditions
;
6606 gimple_seq old_cleanups
= gimplify_ctxp
->conditional_cleanups
;
6607 bool old_in_cleanup_point_expr
= gimplify_ctxp
->in_cleanup_point_expr
;
6608 gimplify_ctxp
->conditions
= 0;
6609 gimplify_ctxp
->conditional_cleanups
= NULL
;
6610 gimplify_ctxp
->in_cleanup_point_expr
= true;
6612 gimplify_stmt (&TREE_OPERAND (*expr_p
, 0), &body_sequence
);
6614 gimplify_ctxp
->conditions
= old_conds
;
6615 gimplify_ctxp
->conditional_cleanups
= old_cleanups
;
6616 gimplify_ctxp
->in_cleanup_point_expr
= old_in_cleanup_point_expr
;
6618 for (iter
= gsi_start (body_sequence
); !gsi_end_p (iter
); )
6620 gimple
*wce
= gsi_stmt (iter
);
6622 if (gimple_code (wce
) == GIMPLE_WITH_CLEANUP_EXPR
)
6624 if (gsi_one_before_end_p (iter
))
6626 /* Note that gsi_insert_seq_before and gsi_remove do not
6627 scan operands, unlike some other sequence mutators. */
6628 if (!gimple_wce_cleanup_eh_only (wce
))
6629 gsi_insert_seq_before_without_update (&iter
,
6630 gimple_wce_cleanup (wce
),
6632 gsi_remove (&iter
, true);
6639 enum gimple_try_flags kind
;
6641 if (gimple_wce_cleanup_eh_only (wce
))
6642 kind
= GIMPLE_TRY_CATCH
;
6644 kind
= GIMPLE_TRY_FINALLY
;
6645 seq
= gsi_split_seq_after (iter
);
6647 gtry
= gimple_build_try (seq
, gimple_wce_cleanup (wce
), kind
);
6648 /* Do not use gsi_replace here, as it may scan operands.
6649 We want to do a simple structural modification only. */
6650 gsi_set_stmt (&iter
, gtry
);
6651 iter
= gsi_start (gtry
->eval
);
6658 gimplify_seq_add_seq (pre_p
, body_sequence
);
6671 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6672 is the cleanup action required. EH_ONLY is true if the cleanup should
6673 only be executed if an exception is thrown, not on normal exit.
6674 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6675 only valid for clobbers. */
6678 gimple_push_cleanup (tree var
, tree cleanup
, bool eh_only
, gimple_seq
*pre_p
,
6679 bool force_uncond
= false)
6682 gimple_seq cleanup_stmts
= NULL
;
6684 /* Errors can result in improperly nested cleanups. Which results in
6685 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6689 if (gimple_conditional_context ())
6691 /* If we're in a conditional context, this is more complex. We only
6692 want to run the cleanup if we actually ran the initialization that
6693 necessitates it, but we want to run it after the end of the
6694 conditional context. So we wrap the try/finally around the
6695 condition and use a flag to determine whether or not to actually
6696 run the destructor. Thus
6700 becomes (approximately)
6704 if (test) { A::A(temp); flag = 1; val = f(temp); }
6707 if (flag) A::~A(temp);
6713 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6714 wce
= gimple_build_wce (cleanup_stmts
);
6715 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6719 tree flag
= create_tmp_var (boolean_type_node
, "cleanup");
6720 gassign
*ffalse
= gimple_build_assign (flag
, boolean_false_node
);
6721 gassign
*ftrue
= gimple_build_assign (flag
, boolean_true_node
);
6723 cleanup
= build3 (COND_EXPR
, void_type_node
, flag
, cleanup
, NULL
);
6724 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6725 wce
= gimple_build_wce (cleanup_stmts
);
6727 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, ffalse
);
6728 gimplify_seq_add_stmt (&gimplify_ctxp
->conditional_cleanups
, wce
);
6729 gimplify_seq_add_stmt (pre_p
, ftrue
);
6731 /* Because of this manipulation, and the EH edges that jump
6732 threading cannot redirect, the temporary (VAR) will appear
6733 to be used uninitialized. Don't warn. */
6734 TREE_NO_WARNING (var
) = 1;
6739 gimplify_stmt (&cleanup
, &cleanup_stmts
);
6740 wce
= gimple_build_wce (cleanup_stmts
);
6741 gimple_wce_set_cleanup_eh_only (wce
, eh_only
);
6742 gimplify_seq_add_stmt (pre_p
, wce
);
6746 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6748 static enum gimplify_status
6749 gimplify_target_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
)
6751 tree targ
= *expr_p
;
6752 tree temp
= TARGET_EXPR_SLOT (targ
);
6753 tree init
= TARGET_EXPR_INITIAL (targ
);
6754 enum gimplify_status ret
;
6756 bool unpoison_empty_seq
= false;
6757 gimple_stmt_iterator unpoison_it
;
6761 tree cleanup
= NULL_TREE
;
6763 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
6764 to the temps list. Handle also variable length TARGET_EXPRs. */
6765 if (!poly_int_tree_p (DECL_SIZE (temp
)))
6767 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp
)))
6768 gimplify_type_sizes (TREE_TYPE (temp
), pre_p
);
6769 gimplify_vla_decl (temp
, pre_p
);
6773 /* Save location where we need to place unpoisoning. It's possible
6774 that a variable will be converted to needs_to_live_in_memory. */
6775 unpoison_it
= gsi_last (*pre_p
);
6776 unpoison_empty_seq
= gsi_end_p (unpoison_it
);
6778 gimple_add_tmp_var (temp
);
6781 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6782 expression is supposed to initialize the slot. */
6783 if (VOID_TYPE_P (TREE_TYPE (init
)))
6784 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6787 tree init_expr
= build2 (INIT_EXPR
, void_type_node
, temp
, init
);
6789 ret
= gimplify_expr (&init
, pre_p
, post_p
, is_gimple_stmt
, fb_none
);
6791 ggc_free (init_expr
);
6793 if (ret
== GS_ERROR
)
6795 /* PR c++/28266 Make sure this is expanded only once. */
6796 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6800 gimplify_and_add (init
, pre_p
);
6802 /* If needed, push the cleanup for the temp. */
6803 if (TARGET_EXPR_CLEANUP (targ
))
6805 if (CLEANUP_EH_ONLY (targ
))
6806 gimple_push_cleanup (temp
, TARGET_EXPR_CLEANUP (targ
),
6807 CLEANUP_EH_ONLY (targ
), pre_p
);
6809 cleanup
= TARGET_EXPR_CLEANUP (targ
);
6812 /* Add a clobber for the temporary going out of scope, like
6813 gimplify_bind_expr. */
6814 if (gimplify_ctxp
->in_cleanup_point_expr
6815 && needs_to_live_in_memory (temp
))
6817 if (flag_stack_reuse
== SR_ALL
)
6819 tree clobber
= build_clobber (TREE_TYPE (temp
));
6820 clobber
= build2 (MODIFY_EXPR
, TREE_TYPE (temp
), temp
, clobber
);
6821 gimple_push_cleanup (temp
, clobber
, false, pre_p
, true);
6823 if (asan_poisoned_variables
6824 && DECL_ALIGN (temp
) <= MAX_SUPPORTED_STACK_ALIGNMENT
6825 && !TREE_STATIC (temp
)
6826 && dbg_cnt (asan_use_after_scope
)
6827 && !gimplify_omp_ctxp
)
6829 tree asan_cleanup
= build_asan_poison_call_expr (temp
);
6832 if (unpoison_empty_seq
)
6833 unpoison_it
= gsi_start (*pre_p
);
6835 asan_poison_variable (temp
, false, &unpoison_it
,
6836 unpoison_empty_seq
);
6837 gimple_push_cleanup (temp
, asan_cleanup
, false, pre_p
);
6842 gimple_push_cleanup (temp
, cleanup
, false, pre_p
);
6844 /* Only expand this once. */
6845 TREE_OPERAND (targ
, 3) = init
;
6846 TARGET_EXPR_INITIAL (targ
) = NULL_TREE
;
6849 /* We should have expanded this before. */
6850 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp
));
6856 /* Gimplification of expression trees. */
6858 /* Gimplify an expression which appears at statement context. The
6859 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6860 NULL, a new sequence is allocated.
6862 Return true if we actually added a statement to the queue. */
6865 gimplify_stmt (tree
*stmt_p
, gimple_seq
*seq_p
)
6867 gimple_seq_node last
;
6869 last
= gimple_seq_last (*seq_p
);
6870 gimplify_expr (stmt_p
, seq_p
, NULL
, is_gimple_stmt
, fb_none
);
6871 return last
!= gimple_seq_last (*seq_p
);
6874 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6875 to CTX. If entries already exist, force them to be some flavor of private.
6876 If there is no enclosing parallel, do nothing. */
6879 omp_firstprivatize_variable (struct gimplify_omp_ctx
*ctx
, tree decl
)
6883 if (decl
== NULL
|| !DECL_P (decl
) || ctx
->region_type
== ORT_NONE
)
6888 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6891 if (n
->value
& GOVD_SHARED
)
6892 n
->value
= GOVD_FIRSTPRIVATE
| (n
->value
& GOVD_SEEN
);
6893 else if (n
->value
& GOVD_MAP
)
6894 n
->value
|= GOVD_MAP_TO_ONLY
;
6898 else if ((ctx
->region_type
& ORT_TARGET
) != 0)
6900 if (ctx
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
6901 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6903 omp_add_variable (ctx
, decl
, GOVD_MAP
| GOVD_MAP_TO_ONLY
);
6905 else if (ctx
->region_type
!= ORT_WORKSHARE
6906 && ctx
->region_type
!= ORT_TASKGROUP
6907 && ctx
->region_type
!= ORT_SIMD
6908 && ctx
->region_type
!= ORT_ACC
6909 && !(ctx
->region_type
& ORT_TARGET_DATA
))
6910 omp_add_variable (ctx
, decl
, GOVD_FIRSTPRIVATE
);
6912 ctx
= ctx
->outer_context
;
6917 /* Similarly for each of the type sizes of TYPE. */
6920 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx
*ctx
, tree type
)
6922 if (type
== NULL
|| type
== error_mark_node
)
6924 type
= TYPE_MAIN_VARIANT (type
);
6926 if (ctx
->privatized_types
->add (type
))
6929 switch (TREE_CODE (type
))
6935 case FIXED_POINT_TYPE
:
6936 omp_firstprivatize_variable (ctx
, TYPE_MIN_VALUE (type
));
6937 omp_firstprivatize_variable (ctx
, TYPE_MAX_VALUE (type
));
6941 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6942 omp_firstprivatize_type_sizes (ctx
, TYPE_DOMAIN (type
));
6947 case QUAL_UNION_TYPE
:
6950 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
6951 if (TREE_CODE (field
) == FIELD_DECL
)
6953 omp_firstprivatize_variable (ctx
, DECL_FIELD_OFFSET (field
));
6954 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (field
));
6960 case REFERENCE_TYPE
:
6961 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (type
));
6968 omp_firstprivatize_variable (ctx
, TYPE_SIZE (type
));
6969 omp_firstprivatize_variable (ctx
, TYPE_SIZE_UNIT (type
));
6970 lang_hooks
.types
.omp_firstprivatize_type_sizes (ctx
, type
);
6973 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6976 omp_add_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned int flags
)
6979 unsigned int nflags
;
6982 if (error_operand_p (decl
) || ctx
->region_type
== ORT_NONE
)
6985 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6986 there are constructors involved somewhere. Exception is a shared clause,
6987 there is nothing privatized in that case. */
6988 if ((flags
& GOVD_SHARED
) == 0
6989 && (TREE_ADDRESSABLE (TREE_TYPE (decl
))
6990 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl
))))
6993 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
6994 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
6996 /* We shouldn't be re-adding the decl with the same data
6998 gcc_assert ((n
->value
& GOVD_DATA_SHARE_CLASS
& flags
) == 0);
6999 nflags
= n
->value
| flags
;
7000 /* The only combination of data sharing classes we should see is
7001 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7002 reduction variables to be used in data sharing clauses. */
7003 gcc_assert ((ctx
->region_type
& ORT_ACC
) != 0
7004 || ((nflags
& GOVD_DATA_SHARE_CLASS
)
7005 == (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
))
7006 || (flags
& GOVD_DATA_SHARE_CLASS
) == 0);
7011 /* When adding a variable-sized variable, we have to handle all sorts
7012 of additional bits of data: the pointer replacement variable, and
7013 the parameters of the type. */
7014 if (DECL_SIZE (decl
) && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7016 /* Add the pointer replacement variable as PRIVATE if the variable
7017 replacement is private, else FIRSTPRIVATE since we'll need the
7018 address of the original variable either for SHARED, or for the
7019 copy into or out of the context. */
7020 if (!(flags
& GOVD_LOCAL
) && ctx
->region_type
!= ORT_TASKGROUP
)
7022 if (flags
& GOVD_MAP
)
7023 nflags
= GOVD_MAP
| GOVD_MAP_TO_ONLY
| GOVD_EXPLICIT
;
7024 else if (flags
& GOVD_PRIVATE
)
7025 nflags
= GOVD_PRIVATE
;
7026 else if (((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7027 && (flags
& GOVD_FIRSTPRIVATE
))
7028 || (ctx
->region_type
== ORT_TARGET_DATA
7029 && (flags
& GOVD_DATA_SHARE_CLASS
) == 0))
7030 nflags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
7032 nflags
= GOVD_FIRSTPRIVATE
;
7033 nflags
|= flags
& GOVD_SEEN
;
7034 t
= DECL_VALUE_EXPR (decl
);
7035 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7036 t
= TREE_OPERAND (t
, 0);
7037 gcc_assert (DECL_P (t
));
7038 omp_add_variable (ctx
, t
, nflags
);
7041 /* Add all of the variable and type parameters (which should have
7042 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7043 omp_firstprivatize_variable (ctx
, DECL_SIZE_UNIT (decl
));
7044 omp_firstprivatize_variable (ctx
, DECL_SIZE (decl
));
7045 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7047 /* The variable-sized variable itself is never SHARED, only some form
7048 of PRIVATE. The sharing would take place via the pointer variable
7049 which we remapped above. */
7050 if (flags
& GOVD_SHARED
)
7051 flags
= GOVD_SHARED
| GOVD_DEBUG_PRIVATE
7052 | (flags
& (GOVD_SEEN
| GOVD_EXPLICIT
));
7054 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7055 alloca statement we generate for the variable, so make sure it
7056 is available. This isn't automatically needed for the SHARED
7057 case, since we won't be allocating local storage then.
7058 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7059 in this case omp_notice_variable will be called later
7060 on when it is gimplified. */
7061 else if (! (flags
& (GOVD_LOCAL
| GOVD_MAP
))
7062 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl
))))
7063 omp_notice_variable (ctx
, TYPE_SIZE_UNIT (TREE_TYPE (decl
)), true);
7065 else if ((flags
& (GOVD_MAP
| GOVD_LOCAL
)) == 0
7066 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7068 omp_firstprivatize_type_sizes (ctx
, TREE_TYPE (decl
));
7070 /* Similar to the direct variable sized case above, we'll need the
7071 size of references being privatized. */
7072 if ((flags
& GOVD_SHARED
) == 0)
7074 t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7076 omp_notice_variable (ctx
, t
, true);
7083 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, flags
);
7085 /* For reductions clauses in OpenACC loop directives, by default create a
7086 copy clause on the enclosing parallel construct for carrying back the
7088 if (ctx
->region_type
== ORT_ACC
&& (flags
& GOVD_REDUCTION
))
7090 struct gimplify_omp_ctx
*outer_ctx
= ctx
->outer_context
;
7093 n
= splay_tree_lookup (outer_ctx
->variables
, (splay_tree_key
)decl
);
7096 /* Ignore local variables and explicitly declared clauses. */
7097 if (n
->value
& (GOVD_LOCAL
| GOVD_EXPLICIT
))
7099 else if (outer_ctx
->region_type
== ORT_ACC_KERNELS
)
7101 /* According to the OpenACC spec, such a reduction variable
7102 should already have a copy map on a kernels construct,
7103 verify that here. */
7104 gcc_assert (!(n
->value
& GOVD_FIRSTPRIVATE
)
7105 && (n
->value
& GOVD_MAP
));
7107 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7109 /* Remove firstprivate and make it a copy map. */
7110 n
->value
&= ~GOVD_FIRSTPRIVATE
;
7111 n
->value
|= GOVD_MAP
;
7114 else if (outer_ctx
->region_type
== ORT_ACC_PARALLEL
)
7116 splay_tree_insert (outer_ctx
->variables
, (splay_tree_key
)decl
,
7117 GOVD_MAP
| GOVD_SEEN
);
7120 outer_ctx
= outer_ctx
->outer_context
;
7125 /* Notice a threadprivate variable DECL used in OMP context CTX.
7126 This just prints out diagnostics about threadprivate variable uses
7127 in untied tasks. If DECL2 is non-NULL, prevent this warning
7128 on that variable. */
7131 omp_notice_threadprivate_variable (struct gimplify_omp_ctx
*ctx
, tree decl
,
7135 struct gimplify_omp_ctx
*octx
;
7137 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
7138 if ((octx
->region_type
& ORT_TARGET
) != 0
7139 || octx
->order_concurrent
)
7141 n
= splay_tree_lookup (octx
->variables
, (splay_tree_key
)decl
);
7144 if (octx
->order_concurrent
)
7146 error ("threadprivate variable %qE used in a region with"
7147 " %<order(concurrent)%> clause", DECL_NAME (decl
));
7148 inform (octx
->location
, "enclosing region");
7152 error ("threadprivate variable %qE used in target region",
7154 inform (octx
->location
, "enclosing target region");
7156 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl
, 0);
7159 splay_tree_insert (octx
->variables
, (splay_tree_key
)decl2
, 0);
7162 if (ctx
->region_type
!= ORT_UNTIED_TASK
)
7164 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7167 error ("threadprivate variable %qE used in untied task",
7169 inform (ctx
->location
, "enclosing task");
7170 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl
, 0);
7173 splay_tree_insert (ctx
->variables
, (splay_tree_key
)decl2
, 0);
7177 /* Return true if global var DECL is device resident. */
7180 device_resident_p (tree decl
)
7182 tree attr
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl
));
7187 for (tree t
= TREE_VALUE (attr
); t
; t
= TREE_PURPOSE (t
))
7189 tree c
= TREE_VALUE (t
);
7190 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DEVICE_RESIDENT
)
7197 /* Return true if DECL has an ACC DECLARE attribute. */
7200 is_oacc_declared (tree decl
)
7202 tree t
= TREE_CODE (decl
) == MEM_REF
? TREE_OPERAND (decl
, 0) : decl
;
7203 tree declared
= lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t
));
7204 return declared
!= NULL_TREE
;
7207 /* Determine outer default flags for DECL mentioned in an OMP region
7208 but not declared in an enclosing clause.
7210 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7211 remapped firstprivate instead of shared. To some extent this is
7212 addressed in omp_firstprivatize_type_sizes, but not
7216 omp_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
,
7217 bool in_code
, unsigned flags
)
7219 enum omp_clause_default_kind default_kind
= ctx
->default_kind
;
7220 enum omp_clause_default_kind kind
;
7222 kind
= lang_hooks
.decls
.omp_predetermined_sharing (decl
);
7223 if (ctx
->region_type
& ORT_TASK
)
7225 tree detach_clause
= omp_find_clause (ctx
->clauses
, OMP_CLAUSE_DETACH
);
7227 /* The event-handle specified by a detach clause should always be firstprivate,
7228 regardless of the current default. */
7229 if (detach_clause
&& OMP_CLAUSE_DECL (detach_clause
) == decl
)
7230 kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
7232 if (kind
!= OMP_CLAUSE_DEFAULT_UNSPECIFIED
)
7233 default_kind
= kind
;
7234 else if (VAR_P (decl
) && TREE_STATIC (decl
) && DECL_IN_CONSTANT_POOL (decl
))
7235 default_kind
= OMP_CLAUSE_DEFAULT_SHARED
;
7237 switch (default_kind
)
7239 case OMP_CLAUSE_DEFAULT_NONE
:
7243 if (ctx
->region_type
& ORT_PARALLEL
)
7245 else if ((ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
)
7247 else if (ctx
->region_type
& ORT_TASK
)
7249 else if (ctx
->region_type
& ORT_TEAMS
)
7254 error ("%qE not specified in enclosing %qs",
7255 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rtype
);
7256 inform (ctx
->location
, "enclosing %qs", rtype
);
7259 case OMP_CLAUSE_DEFAULT_SHARED
:
7260 flags
|= GOVD_SHARED
;
7262 case OMP_CLAUSE_DEFAULT_PRIVATE
:
7263 flags
|= GOVD_PRIVATE
;
7265 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
:
7266 flags
|= GOVD_FIRSTPRIVATE
;
7268 case OMP_CLAUSE_DEFAULT_UNSPECIFIED
:
7269 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7270 gcc_assert ((ctx
->region_type
& ORT_TASK
) != 0);
7271 if (struct gimplify_omp_ctx
*octx
= ctx
->outer_context
)
7273 omp_notice_variable (octx
, decl
, in_code
);
7274 for (; octx
; octx
= octx
->outer_context
)
7278 n2
= splay_tree_lookup (octx
->variables
, (splay_tree_key
) decl
);
7279 if ((octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)) != 0
7280 && (n2
== NULL
|| (n2
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7282 if (n2
&& (n2
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
)
7284 flags
|= GOVD_FIRSTPRIVATE
;
7287 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TEAMS
)) != 0)
7289 flags
|= GOVD_SHARED
;
7295 if (TREE_CODE (decl
) == PARM_DECL
7296 || (!is_global_var (decl
)
7297 && DECL_CONTEXT (decl
) == current_function_decl
))
7298 flags
|= GOVD_FIRSTPRIVATE
;
7300 flags
|= GOVD_SHARED
;
7312 /* Determine outer default flags for DECL mentioned in an OACC region
7313 but not declared in an enclosing clause. */
7316 oacc_default_clause (struct gimplify_omp_ctx
*ctx
, tree decl
, unsigned flags
)
7319 bool on_device
= false;
7320 bool is_private
= false;
7321 bool declared
= is_oacc_declared (decl
);
7322 tree type
= TREE_TYPE (decl
);
7324 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7325 type
= TREE_TYPE (type
);
7327 /* For Fortran COMMON blocks, only used variables in those blocks are
7328 transfered and remapped. The block itself will have a private clause to
7329 avoid transfering the data twice.
7330 The hook evaluates to false by default. For a variable in Fortran's COMMON
7331 or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
7332 the variables in such a COMMON/EQUIVALENCE block shall be privatized not
7333 the whole block. For C++ and Fortran, it can also be true under certain
7334 other conditions, if DECL_HAS_VALUE_EXPR. */
7335 if (RECORD_OR_UNION_TYPE_P (type
))
7336 is_private
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7338 if ((ctx
->region_type
& (ORT_ACC_PARALLEL
| ORT_ACC_KERNELS
)) != 0
7339 && is_global_var (decl
)
7340 && device_resident_p (decl
)
7344 flags
|= GOVD_MAP_TO_ONLY
;
7347 switch (ctx
->region_type
)
7349 case ORT_ACC_KERNELS
:
7353 flags
|= GOVD_FIRSTPRIVATE
;
7354 else if (AGGREGATE_TYPE_P (type
))
7356 /* Aggregates default to 'present_or_copy', or 'present'. */
7357 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7360 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7363 /* Scalars default to 'copy'. */
7364 flags
|= GOVD_MAP
| GOVD_MAP_FORCE
;
7368 case ORT_ACC_PARALLEL
:
7369 case ORT_ACC_SERIAL
:
7370 rkind
= ctx
->region_type
== ORT_ACC_PARALLEL
? "parallel" : "serial";
7373 flags
|= GOVD_FIRSTPRIVATE
;
7374 else if (on_device
|| declared
)
7376 else if (AGGREGATE_TYPE_P (type
))
7378 /* Aggregates default to 'present_or_copy', or 'present'. */
7379 if (ctx
->default_kind
!= OMP_CLAUSE_DEFAULT_PRESENT
)
7382 flags
|= GOVD_MAP
| GOVD_MAP_FORCE_PRESENT
;
7385 /* Scalars default to 'firstprivate'. */
7386 flags
|= GOVD_FIRSTPRIVATE
;
7394 if (DECL_ARTIFICIAL (decl
))
7395 ; /* We can get compiler-generated decls, and should not complain
7397 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_NONE
)
7399 error ("%qE not specified in enclosing OpenACC %qs construct",
7400 DECL_NAME (lang_hooks
.decls
.omp_report_decl (decl
)), rkind
);
7401 inform (ctx
->location
, "enclosing OpenACC %qs construct", rkind
);
7403 else if (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_PRESENT
)
7404 ; /* Handled above. */
7406 gcc_checking_assert (ctx
->default_kind
== OMP_CLAUSE_DEFAULT_SHARED
);
7411 /* Record the fact that DECL was used within the OMP context CTX.
7412 IN_CODE is true when real code uses DECL, and false when we should
7413 merely emit default(none) errors. Return true if DECL is going to
7414 be remapped and thus DECL shouldn't be gimplified into its
7415 DECL_VALUE_EXPR (if any). */
7418 omp_notice_variable (struct gimplify_omp_ctx
*ctx
, tree decl
, bool in_code
)
7421 unsigned flags
= in_code
? GOVD_SEEN
: 0;
7422 bool ret
= false, shared
;
7424 if (error_operand_p (decl
))
7427 if (ctx
->region_type
== ORT_NONE
)
7428 return lang_hooks
.decls
.omp_disregard_value_expr (decl
, false);
7430 if (is_global_var (decl
))
7432 /* Threadprivate variables are predetermined. */
7433 if (DECL_THREAD_LOCAL_P (decl
))
7434 return omp_notice_threadprivate_variable (ctx
, decl
, NULL_TREE
);
7436 if (DECL_HAS_VALUE_EXPR_P (decl
))
7438 if (ctx
->region_type
& ORT_ACC
)
7439 /* For OpenACC, defer expansion of value to avoid transfering
7440 privatized common block data instead of im-/explicitly transfered
7441 variables which are in common blocks. */
7445 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
7447 if (value
&& DECL_P (value
) && DECL_THREAD_LOCAL_P (value
))
7448 return omp_notice_threadprivate_variable (ctx
, decl
, value
);
7452 if (gimplify_omp_ctxp
->outer_context
== NULL
7454 && oacc_get_fn_attrib (current_function_decl
))
7456 location_t loc
= DECL_SOURCE_LOCATION (decl
);
7458 if (lookup_attribute ("omp declare target link",
7459 DECL_ATTRIBUTES (decl
)))
7462 "%qE with %<link%> clause used in %<routine%> function",
7466 else if (!lookup_attribute ("omp declare target",
7467 DECL_ATTRIBUTES (decl
)))
7470 "%qE requires a %<declare%> directive for use "
7471 "in a %<routine%> function", DECL_NAME (decl
));
7477 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7478 if ((ctx
->region_type
& ORT_TARGET
) != 0)
7480 if (ctx
->region_type
& ORT_ACC
)
7481 /* For OpenACC, as remarked above, defer expansion. */
7486 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7489 unsigned nflags
= flags
;
7490 if ((ctx
->region_type
& ORT_ACC
) == 0)
7492 bool is_declare_target
= false;
7493 if (is_global_var (decl
)
7494 && varpool_node::get_create (decl
)->offloadable
)
7496 struct gimplify_omp_ctx
*octx
;
7497 for (octx
= ctx
->outer_context
;
7498 octx
; octx
= octx
->outer_context
)
7500 n
= splay_tree_lookup (octx
->variables
,
7501 (splay_tree_key
)decl
);
7503 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != GOVD_SHARED
7504 && (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
7507 is_declare_target
= octx
== NULL
;
7509 if (!is_declare_target
)
7512 enum omp_clause_defaultmap_kind kind
;
7513 if (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
7514 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
7515 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
7517 gdmk
= GDMK_POINTER
;
7518 else if (lang_hooks
.decls
.omp_scalar_p (decl
))
7521 gdmk
= GDMK_AGGREGATE
;
7522 kind
= lang_hooks
.decls
.omp_predetermined_mapping (decl
);
7523 if (kind
!= OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
)
7525 if (kind
== OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
)
7526 nflags
|= GOVD_FIRSTPRIVATE
;
7527 else if (kind
== OMP_CLAUSE_DEFAULTMAP_TO
)
7528 nflags
|= GOVD_MAP
| GOVD_MAP_TO_ONLY
;
7532 else if (ctx
->defaultmap
[gdmk
] == 0)
7534 tree d
= lang_hooks
.decls
.omp_report_decl (decl
);
7535 error ("%qE not specified in enclosing %<target%>",
7537 inform (ctx
->location
, "enclosing %<target%>");
7539 else if (ctx
->defaultmap
[gdmk
]
7540 & (GOVD_MAP_0LEN_ARRAY
| GOVD_FIRSTPRIVATE
))
7541 nflags
|= ctx
->defaultmap
[gdmk
];
7544 gcc_assert (ctx
->defaultmap
[gdmk
] & GOVD_MAP
);
7545 nflags
|= ctx
->defaultmap
[gdmk
] & ~GOVD_MAP
;
7550 struct gimplify_omp_ctx
*octx
= ctx
->outer_context
;
7551 if ((ctx
->region_type
& ORT_ACC
) && octx
)
7553 /* Look in outer OpenACC contexts, to see if there's a
7554 data attribute for this variable. */
7555 omp_notice_variable (octx
, decl
, in_code
);
7557 for (; octx
; octx
= octx
->outer_context
)
7559 if (!(octx
->region_type
& (ORT_TARGET_DATA
| ORT_TARGET
)))
7562 = splay_tree_lookup (octx
->variables
,
7563 (splay_tree_key
) decl
);
7566 if (octx
->region_type
== ORT_ACC_HOST_DATA
)
7567 error ("variable %qE declared in enclosing "
7568 "%<host_data%> region", DECL_NAME (decl
));
7570 if (octx
->region_type
== ORT_ACC_DATA
7571 && (n2
->value
& GOVD_MAP_0LEN_ARRAY
))
7572 nflags
|= GOVD_MAP_0LEN_ARRAY
;
7578 if ((nflags
& ~(GOVD_MAP_TO_ONLY
| GOVD_MAP_FROM_ONLY
7579 | GOVD_MAP_ALLOC_ONLY
)) == flags
)
7581 tree type
= TREE_TYPE (decl
);
7583 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
7584 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7585 type
= TREE_TYPE (type
);
7586 if (!lang_hooks
.types
.omp_mappable_type (type
))
7588 error ("%qD referenced in target region does not have "
7589 "a mappable type", decl
);
7590 nflags
|= GOVD_MAP
| GOVD_EXPLICIT
;
7594 if ((ctx
->region_type
& ORT_ACC
) != 0)
7595 nflags
= oacc_default_clause (ctx
, decl
, flags
);
7601 omp_add_variable (ctx
, decl
, nflags
);
7605 /* If nothing changed, there's nothing left to do. */
7606 if ((n
->value
& flags
) == flags
)
7616 if (ctx
->region_type
== ORT_WORKSHARE
7617 || ctx
->region_type
== ORT_TASKGROUP
7618 || ctx
->region_type
== ORT_SIMD
7619 || ctx
->region_type
== ORT_ACC
7620 || (ctx
->region_type
& ORT_TARGET_DATA
) != 0)
7623 flags
= omp_default_clause (ctx
, decl
, in_code
, flags
);
7625 if ((flags
& GOVD_PRIVATE
)
7626 && lang_hooks
.decls
.omp_private_outer_ref (decl
))
7627 flags
|= GOVD_PRIVATE_OUTER_REF
;
7629 omp_add_variable (ctx
, decl
, flags
);
7631 shared
= (flags
& GOVD_SHARED
) != 0;
7632 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7636 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7637 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
7638 if (ctx
->region_type
== ORT_SIMD
7639 && ctx
->in_for_exprs
7640 && ((n
->value
& (GOVD_PRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
))
7642 flags
&= ~GOVD_SEEN
;
7644 if ((n
->value
& (GOVD_SEEN
| GOVD_LOCAL
)) == 0
7645 && (flags
& (GOVD_SEEN
| GOVD_LOCAL
)) == GOVD_SEEN
7646 && DECL_SIZE (decl
))
7648 if (TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
7651 tree t
= DECL_VALUE_EXPR (decl
);
7652 gcc_assert (TREE_CODE (t
) == INDIRECT_REF
);
7653 t
= TREE_OPERAND (t
, 0);
7654 gcc_assert (DECL_P (t
));
7655 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7656 n2
->value
|= GOVD_SEEN
;
7658 else if (lang_hooks
.decls
.omp_privatize_by_reference (decl
)
7659 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)))
7660 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))))
7664 tree t
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
)));
7665 gcc_assert (DECL_P (t
));
7666 n2
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
7668 omp_notice_variable (ctx
, t
, true);
7672 if (ctx
->region_type
& ORT_ACC
)
7673 /* For OpenACC, as remarked above, defer expansion. */
7676 shared
= ((flags
| n
->value
) & GOVD_SHARED
) != 0;
7677 ret
= lang_hooks
.decls
.omp_disregard_value_expr (decl
, shared
);
7679 /* If nothing changed, there's nothing left to do. */
7680 if ((n
->value
& flags
) == flags
)
7686 /* If the variable is private in the current context, then we don't
7687 need to propagate anything to an outer context. */
7688 if ((flags
& GOVD_PRIVATE
) && !(flags
& GOVD_PRIVATE_OUTER_REF
))
7690 if ((flags
& (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7691 == (GOVD_LINEAR
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7693 if ((flags
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
7694 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7695 == (GOVD_LASTPRIVATE
| GOVD_LINEAR_LASTPRIVATE_NO_OUTER
))
7697 if (ctx
->outer_context
7698 && omp_notice_variable (ctx
->outer_context
, decl
, in_code
))
7703 /* Verify that DECL is private within CTX. If there's specific information
7704 to the contrary in the innermost scope, generate an error. */
7707 omp_is_private (struct gimplify_omp_ctx
*ctx
, tree decl
, int simd
)
7711 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
7714 if (n
->value
& GOVD_SHARED
)
7716 if (ctx
== gimplify_omp_ctxp
)
7719 error ("iteration variable %qE is predetermined linear",
7722 error ("iteration variable %qE should be private",
7724 n
->value
= GOVD_PRIVATE
;
7730 else if ((n
->value
& GOVD_EXPLICIT
) != 0
7731 && (ctx
== gimplify_omp_ctxp
7732 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7733 && gimplify_omp_ctxp
->outer_context
== ctx
)))
7735 if ((n
->value
& GOVD_FIRSTPRIVATE
) != 0)
7736 error ("iteration variable %qE should not be firstprivate",
7738 else if ((n
->value
& GOVD_REDUCTION
) != 0)
7739 error ("iteration variable %qE should not be reduction",
7741 else if (simd
!= 1 && (n
->value
& GOVD_LINEAR
) != 0)
7742 error ("iteration variable %qE should not be linear",
7745 return (ctx
== gimplify_omp_ctxp
7746 || (ctx
->region_type
== ORT_COMBINED_PARALLEL
7747 && gimplify_omp_ctxp
->outer_context
== ctx
));
7750 if (ctx
->region_type
!= ORT_WORKSHARE
7751 && ctx
->region_type
!= ORT_TASKGROUP
7752 && ctx
->region_type
!= ORT_SIMD
7753 && ctx
->region_type
!= ORT_ACC
)
7755 else if (ctx
->outer_context
)
7756 return omp_is_private (ctx
->outer_context
, decl
, simd
);
7760 /* Return true if DECL is private within a parallel region
7761 that binds to the current construct's context or in parallel
7762 region's REDUCTION clause. */
7765 omp_check_private (struct gimplify_omp_ctx
*ctx
, tree decl
, bool copyprivate
)
7771 ctx
= ctx
->outer_context
;
7774 if (is_global_var (decl
))
7777 /* References might be private, but might be shared too,
7778 when checking for copyprivate, assume they might be
7779 private, otherwise assume they might be shared. */
7783 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
7786 /* Treat C++ privatized non-static data members outside
7787 of the privatization the same. */
7788 if (omp_member_access_dummy_var (decl
))
7794 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
7796 if ((ctx
->region_type
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
7797 && (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0))
7802 if ((n
->value
& GOVD_LOCAL
) != 0
7803 && omp_member_access_dummy_var (decl
))
7805 return (n
->value
& GOVD_SHARED
) == 0;
7808 while (ctx
->region_type
== ORT_WORKSHARE
7809 || ctx
->region_type
== ORT_TASKGROUP
7810 || ctx
->region_type
== ORT_SIMD
7811 || ctx
->region_type
== ORT_ACC
);
7815 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7818 find_decl_expr (tree
*tp
, int *walk_subtrees
, void *data
)
7822 /* If this node has been visited, unmark it and keep looking. */
7823 if (TREE_CODE (t
) == DECL_EXPR
&& DECL_EXPR_DECL (t
) == (tree
) data
)
7826 if (IS_TYPE_OR_DECL_P (t
))
7831 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7832 lower all the depend clauses by populating corresponding depend
7833 array. Returns 0 if there are no such depend clauses, or
7834 2 if all depend clauses should be removed, 1 otherwise. */
7837 gimplify_omp_depend (tree
*list_p
, gimple_seq
*pre_p
)
7841 size_t n
[4] = { 0, 0, 0, 0 };
7843 tree counts
[4] = { NULL_TREE
, NULL_TREE
, NULL_TREE
, NULL_TREE
};
7844 tree last_iter
= NULL_TREE
, last_count
= NULL_TREE
;
7846 location_t first_loc
= UNKNOWN_LOCATION
;
7848 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
7849 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
7851 switch (OMP_CLAUSE_DEPEND_KIND (c
))
7853 case OMP_CLAUSE_DEPEND_IN
:
7856 case OMP_CLAUSE_DEPEND_OUT
:
7857 case OMP_CLAUSE_DEPEND_INOUT
:
7860 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
7863 case OMP_CLAUSE_DEPEND_DEPOBJ
:
7866 case OMP_CLAUSE_DEPEND_SOURCE
:
7867 case OMP_CLAUSE_DEPEND_SINK
:
7872 tree t
= OMP_CLAUSE_DECL (c
);
7873 if (first_loc
== UNKNOWN_LOCATION
)
7874 first_loc
= OMP_CLAUSE_LOCATION (c
);
7875 if (TREE_CODE (t
) == TREE_LIST
7877 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
7879 if (TREE_PURPOSE (t
) != last_iter
)
7881 tree tcnt
= size_one_node
;
7882 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
7884 if (gimplify_expr (&TREE_VEC_ELT (it
, 1), pre_p
, NULL
,
7885 is_gimple_val
, fb_rvalue
) == GS_ERROR
7886 || gimplify_expr (&TREE_VEC_ELT (it
, 2), pre_p
, NULL
,
7887 is_gimple_val
, fb_rvalue
) == GS_ERROR
7888 || gimplify_expr (&TREE_VEC_ELT (it
, 3), pre_p
, NULL
,
7889 is_gimple_val
, fb_rvalue
) == GS_ERROR
7890 || (gimplify_expr (&TREE_VEC_ELT (it
, 4), pre_p
, NULL
,
7891 is_gimple_val
, fb_rvalue
)
7894 tree var
= TREE_VEC_ELT (it
, 0);
7895 tree begin
= TREE_VEC_ELT (it
, 1);
7896 tree end
= TREE_VEC_ELT (it
, 2);
7897 tree step
= TREE_VEC_ELT (it
, 3);
7898 tree orig_step
= TREE_VEC_ELT (it
, 4);
7899 tree type
= TREE_TYPE (var
);
7900 tree stype
= TREE_TYPE (step
);
7901 location_t loc
= DECL_SOURCE_LOCATION (var
);
7903 /* Compute count for this iterator as
7905 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7906 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7907 and compute product of those for the entire depend
7909 if (POINTER_TYPE_P (type
))
7910 endmbegin
= fold_build2_loc (loc
, POINTER_DIFF_EXPR
,
7913 endmbegin
= fold_build2_loc (loc
, MINUS_EXPR
, type
,
7915 tree stepm1
= fold_build2_loc (loc
, MINUS_EXPR
, stype
,
7917 build_int_cst (stype
, 1));
7918 tree stepp1
= fold_build2_loc (loc
, PLUS_EXPR
, stype
, step
,
7919 build_int_cst (stype
, 1));
7920 tree pos
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7921 unshare_expr (endmbegin
),
7923 pos
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7925 tree neg
= fold_build2_loc (loc
, PLUS_EXPR
, stype
,
7927 if (TYPE_UNSIGNED (stype
))
7929 neg
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, neg
);
7930 step
= fold_build1_loc (loc
, NEGATE_EXPR
, stype
, step
);
7932 neg
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, stype
,
7935 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
7938 pos
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, pos
,
7939 build_int_cst (stype
, 0));
7940 cond
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
,
7942 neg
= fold_build3_loc (loc
, COND_EXPR
, stype
, cond
, neg
,
7943 build_int_cst (stype
, 0));
7944 tree osteptype
= TREE_TYPE (orig_step
);
7945 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
7947 build_int_cst (osteptype
, 0));
7948 tree cnt
= fold_build3_loc (loc
, COND_EXPR
, stype
,
7950 cnt
= fold_convert_loc (loc
, sizetype
, cnt
);
7951 if (gimplify_expr (&cnt
, pre_p
, NULL
, is_gimple_val
,
7952 fb_rvalue
) == GS_ERROR
)
7954 tcnt
= size_binop_loc (loc
, MULT_EXPR
, tcnt
, cnt
);
7956 if (gimplify_expr (&tcnt
, pre_p
, NULL
, is_gimple_val
,
7957 fb_rvalue
) == GS_ERROR
)
7959 last_iter
= TREE_PURPOSE (t
);
7962 if (counts
[i
] == NULL_TREE
)
7963 counts
[i
] = last_count
;
7965 counts
[i
] = size_binop_loc (OMP_CLAUSE_LOCATION (c
),
7966 PLUS_EXPR
, counts
[i
], last_count
);
7971 for (i
= 0; i
< 4; i
++)
7977 tree total
= size_zero_node
;
7978 for (i
= 0; i
< 4; i
++)
7980 unused
[i
] = counts
[i
] == NULL_TREE
&& n
[i
] == 0;
7981 if (counts
[i
] == NULL_TREE
)
7982 counts
[i
] = size_zero_node
;
7984 counts
[i
] = size_binop (PLUS_EXPR
, counts
[i
], size_int (n
[i
]));
7985 if (gimplify_expr (&counts
[i
], pre_p
, NULL
, is_gimple_val
,
7986 fb_rvalue
) == GS_ERROR
)
7988 total
= size_binop (PLUS_EXPR
, total
, counts
[i
]);
7991 if (gimplify_expr (&total
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
7994 bool is_old
= unused
[1] && unused
[3];
7995 tree totalpx
= size_binop (PLUS_EXPR
, unshare_expr (total
),
7996 size_int (is_old
? 1 : 4));
7997 tree type
= build_array_type (ptr_type_node
, build_index_type (totalpx
));
7998 tree array
= create_tmp_var_raw (type
);
7999 TREE_ADDRESSABLE (array
) = 1;
8000 if (!poly_int_tree_p (totalpx
))
8002 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array
)))
8003 gimplify_type_sizes (TREE_TYPE (array
), pre_p
);
8004 if (gimplify_omp_ctxp
)
8006 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
8008 && (ctx
->region_type
== ORT_WORKSHARE
8009 || ctx
->region_type
== ORT_TASKGROUP
8010 || ctx
->region_type
== ORT_SIMD
8011 || ctx
->region_type
== ORT_ACC
))
8012 ctx
= ctx
->outer_context
;
8014 omp_add_variable (ctx
, array
, GOVD_LOCAL
| GOVD_SEEN
);
8016 gimplify_vla_decl (array
, pre_p
);
8019 gimple_add_tmp_var (array
);
8020 tree r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (0), NULL_TREE
,
8025 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8026 build_int_cst (ptr_type_node
, 0));
8027 gimplify_and_add (tem
, pre_p
);
8028 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (1), NULL_TREE
,
8031 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
,
8032 fold_convert (ptr_type_node
, total
));
8033 gimplify_and_add (tem
, pre_p
);
8034 for (i
= 1; i
< (is_old
? 2 : 4); i
++)
8036 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, size_int (i
+ !is_old
),
8037 NULL_TREE
, NULL_TREE
);
8038 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, counts
[i
- 1]);
8039 gimplify_and_add (tem
, pre_p
);
8046 for (i
= 0; i
< 4; i
++)
8048 if (i
&& (i
>= j
|| unused
[i
- 1]))
8050 cnts
[i
] = cnts
[i
- 1];
8053 cnts
[i
] = create_tmp_var (sizetype
);
8055 g
= gimple_build_assign (cnts
[i
], size_int (is_old
? 2 : 5));
8060 t
= size_binop (PLUS_EXPR
, counts
[0], size_int (2));
8062 t
= size_binop (PLUS_EXPR
, cnts
[i
- 1], counts
[i
- 1]);
8063 if (gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
8066 g
= gimple_build_assign (cnts
[i
], t
);
8068 gimple_seq_add_stmt (pre_p
, g
);
8071 last_iter
= NULL_TREE
;
8072 tree last_bind
= NULL_TREE
;
8073 tree
*last_body
= NULL
;
8074 for (c
= *list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
8075 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
)
8077 switch (OMP_CLAUSE_DEPEND_KIND (c
))
8079 case OMP_CLAUSE_DEPEND_IN
:
8082 case OMP_CLAUSE_DEPEND_OUT
:
8083 case OMP_CLAUSE_DEPEND_INOUT
:
8086 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET
:
8089 case OMP_CLAUSE_DEPEND_DEPOBJ
:
8092 case OMP_CLAUSE_DEPEND_SOURCE
:
8093 case OMP_CLAUSE_DEPEND_SINK
:
8098 tree t
= OMP_CLAUSE_DECL (c
);
8099 if (TREE_CODE (t
) == TREE_LIST
8101 && TREE_CODE (TREE_PURPOSE (t
)) == TREE_VEC
)
8103 if (TREE_PURPOSE (t
) != last_iter
)
8106 gimplify_and_add (last_bind
, pre_p
);
8107 tree block
= TREE_VEC_ELT (TREE_PURPOSE (t
), 5);
8108 last_bind
= build3 (BIND_EXPR
, void_type_node
,
8109 BLOCK_VARS (block
), NULL
, block
);
8110 TREE_SIDE_EFFECTS (last_bind
) = 1;
8111 SET_EXPR_LOCATION (last_bind
, OMP_CLAUSE_LOCATION (c
));
8112 tree
*p
= &BIND_EXPR_BODY (last_bind
);
8113 for (tree it
= TREE_PURPOSE (t
); it
; it
= TREE_CHAIN (it
))
8115 tree var
= TREE_VEC_ELT (it
, 0);
8116 tree begin
= TREE_VEC_ELT (it
, 1);
8117 tree end
= TREE_VEC_ELT (it
, 2);
8118 tree step
= TREE_VEC_ELT (it
, 3);
8119 tree orig_step
= TREE_VEC_ELT (it
, 4);
8120 tree type
= TREE_TYPE (var
);
8121 location_t loc
= DECL_SOURCE_LOCATION (var
);
8129 if (orig_step > 0) {
8130 if (var < end) goto beg_label;
8132 if (var > end) goto beg_label;
8134 for each iterator, with inner iterators added to
8136 tree beg_label
= create_artificial_label (loc
);
8137 tree cond_label
= NULL_TREE
;
8138 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8140 append_to_statement_list_force (tem
, p
);
8141 tem
= build_and_jump (&cond_label
);
8142 append_to_statement_list_force (tem
, p
);
8143 tem
= build1 (LABEL_EXPR
, void_type_node
, beg_label
);
8144 append_to_statement_list (tem
, p
);
8145 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
8146 NULL_TREE
, NULL_TREE
);
8147 TREE_SIDE_EFFECTS (bind
) = 1;
8148 SET_EXPR_LOCATION (bind
, loc
);
8149 append_to_statement_list_force (bind
, p
);
8150 if (POINTER_TYPE_P (type
))
8151 tem
= build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
8152 var
, fold_convert_loc (loc
, sizetype
,
8155 tem
= build2_loc (loc
, PLUS_EXPR
, type
, var
, step
);
8156 tem
= build2_loc (loc
, MODIFY_EXPR
, void_type_node
,
8158 append_to_statement_list_force (tem
, p
);
8159 tem
= build1 (LABEL_EXPR
, void_type_node
, cond_label
);
8160 append_to_statement_list (tem
, p
);
8161 tree cond
= fold_build2_loc (loc
, LT_EXPR
,
8165 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8166 cond
, build_and_jump (&beg_label
),
8168 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8171 = fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8172 cond
, build_and_jump (&beg_label
),
8174 tree osteptype
= TREE_TYPE (orig_step
);
8175 cond
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
,
8177 build_int_cst (osteptype
, 0));
8178 tem
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
8180 append_to_statement_list_force (tem
, p
);
8181 p
= &BIND_EXPR_BODY (bind
);
8185 last_iter
= TREE_PURPOSE (t
);
8186 if (TREE_CODE (TREE_VALUE (t
)) == COMPOUND_EXPR
)
8188 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t
),
8190 TREE_VALUE (t
) = TREE_OPERAND (TREE_VALUE (t
), 1);
8192 if (error_operand_p (TREE_VALUE (t
)))
8194 TREE_VALUE (t
) = build_fold_addr_expr (TREE_VALUE (t
));
8195 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8196 NULL_TREE
, NULL_TREE
);
8197 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8198 void_type_node
, r
, TREE_VALUE (t
));
8199 append_to_statement_list_force (tem
, last_body
);
8200 tem
= build2_loc (OMP_CLAUSE_LOCATION (c
), MODIFY_EXPR
,
8201 void_type_node
, cnts
[i
],
8202 size_binop (PLUS_EXPR
, cnts
[i
], size_int (1)));
8203 append_to_statement_list_force (tem
, last_body
);
8204 TREE_VALUE (t
) = null_pointer_node
;
8210 gimplify_and_add (last_bind
, pre_p
);
8211 last_bind
= NULL_TREE
;
8213 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
8215 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
8216 NULL
, is_gimple_val
, fb_rvalue
);
8217 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
8219 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
8221 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
8222 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
8223 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8225 r
= build4 (ARRAY_REF
, ptr_type_node
, array
, cnts
[i
],
8226 NULL_TREE
, NULL_TREE
);
8227 tem
= build2 (MODIFY_EXPR
, void_type_node
, r
, OMP_CLAUSE_DECL (c
));
8228 gimplify_and_add (tem
, pre_p
);
8229 g
= gimple_build_assign (cnts
[i
], size_binop (PLUS_EXPR
, cnts
[i
],
8231 gimple_seq_add_stmt (pre_p
, g
);
8235 gimplify_and_add (last_bind
, pre_p
);
8236 tree cond
= boolean_false_node
;
8240 cond
= build2_loc (first_loc
, NE_EXPR
, boolean_type_node
, cnts
[0],
8241 size_binop_loc (first_loc
, PLUS_EXPR
, counts
[0],
8244 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8245 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8247 size_binop_loc (first_loc
, PLUS_EXPR
,
8253 tree prev
= size_int (5);
8254 for (i
= 0; i
< 4; i
++)
8258 prev
= size_binop_loc (first_loc
, PLUS_EXPR
, counts
[i
], prev
);
8259 cond
= build2_loc (first_loc
, TRUTH_OR_EXPR
, boolean_type_node
, cond
,
8260 build2_loc (first_loc
, NE_EXPR
, boolean_type_node
,
8261 cnts
[i
], unshare_expr (prev
)));
8264 tem
= build3_loc (first_loc
, COND_EXPR
, void_type_node
, cond
,
8265 build_call_expr_loc (first_loc
,
8266 builtin_decl_explicit (BUILT_IN_TRAP
),
8268 gimplify_and_add (tem
, pre_p
);
8269 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_DEPEND
);
8270 OMP_CLAUSE_DEPEND_KIND (c
) = OMP_CLAUSE_DEPEND_LAST
;
8271 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (array
);
8272 OMP_CLAUSE_CHAIN (c
) = *list_p
;
8277 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8278 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8279 the struct node to insert the new mapping after (when the struct node is
8280 initially created). PREV_NODE is the first of two or three mappings for a
8281 pointer, and is either:
8282 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8284 - not the node before C. This is true when we have a reference-to-pointer
8285 type (with a mapping for the reference and for the pointer), or for
8286 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8287 If SCP is non-null, the new node is inserted before *SCP.
8288 if SCP is null, the new node is inserted before PREV_NODE.
8290 - PREV_NODE, if SCP is non-null.
8291 - The newly-created ALLOC or RELEASE node, if SCP is null.
8292 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8293 reference to a pointer. */
8296 insert_struct_comp_map (enum tree_code code
, tree c
, tree struct_node
,
8297 tree prev_node
, tree
*scp
)
8299 enum gomp_map_kind mkind
8300 = (code
== OMP_TARGET_EXIT_DATA
|| code
== OACC_EXIT_DATA
)
8301 ? GOMP_MAP_RELEASE
: GOMP_MAP_ALLOC
;
8303 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8304 tree cl
= scp
? prev_node
: c2
;
8305 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
8306 OMP_CLAUSE_DECL (c2
) = unshare_expr (OMP_CLAUSE_DECL (c
));
8307 OMP_CLAUSE_CHAIN (c2
) = scp
? *scp
: prev_node
;
8308 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8309 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8310 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8311 == GOMP_MAP_TO_PSET
))
8312 OMP_CLAUSE_SIZE (c2
) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node
));
8314 OMP_CLAUSE_SIZE (c2
) = TYPE_SIZE_UNIT (ptr_type_node
);
8316 OMP_CLAUSE_CHAIN (struct_node
) = c2
;
8318 /* We might need to create an additional mapping if we have a reference to a
8319 pointer (in C++). Don't do this if we have something other than a
8320 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8321 if (OMP_CLAUSE_CHAIN (prev_node
) != c
8322 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node
)) == OMP_CLAUSE_MAP
8323 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8324 == GOMP_MAP_ALWAYS_POINTER
)
8325 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node
))
8326 == GOMP_MAP_ATTACH_DETACH
)))
8328 tree c4
= OMP_CLAUSE_CHAIN (prev_node
);
8329 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_MAP
);
8330 OMP_CLAUSE_SET_MAP_KIND (c3
, mkind
);
8331 OMP_CLAUSE_DECL (c3
) = unshare_expr (OMP_CLAUSE_DECL (c4
));
8332 OMP_CLAUSE_SIZE (c3
) = TYPE_SIZE_UNIT (ptr_type_node
);
8333 OMP_CLAUSE_CHAIN (c3
) = prev_node
;
8335 OMP_CLAUSE_CHAIN (c2
) = c3
;
8346 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8347 and set *BITPOSP and *POFFSETP to the bit offset of the access.
8348 If BASE_REF is non-NULL and the containing object is a reference, set
8349 *BASE_REF to that reference before dereferencing the object.
8350 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8351 has array type, else return NULL. */
8354 extract_base_bit_offset (tree base
, tree
*base_ref
, poly_int64
*bitposp
,
8355 poly_offset_int
*poffsetp
)
8358 poly_int64 bitsize
, bitpos
;
8360 int unsignedp
, reversep
, volatilep
= 0;
8361 poly_offset_int poffset
;
8365 *base_ref
= NULL_TREE
;
8367 while (TREE_CODE (base
) == ARRAY_REF
)
8368 base
= TREE_OPERAND (base
, 0);
8370 if (TREE_CODE (base
) == INDIRECT_REF
)
8371 base
= TREE_OPERAND (base
, 0);
8375 if (TREE_CODE (base
) == ARRAY_REF
)
8377 while (TREE_CODE (base
) == ARRAY_REF
)
8378 base
= TREE_OPERAND (base
, 0);
8379 if (TREE_CODE (base
) != COMPONENT_REF
8380 || TREE_CODE (TREE_TYPE (base
)) != ARRAY_TYPE
)
8383 else if (TREE_CODE (base
) == INDIRECT_REF
8384 && TREE_CODE (TREE_OPERAND (base
, 0)) == COMPONENT_REF
8385 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0)))
8387 base
= TREE_OPERAND (base
, 0);
8390 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
, &mode
,
8391 &unsignedp
, &reversep
, &volatilep
);
8393 tree orig_base
= base
;
8395 if ((TREE_CODE (base
) == INDIRECT_REF
8396 || (TREE_CODE (base
) == MEM_REF
8397 && integer_zerop (TREE_OPERAND (base
, 1))))
8398 && DECL_P (TREE_OPERAND (base
, 0))
8399 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base
, 0))) == REFERENCE_TYPE
)
8400 base
= TREE_OPERAND (base
, 0);
8402 gcc_assert (offset
== NULL_TREE
|| poly_int_tree_p (offset
));
8405 poffset
= wi::to_poly_offset (offset
);
8409 if (maybe_ne (bitpos
, 0))
8410 poffset
+= bits_to_bytes_round_down (bitpos
);
8413 *poffsetp
= poffset
;
8415 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8416 if (base_ref
&& orig_base
!= base
)
8417 *base_ref
= orig_base
;
8422 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8425 is_or_contains_p (tree expr
, tree base_ptr
)
8427 while (expr
!= base_ptr
)
8428 if (TREE_CODE (base_ptr
) == COMPONENT_REF
)
8429 base_ptr
= TREE_OPERAND (base_ptr
, 0);
8432 return expr
== base_ptr
;
8435 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8436 several rules, and with some level of ambiguity, hopefully we can at least
8437 collect the complexity here in one place. */
8440 omp_target_reorder_clauses (tree
*list_p
)
8442 /* Collect refs to alloc/release/delete maps. */
8443 auto_vec
<tree
, 32> ard
;
8445 while (*cp
!= NULL_TREE
)
8446 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8447 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALLOC
8448 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_RELEASE
8449 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_DELETE
))
8451 /* Unlink cp and push to ard. */
8453 tree nc
= OMP_CLAUSE_CHAIN (c
);
8457 /* Any associated pointer type maps should also move along. */
8458 while (*cp
!= NULL_TREE
8459 && OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
8460 && (OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8461 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8462 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ATTACH_DETACH
8463 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_POINTER
8464 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_ALWAYS_POINTER
8465 || OMP_CLAUSE_MAP_KIND (*cp
) == GOMP_MAP_TO_PSET
))
8468 nc
= OMP_CLAUSE_CHAIN (c
);
8474 cp
= &OMP_CLAUSE_CHAIN (*cp
);
8476 /* Link alloc/release/delete maps to the end of list. */
8477 for (unsigned int i
= 0; i
< ard
.length (); i
++)
8480 cp
= &OMP_CLAUSE_CHAIN (ard
[i
]);
8484 /* OpenMP 5.0 requires that pointer variables are mapped before
8485 its use as a base-pointer. */
8486 auto_vec
<tree
*, 32> atf
;
8487 for (tree
*cp
= list_p
; *cp
; cp
= &OMP_CLAUSE_CHAIN (*cp
))
8488 if (OMP_CLAUSE_CODE (*cp
) == OMP_CLAUSE_MAP
)
8490 /* Collect alloc, to, from, to/from clause tree pointers. */
8491 gomp_map_kind k
= OMP_CLAUSE_MAP_KIND (*cp
);
8492 if (k
== GOMP_MAP_ALLOC
8494 || k
== GOMP_MAP_FROM
8495 || k
== GOMP_MAP_TOFROM
8496 || k
== GOMP_MAP_ALWAYS_TO
8497 || k
== GOMP_MAP_ALWAYS_FROM
8498 || k
== GOMP_MAP_ALWAYS_TOFROM
)
8502 for (unsigned int i
= 0; i
< atf
.length (); i
++)
8506 tree decl
= OMP_CLAUSE_DECL (*cp
);
8507 if (TREE_CODE (decl
) == INDIRECT_REF
|| TREE_CODE (decl
) == MEM_REF
)
8509 tree base_ptr
= TREE_OPERAND (decl
, 0);
8510 STRIP_TYPE_NOPS (base_ptr
);
8511 for (unsigned int j
= i
+ 1; j
< atf
.length (); j
++)
8514 tree decl2
= OMP_CLAUSE_DECL (*cp2
);
8515 if (is_or_contains_p (decl2
, base_ptr
))
8517 /* Move *cp2 to before *cp. */
8519 *cp2
= OMP_CLAUSE_CHAIN (c
);
8520 OMP_CLAUSE_CHAIN (c
) = *cp
;
8529 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8530 and previous omp contexts. */
8533 gimplify_scan_omp_clauses (tree
*list_p
, gimple_seq
*pre_p
,
8534 enum omp_region_type region_type
,
8535 enum tree_code code
)
8537 struct gimplify_omp_ctx
*ctx
, *outer_ctx
;
8539 hash_map
<tree
, tree
> *struct_map_to_clause
= NULL
;
8540 hash_set
<tree
> *struct_deref_set
= NULL
;
8541 tree
*prev_list_p
= NULL
, *orig_list_p
= list_p
;
8542 int handled_depend_iterators
= -1;
8545 ctx
= new_omp_context (region_type
);
8547 outer_ctx
= ctx
->outer_context
;
8548 if (code
== OMP_TARGET
)
8550 if (!lang_GNU_Fortran ())
8551 ctx
->defaultmap
[GDMK_POINTER
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
8552 ctx
->defaultmap
[GDMK_SCALAR
] = GOVD_FIRSTPRIVATE
;
8554 if (!lang_GNU_Fortran ())
8558 case OMP_TARGET_DATA
:
8559 case OMP_TARGET_ENTER_DATA
:
8560 case OMP_TARGET_EXIT_DATA
:
8562 case OACC_HOST_DATA
:
8565 ctx
->target_firstprivatize_array_bases
= true;
8570 if (code
== OMP_TARGET
8571 || code
== OMP_TARGET_DATA
8572 || code
== OMP_TARGET_ENTER_DATA
8573 || code
== OMP_TARGET_EXIT_DATA
)
8574 omp_target_reorder_clauses (list_p
);
8576 while ((c
= *list_p
) != NULL
)
8578 bool remove
= false;
8579 bool notice_outer
= true;
8580 const char *check_non_private
= NULL
;
8584 switch (OMP_CLAUSE_CODE (c
))
8586 case OMP_CLAUSE_PRIVATE
:
8587 flags
= GOVD_PRIVATE
| GOVD_EXPLICIT
;
8588 if (lang_hooks
.decls
.omp_private_outer_ref (OMP_CLAUSE_DECL (c
)))
8590 flags
|= GOVD_PRIVATE_OUTER_REF
;
8591 OMP_CLAUSE_PRIVATE_OUTER_REF (c
) = 1;
8594 notice_outer
= false;
8596 case OMP_CLAUSE_SHARED
:
8597 flags
= GOVD_SHARED
| GOVD_EXPLICIT
;
8599 case OMP_CLAUSE_FIRSTPRIVATE
:
8600 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
8601 check_non_private
= "firstprivate";
8603 case OMP_CLAUSE_LASTPRIVATE
:
8604 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8607 case OMP_DISTRIBUTE
:
8608 error_at (OMP_CLAUSE_LOCATION (c
),
8609 "conditional %<lastprivate%> clause on "
8610 "%qs construct", "distribute");
8611 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8614 error_at (OMP_CLAUSE_LOCATION (c
),
8615 "conditional %<lastprivate%> clause on "
8616 "%qs construct", "taskloop");
8617 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8622 flags
= GOVD_LASTPRIVATE
| GOVD_SEEN
| GOVD_EXPLICIT
;
8623 if (code
!= OMP_LOOP
)
8624 check_non_private
= "lastprivate";
8625 decl
= OMP_CLAUSE_DECL (c
);
8626 if (error_operand_p (decl
))
8628 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
)
8629 && !lang_hooks
.decls
.omp_scalar_p (decl
))
8631 error_at (OMP_CLAUSE_LOCATION (c
),
8632 "non-scalar variable %qD in conditional "
8633 "%<lastprivate%> clause", decl
);
8634 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) = 0;
8636 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
8637 flags
|= GOVD_LASTPRIVATE_CONDITIONAL
;
8639 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
8640 || ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
8641 == ORT_COMBINED_TEAMS
))
8642 && splay_tree_lookup (outer_ctx
->variables
,
8643 (splay_tree_key
) decl
) == NULL
)
8645 omp_add_variable (outer_ctx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8646 if (outer_ctx
->outer_context
)
8647 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8650 && (outer_ctx
->region_type
& ORT_TASK
) != 0
8651 && outer_ctx
->combined_loop
8652 && splay_tree_lookup (outer_ctx
->variables
,
8653 (splay_tree_key
) decl
) == NULL
)
8655 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8656 if (outer_ctx
->outer_context
)
8657 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8660 && (outer_ctx
->region_type
== ORT_WORKSHARE
8661 || outer_ctx
->region_type
== ORT_ACC
)
8662 && outer_ctx
->combined_loop
8663 && splay_tree_lookup (outer_ctx
->variables
,
8664 (splay_tree_key
) decl
) == NULL
8665 && !omp_check_private (outer_ctx
, decl
, false))
8667 omp_add_variable (outer_ctx
, decl
, GOVD_LASTPRIVATE
| GOVD_SEEN
);
8668 if (outer_ctx
->outer_context
8669 && (outer_ctx
->outer_context
->region_type
8670 == ORT_COMBINED_PARALLEL
)
8671 && splay_tree_lookup (outer_ctx
->outer_context
->variables
,
8672 (splay_tree_key
) decl
) == NULL
)
8674 struct gimplify_omp_ctx
*octx
= outer_ctx
->outer_context
;
8675 omp_add_variable (octx
, decl
, GOVD_SHARED
| GOVD_SEEN
);
8676 if (octx
->outer_context
)
8678 octx
= octx
->outer_context
;
8679 if (octx
->region_type
== ORT_WORKSHARE
8680 && octx
->combined_loop
8681 && splay_tree_lookup (octx
->variables
,
8682 (splay_tree_key
) decl
) == NULL
8683 && !omp_check_private (octx
, decl
, false))
8685 omp_add_variable (octx
, decl
,
8686 GOVD_LASTPRIVATE
| GOVD_SEEN
);
8687 octx
= octx
->outer_context
;
8689 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8690 == ORT_COMBINED_TEAMS
)
8691 && (splay_tree_lookup (octx
->variables
,
8692 (splay_tree_key
) decl
)
8695 omp_add_variable (octx
, decl
,
8696 GOVD_SHARED
| GOVD_SEEN
);
8697 octx
= octx
->outer_context
;
8701 omp_notice_variable (octx
, decl
, true);
8704 else if (outer_ctx
->outer_context
)
8705 omp_notice_variable (outer_ctx
->outer_context
, decl
, true);
8708 case OMP_CLAUSE_REDUCTION
:
8709 if (OMP_CLAUSE_REDUCTION_TASK (c
))
8711 if (region_type
== ORT_WORKSHARE
)
8714 nowait
= omp_find_clause (*list_p
,
8715 OMP_CLAUSE_NOWAIT
) != NULL_TREE
;
8717 && (outer_ctx
== NULL
8718 || outer_ctx
->region_type
!= ORT_COMBINED_PARALLEL
))
8720 error_at (OMP_CLAUSE_LOCATION (c
),
8721 "%<task%> reduction modifier on a construct "
8722 "with a %<nowait%> clause");
8723 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8726 else if ((region_type
& ORT_PARALLEL
) != ORT_PARALLEL
)
8728 error_at (OMP_CLAUSE_LOCATION (c
),
8729 "invalid %<task%> reduction modifier on construct "
8730 "other than %<parallel%>, %qs or %<sections%>",
8731 lang_GNU_Fortran () ? "do" : "for");
8732 OMP_CLAUSE_REDUCTION_TASK (c
) = 0;
8735 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
8739 error_at (OMP_CLAUSE_LOCATION (c
),
8740 "%<inscan%> %<reduction%> clause on "
8741 "%qs construct", "sections");
8742 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8745 error_at (OMP_CLAUSE_LOCATION (c
),
8746 "%<inscan%> %<reduction%> clause on "
8747 "%qs construct", "parallel");
8748 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8751 error_at (OMP_CLAUSE_LOCATION (c
),
8752 "%<inscan%> %<reduction%> clause on "
8753 "%qs construct", "teams");
8754 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8757 error_at (OMP_CLAUSE_LOCATION (c
),
8758 "%<inscan%> %<reduction%> clause on "
8759 "%qs construct", "taskloop");
8760 OMP_CLAUSE_REDUCTION_INSCAN (c
) = 0;
8766 case OMP_CLAUSE_IN_REDUCTION
:
8767 case OMP_CLAUSE_TASK_REDUCTION
:
8768 flags
= GOVD_REDUCTION
| GOVD_SEEN
| GOVD_EXPLICIT
;
8769 /* OpenACC permits reductions on private variables. */
8770 if (!(region_type
& ORT_ACC
)
8771 /* taskgroup is actually not a worksharing region. */
8772 && code
!= OMP_TASKGROUP
)
8773 check_non_private
= omp_clause_code_name
[OMP_CLAUSE_CODE (c
)];
8774 decl
= OMP_CLAUSE_DECL (c
);
8775 if (TREE_CODE (decl
) == MEM_REF
)
8777 tree type
= TREE_TYPE (decl
);
8778 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type
)), pre_p
,
8779 NULL
, is_gimple_val
, fb_rvalue
, false)
8785 tree v
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8788 omp_firstprivatize_variable (ctx
, v
);
8789 omp_notice_variable (ctx
, v
, true);
8791 decl
= TREE_OPERAND (decl
, 0);
8792 if (TREE_CODE (decl
) == POINTER_PLUS_EXPR
)
8794 if (gimplify_expr (&TREE_OPERAND (decl
, 1), pre_p
,
8795 NULL
, is_gimple_val
, fb_rvalue
, false)
8801 v
= TREE_OPERAND (decl
, 1);
8804 omp_firstprivatize_variable (ctx
, v
);
8805 omp_notice_variable (ctx
, v
, true);
8807 decl
= TREE_OPERAND (decl
, 0);
8809 if (TREE_CODE (decl
) == ADDR_EXPR
8810 || TREE_CODE (decl
) == INDIRECT_REF
)
8811 decl
= TREE_OPERAND (decl
, 0);
8814 case OMP_CLAUSE_LINEAR
:
8815 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
), pre_p
, NULL
,
8816 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
8823 if (code
== OMP_SIMD
8824 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8826 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8828 && octx
->region_type
== ORT_WORKSHARE
8829 && octx
->combined_loop
8830 && !octx
->distribute
)
8832 if (octx
->outer_context
8833 && (octx
->outer_context
->region_type
8834 == ORT_COMBINED_PARALLEL
))
8835 octx
= octx
->outer_context
->outer_context
;
8837 octx
= octx
->outer_context
;
8840 && octx
->region_type
== ORT_WORKSHARE
8841 && octx
->combined_loop
8842 && octx
->distribute
)
8844 error_at (OMP_CLAUSE_LOCATION (c
),
8845 "%<linear%> clause for variable other than "
8846 "loop iterator specified on construct "
8847 "combined with %<distribute%>");
8852 /* For combined #pragma omp parallel for simd, need to put
8853 lastprivate and perhaps firstprivate too on the
8854 parallel. Similarly for #pragma omp for simd. */
8855 struct gimplify_omp_ctx
*octx
= outer_ctx
;
8859 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8860 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8862 decl
= OMP_CLAUSE_DECL (c
);
8863 if (error_operand_p (decl
))
8869 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
8870 flags
|= GOVD_FIRSTPRIVATE
;
8871 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8872 flags
|= GOVD_LASTPRIVATE
;
8874 && octx
->region_type
== ORT_WORKSHARE
8875 && octx
->combined_loop
)
8877 if (octx
->outer_context
8878 && (octx
->outer_context
->region_type
8879 == ORT_COMBINED_PARALLEL
))
8880 octx
= octx
->outer_context
;
8881 else if (omp_check_private (octx
, decl
, false))
8885 && (octx
->region_type
& ORT_TASK
) != 0
8886 && octx
->combined_loop
)
8889 && octx
->region_type
== ORT_COMBINED_PARALLEL
8890 && ctx
->region_type
== ORT_WORKSHARE
8891 && octx
== outer_ctx
)
8892 flags
= GOVD_SEEN
| GOVD_SHARED
;
8894 && ((octx
->region_type
& ORT_COMBINED_TEAMS
)
8895 == ORT_COMBINED_TEAMS
))
8896 flags
= GOVD_SEEN
| GOVD_SHARED
;
8898 && octx
->region_type
== ORT_COMBINED_TARGET
)
8900 flags
&= ~GOVD_LASTPRIVATE
;
8901 if (flags
== GOVD_SEEN
)
8907 = splay_tree_lookup (octx
->variables
,
8908 (splay_tree_key
) decl
);
8909 if (on
&& (on
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
8914 omp_add_variable (octx
, decl
, flags
);
8915 if (octx
->outer_context
== NULL
)
8917 octx
= octx
->outer_context
;
8922 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8923 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
8924 omp_notice_variable (octx
, decl
, true);
8926 flags
= GOVD_LINEAR
| GOVD_EXPLICIT
;
8927 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c
)
8928 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
8930 notice_outer
= false;
8931 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
8935 case OMP_CLAUSE_MAP
:
8936 decl
= OMP_CLAUSE_DECL (c
);
8937 if (error_operand_p (decl
))
8944 if (TREE_CODE (TREE_TYPE (decl
)) != ARRAY_TYPE
)
8947 case OMP_TARGET_DATA
:
8948 case OMP_TARGET_ENTER_DATA
:
8949 case OMP_TARGET_EXIT_DATA
:
8950 case OACC_ENTER_DATA
:
8951 case OACC_EXIT_DATA
:
8952 case OACC_HOST_DATA
:
8953 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
8954 || (OMP_CLAUSE_MAP_KIND (c
)
8955 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
8956 /* For target {,enter ,exit }data only the array slice is
8957 mapped, but not the pointer to it. */
8963 /* For Fortran, not only the pointer to the data is mapped but also
8964 the address of the pointer, the array descriptor etc.; for
8965 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8966 does not make sense. Likewise, for 'update' only transferring the
8967 data itself is needed as the rest has been handled in previous
8968 directives. However, for 'exit data', the array descriptor needs
8969 to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
8971 NOTE: Generally, it is not safe to perform "enter data" operations
8972 on arrays where the data *or the descriptor* may go out of scope
8973 before a corresponding "exit data" operation -- and such a
8974 descriptor may be synthesized temporarily, e.g. to pass an
8975 explicit-shape array to a function expecting an assumed-shape
8976 argument. Performing "enter data" inside the called function
8977 would thus be problematic. */
8978 if (code
== OMP_TARGET_EXIT_DATA
8979 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
)
8980 OMP_CLAUSE_SET_MAP_KIND (c
, OMP_CLAUSE_MAP_KIND (*prev_list_p
)
8982 ? GOMP_MAP_DELETE
: GOMP_MAP_RELEASE
);
8983 else if ((code
== OMP_TARGET_EXIT_DATA
|| code
== OMP_TARGET_UPDATE
)
8984 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_POINTER
8985 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_TO_PSET
))
8990 if (DECL_P (decl
) && outer_ctx
&& (region_type
& ORT_ACC
))
8992 struct gimplify_omp_ctx
*octx
;
8993 for (octx
= outer_ctx
; octx
; octx
= octx
->outer_context
)
8995 if (octx
->region_type
!= ORT_ACC_HOST_DATA
)
8998 = splay_tree_lookup (octx
->variables
,
8999 (splay_tree_key
) decl
);
9001 error_at (OMP_CLAUSE_LOCATION (c
), "variable %qE "
9002 "declared in enclosing %<host_data%> region",
9006 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9007 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9008 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9009 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9010 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9015 else if ((OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
9016 || (OMP_CLAUSE_MAP_KIND (c
)
9017 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
)
9018 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9019 && TREE_CODE (OMP_CLAUSE_SIZE (c
)) != INTEGER_CST
)
9022 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c
), pre_p
, NULL
,
9024 if ((region_type
& ORT_TARGET
) != 0)
9025 omp_add_variable (ctx
, OMP_CLAUSE_SIZE (c
),
9026 GOVD_FIRSTPRIVATE
| GOVD_SEEN
);
9032 if (TREE_CODE (d
) == ARRAY_REF
)
9034 while (TREE_CODE (d
) == ARRAY_REF
)
9035 d
= TREE_OPERAND (d
, 0);
9036 if (TREE_CODE (d
) == COMPONENT_REF
9037 && TREE_CODE (TREE_TYPE (d
)) == ARRAY_TYPE
)
9040 pd
= &OMP_CLAUSE_DECL (c
);
9042 && TREE_CODE (decl
) == INDIRECT_REF
9043 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
9044 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9047 pd
= &TREE_OPERAND (decl
, 0);
9048 decl
= TREE_OPERAND (decl
, 0);
9050 bool indir_p
= false;
9051 tree orig_decl
= decl
;
9052 tree decl_ref
= NULL_TREE
;
9053 if ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
)) != 0
9054 && TREE_CODE (*pd
) == COMPONENT_REF
9055 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
9056 && code
!= OACC_UPDATE
)
9058 while (TREE_CODE (decl
) == COMPONENT_REF
)
9060 decl
= TREE_OPERAND (decl
, 0);
9061 if (((TREE_CODE (decl
) == MEM_REF
9062 && integer_zerop (TREE_OPERAND (decl
, 1)))
9063 || INDIRECT_REF_P (decl
))
9064 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9068 decl
= TREE_OPERAND (decl
, 0);
9070 if (TREE_CODE (decl
) == INDIRECT_REF
9071 && DECL_P (TREE_OPERAND (decl
, 0))
9072 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9076 decl
= TREE_OPERAND (decl
, 0);
9080 else if (TREE_CODE (decl
) == COMPONENT_REF
)
9082 while (TREE_CODE (decl
) == COMPONENT_REF
)
9083 decl
= TREE_OPERAND (decl
, 0);
9084 if (TREE_CODE (decl
) == INDIRECT_REF
9085 && DECL_P (TREE_OPERAND (decl
, 0))
9086 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
9088 decl
= TREE_OPERAND (decl
, 0);
9090 if (decl
!= orig_decl
&& DECL_P (decl
) && indir_p
)
9093 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9094 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9095 /* We have a dereference of a struct member. Make this an
9096 attach/detach operation, and ensure the base pointer is
9097 mapped as a FIRSTPRIVATE_POINTER. */
9098 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9099 flags
= GOVD_MAP
| GOVD_SEEN
| GOVD_EXPLICIT
;
9100 tree next_clause
= OMP_CLAUSE_CHAIN (c
);
9101 if (k
== GOMP_MAP_ATTACH
9102 && code
!= OACC_ENTER_DATA
9103 && code
!= OMP_TARGET_ENTER_DATA
9105 || (OMP_CLAUSE_CODE (next_clause
) != OMP_CLAUSE_MAP
)
9106 || (OMP_CLAUSE_MAP_KIND (next_clause
)
9107 != GOMP_MAP_POINTER
)
9108 || OMP_CLAUSE_DECL (next_clause
) != decl
)
9109 && (!struct_deref_set
9110 || !struct_deref_set
->contains (decl
)))
9112 if (!struct_deref_set
)
9113 struct_deref_set
= new hash_set
<tree
> ();
9114 /* As well as the attach, we also need a
9115 FIRSTPRIVATE_POINTER clause to properly map the
9116 pointer to the struct base. */
9117 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9119 OMP_CLAUSE_SET_MAP_KIND (c2
, GOMP_MAP_ALLOC
);
9120 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2
)
9123 = build_int_cst (build_pointer_type (char_type_node
),
9125 OMP_CLAUSE_DECL (c2
)
9126 = build2 (MEM_REF
, char_type_node
,
9127 decl_ref
? decl_ref
: decl
, charptr_zero
);
9128 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9129 tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9131 OMP_CLAUSE_SET_MAP_KIND (c3
,
9132 GOMP_MAP_FIRSTPRIVATE_POINTER
);
9133 OMP_CLAUSE_DECL (c3
) = decl
;
9134 OMP_CLAUSE_SIZE (c3
) = size_zero_node
;
9135 tree mapgrp
= *prev_list_p
;
9137 OMP_CLAUSE_CHAIN (c3
) = mapgrp
;
9138 OMP_CLAUSE_CHAIN (c2
) = c3
;
9140 struct_deref_set
->add (decl
);
9144 /* An "attach/detach" operation on an update directive should
9145 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9146 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9147 depends on the previous mapping. */
9148 if (code
== OACC_UPDATE
9149 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9150 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_ALWAYS_POINTER
);
9152 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9153 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH
9154 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_DETACH
9155 && code
!= OACC_UPDATE
9156 && code
!= OMP_TARGET_UPDATE
)
9158 if (error_operand_p (decl
))
9164 tree stype
= TREE_TYPE (decl
);
9165 if (TREE_CODE (stype
) == REFERENCE_TYPE
)
9166 stype
= TREE_TYPE (stype
);
9167 if (TYPE_SIZE_UNIT (stype
) == NULL
9168 || TREE_CODE (TYPE_SIZE_UNIT (stype
)) != INTEGER_CST
)
9170 error_at (OMP_CLAUSE_LOCATION (c
),
9171 "mapping field %qE of variable length "
9172 "structure", OMP_CLAUSE_DECL (c
));
9177 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
9178 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9180 /* Error recovery. */
9181 if (prev_list_p
== NULL
)
9186 if (OMP_CLAUSE_CHAIN (*prev_list_p
) != c
)
9188 tree ch
= OMP_CLAUSE_CHAIN (*prev_list_p
);
9189 if (ch
== NULL_TREE
|| OMP_CLAUSE_CHAIN (ch
) != c
)
9197 poly_offset_int offset1
;
9202 = extract_base_bit_offset (OMP_CLAUSE_DECL (c
), &base_ref
,
9203 &bitpos1
, &offset1
);
9205 gcc_assert (base
== decl
);
9208 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
)decl
);
9209 bool ptr
= (OMP_CLAUSE_MAP_KIND (c
)
9210 == GOMP_MAP_ALWAYS_POINTER
);
9211 bool attach_detach
= (OMP_CLAUSE_MAP_KIND (c
)
9212 == GOMP_MAP_ATTACH_DETACH
);
9213 bool attach
= OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH
9214 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_DETACH
;
9215 bool has_attachments
= false;
9216 /* For OpenACC, pointers in structs should trigger an
9219 && ((region_type
& (ORT_ACC
| ORT_TARGET
| ORT_TARGET_DATA
))
9220 || code
== OMP_TARGET_ENTER_DATA
9221 || code
== OMP_TARGET_EXIT_DATA
))
9224 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9225 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9226 have detected a case that needs a GOMP_MAP_STRUCT
9229 = ((code
== OACC_EXIT_DATA
|| code
== OMP_TARGET_EXIT_DATA
)
9230 ? GOMP_MAP_DETACH
: GOMP_MAP_ATTACH
);
9231 OMP_CLAUSE_SET_MAP_KIND (c
, k
);
9232 has_attachments
= true;
9234 if (n
== NULL
|| (n
->value
& GOVD_MAP
) == 0)
9236 tree l
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9238 gomp_map_kind k
= attach
? GOMP_MAP_FORCE_PRESENT
9241 OMP_CLAUSE_SET_MAP_KIND (l
, k
);
9243 OMP_CLAUSE_DECL (l
) = unshare_expr (base_ref
);
9245 OMP_CLAUSE_DECL (l
) = decl
;
9249 : DECL_P (OMP_CLAUSE_DECL (l
))
9250 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l
))
9251 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l
))));
9252 if (struct_map_to_clause
== NULL
)
9253 struct_map_to_clause
= new hash_map
<tree
, tree
>;
9254 struct_map_to_clause
->put (decl
, l
);
9255 if (ptr
|| attach_detach
)
9257 insert_struct_comp_map (code
, c
, l
, *prev_list_p
,
9264 OMP_CLAUSE_CHAIN (l
) = c
;
9266 list_p
= &OMP_CLAUSE_CHAIN (l
);
9268 if (base_ref
&& code
== OMP_TARGET
)
9270 tree c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
9272 enum gomp_map_kind mkind
9273 = GOMP_MAP_FIRSTPRIVATE_REFERENCE
;
9274 OMP_CLAUSE_SET_MAP_KIND (c2
, mkind
);
9275 OMP_CLAUSE_DECL (c2
) = decl
;
9276 OMP_CLAUSE_SIZE (c2
) = size_zero_node
;
9277 OMP_CLAUSE_CHAIN (c2
) = OMP_CLAUSE_CHAIN (l
);
9278 OMP_CLAUSE_CHAIN (l
) = c2
;
9280 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9281 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9285 if (has_attachments
)
9286 flags
|= GOVD_MAP_HAS_ATTACHMENTS
;
9289 else if (struct_map_to_clause
)
9291 tree
*osc
= struct_map_to_clause
->get (decl
);
9292 tree
*sc
= NULL
, *scp
= NULL
;
9293 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
))
9296 n
->value
|= GOVD_SEEN
;
9297 sc
= &OMP_CLAUSE_CHAIN (*osc
);
9299 && (OMP_CLAUSE_MAP_KIND (*sc
)
9300 == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
9301 sc
= &OMP_CLAUSE_CHAIN (*sc
);
9302 /* Here "prev_list_p" is the end of the inserted
9303 alloc/release nodes after the struct node, OSC. */
9304 for (; *sc
!= c
; sc
= &OMP_CLAUSE_CHAIN (*sc
))
9305 if ((ptr
|| attach_detach
) && sc
== prev_list_p
)
9307 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9309 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9311 && (TREE_CODE (OMP_CLAUSE_DECL (*sc
))
9316 tree sc_decl
= OMP_CLAUSE_DECL (*sc
);
9317 poly_offset_int offsetn
;
9320 = extract_base_bit_offset (sc_decl
, NULL
,
9321 &bitposn
, &offsetn
);
9326 if ((region_type
& ORT_ACC
) != 0)
9328 /* This duplicate checking code is currently only
9329 enabled for OpenACC. */
9330 tree d1
= OMP_CLAUSE_DECL (*sc
);
9331 tree d2
= OMP_CLAUSE_DECL (c
);
9332 while (TREE_CODE (d1
) == ARRAY_REF
)
9333 d1
= TREE_OPERAND (d1
, 0);
9334 while (TREE_CODE (d2
) == ARRAY_REF
)
9335 d2
= TREE_OPERAND (d2
, 0);
9336 if (TREE_CODE (d1
) == INDIRECT_REF
)
9337 d1
= TREE_OPERAND (d1
, 0);
9338 if (TREE_CODE (d2
) == INDIRECT_REF
)
9339 d2
= TREE_OPERAND (d2
, 0);
9340 while (TREE_CODE (d1
) == COMPONENT_REF
)
9341 if (TREE_CODE (d2
) == COMPONENT_REF
9342 && TREE_OPERAND (d1
, 1)
9343 == TREE_OPERAND (d2
, 1))
9345 d1
= TREE_OPERAND (d1
, 0);
9346 d2
= TREE_OPERAND (d2
, 0);
9352 error_at (OMP_CLAUSE_LOCATION (c
),
9353 "%qE appears more than once in map "
9354 "clauses", OMP_CLAUSE_DECL (c
));
9359 if (maybe_lt (offset1
, offsetn
)
9360 || (known_eq (offset1
, offsetn
)
9361 && maybe_lt (bitpos1
, bitposn
)))
9363 if (ptr
|| attach_detach
)
9372 OMP_CLAUSE_SIZE (*osc
)
9373 = size_binop (PLUS_EXPR
, OMP_CLAUSE_SIZE (*osc
),
9375 if (ptr
|| attach_detach
)
9377 tree cl
= insert_struct_comp_map (code
, c
, NULL
,
9379 if (sc
== prev_list_p
)
9386 *prev_list_p
= OMP_CLAUSE_CHAIN (c
);
9387 list_p
= prev_list_p
;
9389 OMP_CLAUSE_CHAIN (c
) = *sc
;
9396 *list_p
= OMP_CLAUSE_CHAIN (c
);
9397 OMP_CLAUSE_CHAIN (c
) = *sc
;
9404 if (gimplify_expr (pd
, pre_p
, NULL
, is_gimple_lvalue
, fb_lvalue
)
9412 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ALWAYS_POINTER
9413 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_ATTACH_DETACH
9414 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_TO_PSET
9415 && OMP_CLAUSE_CHAIN (c
)
9416 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c
)) == OMP_CLAUSE_MAP
9417 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9418 == GOMP_MAP_ALWAYS_POINTER
)
9419 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9420 == GOMP_MAP_ATTACH_DETACH
)
9421 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c
))
9422 == GOMP_MAP_TO_PSET
)))
9423 prev_list_p
= list_p
;
9429 /* DECL_P (decl) == true */
9431 if (struct_map_to_clause
9432 && (sc
= struct_map_to_clause
->get (decl
)) != NULL
9433 && OMP_CLAUSE_MAP_KIND (*sc
) == GOMP_MAP_STRUCT
9434 && decl
== OMP_CLAUSE_DECL (*sc
))
9436 /* We have found a map of the whole structure after a
9437 leading GOMP_MAP_STRUCT has been created, so refill the
9438 leading clause into a map of the whole structure
9439 variable, and remove the current one.
9440 TODO: we should be able to remove some maps of the
9441 following structure element maps if they are of
9442 compatible TO/FROM/ALLOC type. */
9443 OMP_CLAUSE_SET_MAP_KIND (*sc
, OMP_CLAUSE_MAP_KIND (c
));
9444 OMP_CLAUSE_SIZE (*sc
) = unshare_expr (OMP_CLAUSE_SIZE (c
));
9449 flags
= GOVD_MAP
| GOVD_EXPLICIT
;
9450 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TO
9451 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_TOFROM
)
9452 flags
|= GOVD_MAP_ALWAYS_TO
;
9454 if ((code
== OMP_TARGET
9455 || code
== OMP_TARGET_DATA
9456 || code
== OMP_TARGET_ENTER_DATA
9457 || code
== OMP_TARGET_EXIT_DATA
)
9458 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ATTACH_DETACH
)
9460 for (struct gimplify_omp_ctx
*octx
= outer_ctx
; octx
;
9461 octx
= octx
->outer_context
)
9464 = splay_tree_lookup (octx
->variables
,
9465 (splay_tree_key
) OMP_CLAUSE_DECL (c
));
9466 /* If this is contained in an outer OpenMP region as a
9467 firstprivate value, remove the attach/detach. */
9468 if (n
&& (n
->value
& GOVD_FIRSTPRIVATE
))
9470 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
9475 enum gomp_map_kind map_kind
= (code
== OMP_TARGET_EXIT_DATA
9478 OMP_CLAUSE_SET_MAP_KIND (c
, map_kind
);
9483 case OMP_CLAUSE_DEPEND
:
9484 if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
9486 tree deps
= OMP_CLAUSE_DECL (c
);
9487 while (deps
&& TREE_CODE (deps
) == TREE_LIST
)
9489 if (TREE_CODE (TREE_PURPOSE (deps
)) == TRUNC_DIV_EXPR
9490 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps
), 1)))
9491 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps
), 1),
9492 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
9493 deps
= TREE_CHAIN (deps
);
9497 else if (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
9499 if (handled_depend_iterators
== -1)
9500 handled_depend_iterators
= gimplify_omp_depend (list_p
, pre_p
);
9501 if (handled_depend_iterators
)
9503 if (handled_depend_iterators
== 2)
9507 if (TREE_CODE (OMP_CLAUSE_DECL (c
)) == COMPOUND_EXPR
)
9509 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0), pre_p
,
9510 NULL
, is_gimple_val
, fb_rvalue
);
9511 OMP_CLAUSE_DECL (c
) = TREE_OPERAND (OMP_CLAUSE_DECL (c
), 1);
9513 if (error_operand_p (OMP_CLAUSE_DECL (c
)))
9518 OMP_CLAUSE_DECL (c
) = build_fold_addr_expr (OMP_CLAUSE_DECL (c
));
9519 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
, NULL
,
9520 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9525 if (code
== OMP_TASK
)
9526 ctx
->has_depend
= true;
9530 case OMP_CLAUSE_FROM
:
9531 case OMP_CLAUSE__CACHE_
:
9532 decl
= OMP_CLAUSE_DECL (c
);
9533 if (error_operand_p (decl
))
9538 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
9539 OMP_CLAUSE_SIZE (c
) = DECL_P (decl
) ? DECL_SIZE_UNIT (decl
)
9540 : TYPE_SIZE_UNIT (TREE_TYPE (decl
));
9541 if (gimplify_expr (&OMP_CLAUSE_SIZE (c
), pre_p
,
9542 NULL
, is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9549 if (gimplify_expr (&OMP_CLAUSE_DECL (c
), pre_p
,
9550 NULL
, is_gimple_lvalue
, fb_lvalue
)
9560 case OMP_CLAUSE_USE_DEVICE_PTR
:
9561 case OMP_CLAUSE_USE_DEVICE_ADDR
:
9562 flags
= GOVD_EXPLICIT
;
9565 case OMP_CLAUSE_IS_DEVICE_PTR
:
9566 flags
= GOVD_FIRSTPRIVATE
| GOVD_EXPLICIT
;
9570 decl
= OMP_CLAUSE_DECL (c
);
9572 if (error_operand_p (decl
))
9577 if (DECL_NAME (decl
) == NULL_TREE
&& (flags
& GOVD_SHARED
) == 0)
9579 tree t
= omp_member_access_dummy_var (decl
);
9582 tree v
= DECL_VALUE_EXPR (decl
);
9583 DECL_NAME (decl
) = DECL_NAME (TREE_OPERAND (v
, 1));
9585 omp_notice_variable (outer_ctx
, t
, true);
9588 if (code
== OACC_DATA
9589 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
9590 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
9591 flags
|= GOVD_MAP_0LEN_ARRAY
;
9592 omp_add_variable (ctx
, decl
, flags
);
9593 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9594 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_IN_REDUCTION
9595 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_TASK_REDUCTION
)
9596 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
))
9598 omp_add_variable (ctx
, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
),
9599 GOVD_LOCAL
| GOVD_SEEN
);
9600 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
)
9601 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c
),
9603 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9605 omp_add_variable (ctx
,
9606 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
),
9607 GOVD_LOCAL
| GOVD_SEEN
);
9608 gimplify_omp_ctxp
= ctx
;
9609 push_gimplify_context ();
9611 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
) = NULL
;
9612 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
) = NULL
;
9614 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c
),
9615 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
));
9616 pop_gimplify_context
9617 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c
)));
9618 push_gimplify_context ();
9619 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c
),
9620 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
));
9621 pop_gimplify_context
9622 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c
)));
9623 OMP_CLAUSE_REDUCTION_INIT (c
) = NULL_TREE
;
9624 OMP_CLAUSE_REDUCTION_MERGE (c
) = NULL_TREE
;
9626 gimplify_omp_ctxp
= outer_ctx
;
9628 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
9629 && OMP_CLAUSE_LASTPRIVATE_STMT (c
))
9631 gimplify_omp_ctxp
= ctx
;
9632 push_gimplify_context ();
9633 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c
)) != BIND_EXPR
)
9635 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9637 TREE_SIDE_EFFECTS (bind
) = 1;
9638 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LASTPRIVATE_STMT (c
);
9639 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = bind
;
9641 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c
),
9642 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
));
9643 pop_gimplify_context
9644 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
)));
9645 OMP_CLAUSE_LASTPRIVATE_STMT (c
) = NULL_TREE
;
9647 gimplify_omp_ctxp
= outer_ctx
;
9649 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
9650 && OMP_CLAUSE_LINEAR_STMT (c
))
9652 gimplify_omp_ctxp
= ctx
;
9653 push_gimplify_context ();
9654 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c
)) != BIND_EXPR
)
9656 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
,
9658 TREE_SIDE_EFFECTS (bind
) = 1;
9659 BIND_EXPR_BODY (bind
) = OMP_CLAUSE_LINEAR_STMT (c
);
9660 OMP_CLAUSE_LINEAR_STMT (c
) = bind
;
9662 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c
),
9663 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
));
9664 pop_gimplify_context
9665 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
)));
9666 OMP_CLAUSE_LINEAR_STMT (c
) = NULL_TREE
;
9668 gimplify_omp_ctxp
= outer_ctx
;
9674 case OMP_CLAUSE_COPYIN
:
9675 case OMP_CLAUSE_COPYPRIVATE
:
9676 decl
= OMP_CLAUSE_DECL (c
);
9677 if (error_operand_p (decl
))
9682 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_COPYPRIVATE
9684 && !omp_check_private (ctx
, decl
, true))
9687 if (is_global_var (decl
))
9689 if (DECL_THREAD_LOCAL_P (decl
))
9691 else if (DECL_HAS_VALUE_EXPR_P (decl
))
9693 tree value
= get_base_address (DECL_VALUE_EXPR (decl
));
9697 && DECL_THREAD_LOCAL_P (value
))
9702 error_at (OMP_CLAUSE_LOCATION (c
),
9703 "copyprivate variable %qE is not threadprivate"
9704 " or private in outer context", DECL_NAME (decl
));
9707 if ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9708 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
9709 || OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
9711 && ((region_type
& ORT_TASKLOOP
) == ORT_TASKLOOP
9712 || (region_type
== ORT_WORKSHARE
9713 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9714 && (OMP_CLAUSE_REDUCTION_INSCAN (c
)
9715 || code
== OMP_LOOP
)))
9716 && (outer_ctx
->region_type
== ORT_COMBINED_PARALLEL
9717 || (code
== OMP_LOOP
9718 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9719 && ((outer_ctx
->region_type
& ORT_COMBINED_TEAMS
)
9720 == ORT_COMBINED_TEAMS
))))
9723 = splay_tree_lookup (outer_ctx
->variables
,
9724 (splay_tree_key
)decl
);
9725 if (on
== NULL
|| (on
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
9727 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_REDUCTION
9728 && TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9729 && (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
9730 || (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
9731 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl
)))
9733 omp_firstprivatize_variable (outer_ctx
, decl
);
9736 omp_add_variable (outer_ctx
, decl
,
9737 GOVD_SEEN
| GOVD_SHARED
);
9738 if (outer_ctx
->outer_context
)
9739 omp_notice_variable (outer_ctx
->outer_context
, decl
,
9745 omp_notice_variable (outer_ctx
, decl
, true);
9746 if (check_non_private
9747 && region_type
== ORT_WORKSHARE
9748 && (OMP_CLAUSE_CODE (c
) != OMP_CLAUSE_REDUCTION
9749 || decl
== OMP_CLAUSE_DECL (c
)
9750 || (TREE_CODE (OMP_CLAUSE_DECL (c
)) == MEM_REF
9751 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9753 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c
), 0))
9754 == POINTER_PLUS_EXPR
9755 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9756 (OMP_CLAUSE_DECL (c
), 0), 0))
9758 && omp_check_private (ctx
, decl
, false))
9760 error ("%s variable %qE is private in outer context",
9761 check_non_private
, DECL_NAME (decl
));
9766 case OMP_CLAUSE_DETACH
:
9767 flags
= GOVD_FIRSTPRIVATE
| GOVD_SEEN
;
9771 if (OMP_CLAUSE_IF_MODIFIER (c
) != ERROR_MARK
9772 && OMP_CLAUSE_IF_MODIFIER (c
) != code
)
9775 for (int i
= 0; i
< 2; i
++)
9776 switch (i
? OMP_CLAUSE_IF_MODIFIER (c
) : code
)
9778 case VOID_CST
: p
[i
] = "cancel"; break;
9779 case OMP_PARALLEL
: p
[i
] = "parallel"; break;
9780 case OMP_SIMD
: p
[i
] = "simd"; break;
9781 case OMP_TASK
: p
[i
] = "task"; break;
9782 case OMP_TASKLOOP
: p
[i
] = "taskloop"; break;
9783 case OMP_TARGET_DATA
: p
[i
] = "target data"; break;
9784 case OMP_TARGET
: p
[i
] = "target"; break;
9785 case OMP_TARGET_UPDATE
: p
[i
] = "target update"; break;
9786 case OMP_TARGET_ENTER_DATA
:
9787 p
[i
] = "target enter data"; break;
9788 case OMP_TARGET_EXIT_DATA
: p
[i
] = "target exit data"; break;
9789 default: gcc_unreachable ();
9791 error_at (OMP_CLAUSE_LOCATION (c
),
9792 "expected %qs %<if%> clause modifier rather than %qs",
9798 case OMP_CLAUSE_FINAL
:
9799 OMP_CLAUSE_OPERAND (c
, 0)
9800 = gimple_boolify (OMP_CLAUSE_OPERAND (c
, 0));
9803 case OMP_CLAUSE_SCHEDULE
:
9804 case OMP_CLAUSE_NUM_THREADS
:
9805 case OMP_CLAUSE_NUM_TEAMS
:
9806 case OMP_CLAUSE_THREAD_LIMIT
:
9807 case OMP_CLAUSE_DIST_SCHEDULE
:
9808 case OMP_CLAUSE_DEVICE
:
9809 case OMP_CLAUSE_PRIORITY
:
9810 case OMP_CLAUSE_GRAINSIZE
:
9811 case OMP_CLAUSE_NUM_TASKS
:
9812 case OMP_CLAUSE_HINT
:
9813 case OMP_CLAUSE_ASYNC
:
9814 case OMP_CLAUSE_WAIT
:
9815 case OMP_CLAUSE_NUM_GANGS
:
9816 case OMP_CLAUSE_NUM_WORKERS
:
9817 case OMP_CLAUSE_VECTOR_LENGTH
:
9818 case OMP_CLAUSE_WORKER
:
9819 case OMP_CLAUSE_VECTOR
:
9820 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9821 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9825 case OMP_CLAUSE_GANG
:
9826 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 0), pre_p
, NULL
,
9827 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9829 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c
, 1), pre_p
, NULL
,
9830 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9834 case OMP_CLAUSE_NOWAIT
:
9838 case OMP_CLAUSE_ORDERED
:
9839 case OMP_CLAUSE_UNTIED
:
9840 case OMP_CLAUSE_COLLAPSE
:
9841 case OMP_CLAUSE_TILE
:
9842 case OMP_CLAUSE_AUTO
:
9843 case OMP_CLAUSE_SEQ
:
9844 case OMP_CLAUSE_INDEPENDENT
:
9845 case OMP_CLAUSE_MERGEABLE
:
9846 case OMP_CLAUSE_PROC_BIND
:
9847 case OMP_CLAUSE_SAFELEN
:
9848 case OMP_CLAUSE_SIMDLEN
:
9849 case OMP_CLAUSE_NOGROUP
:
9850 case OMP_CLAUSE_THREADS
:
9851 case OMP_CLAUSE_SIMD
:
9852 case OMP_CLAUSE_BIND
:
9853 case OMP_CLAUSE_IF_PRESENT
:
9854 case OMP_CLAUSE_FINALIZE
:
9857 case OMP_CLAUSE_ORDER
:
9858 ctx
->order_concurrent
= true;
9861 case OMP_CLAUSE_DEFAULTMAP
:
9862 enum gimplify_defaultmap_kind gdmkmin
, gdmkmax
;
9863 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c
))
9865 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED
:
9866 gdmkmin
= GDMK_SCALAR
;
9867 gdmkmax
= GDMK_POINTER
;
9869 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR
:
9870 gdmkmin
= gdmkmax
= GDMK_SCALAR
;
9872 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE
:
9873 gdmkmin
= gdmkmax
= GDMK_AGGREGATE
;
9875 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE
:
9876 gdmkmin
= gdmkmax
= GDMK_ALLOCATABLE
;
9878 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER
:
9879 gdmkmin
= gdmkmax
= GDMK_POINTER
;
9884 for (int gdmk
= gdmkmin
; gdmk
<= gdmkmax
; gdmk
++)
9885 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c
))
9887 case OMP_CLAUSE_DEFAULTMAP_ALLOC
:
9888 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_ALLOC_ONLY
;
9890 case OMP_CLAUSE_DEFAULTMAP_TO
:
9891 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_TO_ONLY
;
9893 case OMP_CLAUSE_DEFAULTMAP_FROM
:
9894 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_FROM_ONLY
;
9896 case OMP_CLAUSE_DEFAULTMAP_TOFROM
:
9897 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9899 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE
:
9900 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9902 case OMP_CLAUSE_DEFAULTMAP_NONE
:
9903 ctx
->defaultmap
[gdmk
] = 0;
9905 case OMP_CLAUSE_DEFAULTMAP_DEFAULT
:
9909 ctx
->defaultmap
[gdmk
] = GOVD_FIRSTPRIVATE
;
9911 case GDMK_AGGREGATE
:
9912 case GDMK_ALLOCATABLE
:
9913 ctx
->defaultmap
[gdmk
] = GOVD_MAP
;
9916 ctx
->defaultmap
[gdmk
] = GOVD_MAP
| GOVD_MAP_0LEN_ARRAY
;
9927 case OMP_CLAUSE_ALIGNED
:
9928 decl
= OMP_CLAUSE_DECL (c
);
9929 if (error_operand_p (decl
))
9934 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c
), pre_p
, NULL
,
9935 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9940 if (!is_global_var (decl
)
9941 && TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
9942 omp_add_variable (ctx
, decl
, GOVD_ALIGNED
);
9945 case OMP_CLAUSE_NONTEMPORAL
:
9946 decl
= OMP_CLAUSE_DECL (c
);
9947 if (error_operand_p (decl
))
9952 omp_add_variable (ctx
, decl
, GOVD_NONTEMPORAL
);
9955 case OMP_CLAUSE_ALLOCATE
:
9956 decl
= OMP_CLAUSE_DECL (c
);
9957 if (error_operand_p (decl
))
9962 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
), pre_p
, NULL
,
9963 is_gimple_val
, fb_rvalue
) == GS_ERROR
)
9968 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
) == NULL_TREE
9969 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
))
9972 else if (code
== OMP_TASKLOOP
9973 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
9974 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
9975 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
9976 pre_p
, NULL
, false);
9979 case OMP_CLAUSE_DEFAULT
:
9980 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_KIND (c
);
9983 case OMP_CLAUSE_INCLUSIVE
:
9984 case OMP_CLAUSE_EXCLUSIVE
:
9985 decl
= OMP_CLAUSE_DECL (c
);
9987 splay_tree_node n
= splay_tree_lookup (outer_ctx
->variables
,
9988 (splay_tree_key
) decl
);
9989 if (n
== NULL
|| (n
->value
& GOVD_REDUCTION
) == 0)
9991 error_at (OMP_CLAUSE_LOCATION (c
),
9992 "%qD specified in %qs clause but not in %<inscan%> "
9993 "%<reduction%> clause on the containing construct",
9994 decl
, omp_clause_code_name
[OMP_CLAUSE_CODE (c
)]);
9999 n
->value
|= GOVD_REDUCTION_INSCAN
;
10000 if (outer_ctx
->region_type
== ORT_SIMD
10001 && outer_ctx
->outer_context
10002 && outer_ctx
->outer_context
->region_type
== ORT_WORKSHARE
)
10004 n
= splay_tree_lookup (outer_ctx
->outer_context
->variables
,
10005 (splay_tree_key
) decl
);
10006 if (n
&& (n
->value
& GOVD_REDUCTION
) != 0)
10007 n
->value
|= GOVD_REDUCTION_INSCAN
;
10014 gcc_unreachable ();
10017 if (code
== OACC_DATA
10018 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
10019 && (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
10020 || OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10023 *list_p
= OMP_CLAUSE_CHAIN (c
);
10025 list_p
= &OMP_CLAUSE_CHAIN (c
);
10028 ctx
->clauses
= *orig_list_p
;
10029 gimplify_omp_ctxp
= ctx
;
10030 if (struct_map_to_clause
)
10031 delete struct_map_to_clause
;
10032 if (struct_deref_set
)
10033 delete struct_deref_set
;
10036 /* Return true if DECL is a candidate for shared to firstprivate
10037 optimization. We only consider non-addressable scalars, not
10038 too big, and not references. */
10041 omp_shared_to_firstprivate_optimizable_decl_p (tree decl
)
10043 if (TREE_ADDRESSABLE (decl
))
10045 tree type
= TREE_TYPE (decl
);
10046 if (!is_gimple_reg_type (type
)
10047 || TREE_CODE (type
) == REFERENCE_TYPE
10048 || TREE_ADDRESSABLE (type
))
10050 /* Don't optimize too large decls, as each thread/task will have
10052 HOST_WIDE_INT len
= int_size_in_bytes (type
);
10053 if (len
== -1 || len
> 4 * POINTER_SIZE
/ BITS_PER_UNIT
)
10055 if (lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10060 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10061 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
10062 GOVD_WRITTEN in outer contexts. */
10065 omp_mark_stores (struct gimplify_omp_ctx
*ctx
, tree decl
)
10067 for (; ctx
; ctx
= ctx
->outer_context
)
10069 splay_tree_node n
= splay_tree_lookup (ctx
->variables
,
10070 (splay_tree_key
) decl
);
10073 else if (n
->value
& GOVD_SHARED
)
10075 n
->value
|= GOVD_WRITTEN
;
10078 else if (n
->value
& GOVD_DATA_SHARE_CLASS
)
10083 /* Helper callback for walk_gimple_seq to discover possible stores
10084 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10085 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10089 omp_find_stores_op (tree
*tp
, int *walk_subtrees
, void *data
)
10091 struct walk_stmt_info
*wi
= (struct walk_stmt_info
*) data
;
10093 *walk_subtrees
= 0;
10100 if (handled_component_p (op
))
10101 op
= TREE_OPERAND (op
, 0);
10102 else if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
10103 && TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
10104 op
= TREE_OPERAND (TREE_OPERAND (op
, 0), 0);
10109 if (!DECL_P (op
) || !omp_shared_to_firstprivate_optimizable_decl_p (op
))
10112 omp_mark_stores (gimplify_omp_ctxp
, op
);
10116 /* Helper callback for walk_gimple_seq to discover possible stores
10117 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10118 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
10122 omp_find_stores_stmt (gimple_stmt_iterator
*gsi_p
,
10123 bool *handled_ops_p
,
10124 struct walk_stmt_info
*wi
)
10126 gimple
*stmt
= gsi_stmt (*gsi_p
);
10127 switch (gimple_code (stmt
))
10129 /* Don't recurse on OpenMP constructs for which
10130 gimplify_adjust_omp_clauses already handled the bodies,
10131 except handle gimple_omp_for_pre_body. */
10132 case GIMPLE_OMP_FOR
:
10133 *handled_ops_p
= true;
10134 if (gimple_omp_for_pre_body (stmt
))
10135 walk_gimple_seq (gimple_omp_for_pre_body (stmt
),
10136 omp_find_stores_stmt
, omp_find_stores_op
, wi
);
10138 case GIMPLE_OMP_PARALLEL
:
10139 case GIMPLE_OMP_TASK
:
10140 case GIMPLE_OMP_SECTIONS
:
10141 case GIMPLE_OMP_SINGLE
:
10142 case GIMPLE_OMP_TARGET
:
10143 case GIMPLE_OMP_TEAMS
:
10144 case GIMPLE_OMP_CRITICAL
:
10145 *handled_ops_p
= true;
10153 struct gimplify_adjust_omp_clauses_data
10159 /* For all variables that were not actually used within the context,
10160 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
10163 gimplify_adjust_omp_clauses_1 (splay_tree_node n
, void *data
)
10165 tree
*list_p
= ((struct gimplify_adjust_omp_clauses_data
*) data
)->list_p
;
10167 = ((struct gimplify_adjust_omp_clauses_data
*) data
)->pre_p
;
10168 tree decl
= (tree
) n
->key
;
10169 unsigned flags
= n
->value
;
10170 enum omp_clause_code code
;
10172 bool private_debug
;
10174 if (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
10175 && (flags
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0)
10176 flags
= GOVD_SHARED
| GOVD_SEEN
| GOVD_WRITTEN
;
10177 if (flags
& (GOVD_EXPLICIT
| GOVD_LOCAL
))
10179 if ((flags
& GOVD_SEEN
) == 0)
10181 if ((flags
& GOVD_MAP_HAS_ATTACHMENTS
) != 0)
10183 if (flags
& GOVD_DEBUG_PRIVATE
)
10185 gcc_assert ((flags
& GOVD_DATA_SHARE_CLASS
) == GOVD_SHARED
);
10186 private_debug
= true;
10188 else if (flags
& GOVD_MAP
)
10189 private_debug
= false;
10192 = lang_hooks
.decls
.omp_private_debug_clause (decl
,
10193 !!(flags
& GOVD_SHARED
));
10195 code
= OMP_CLAUSE_PRIVATE
;
10196 else if (flags
& GOVD_MAP
)
10198 code
= OMP_CLAUSE_MAP
;
10199 if ((gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
10200 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
10202 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl
);
10206 && DECL_IN_CONSTANT_POOL (decl
)
10207 && !lookup_attribute ("omp declare target",
10208 DECL_ATTRIBUTES (decl
)))
10210 tree id
= get_identifier ("omp declare target");
10211 DECL_ATTRIBUTES (decl
)
10212 = tree_cons (id
, NULL_TREE
, DECL_ATTRIBUTES (decl
));
10213 varpool_node
*node
= varpool_node::get (decl
);
10216 node
->offloadable
= 1;
10217 if (ENABLE_OFFLOADING
)
10218 g
->have_offload
= true;
10222 else if (flags
& GOVD_SHARED
)
10224 if (is_global_var (decl
))
10226 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10227 while (ctx
!= NULL
)
10230 = splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10231 if (on
&& (on
->value
& (GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE
10232 | GOVD_PRIVATE
| GOVD_REDUCTION
10233 | GOVD_LINEAR
| GOVD_MAP
)) != 0)
10235 ctx
= ctx
->outer_context
;
10240 code
= OMP_CLAUSE_SHARED
;
10241 /* Don't optimize shared into firstprivate for read-only vars
10242 on tasks with depend clause, we shouldn't try to copy them
10243 until the dependencies are satisfied. */
10244 if (gimplify_omp_ctxp
->has_depend
)
10245 flags
|= GOVD_WRITTEN
;
10247 else if (flags
& GOVD_PRIVATE
)
10248 code
= OMP_CLAUSE_PRIVATE
;
10249 else if (flags
& GOVD_FIRSTPRIVATE
)
10251 code
= OMP_CLAUSE_FIRSTPRIVATE
;
10252 if ((gimplify_omp_ctxp
->region_type
& ORT_TARGET
)
10253 && (gimplify_omp_ctxp
->region_type
& ORT_ACC
) == 0
10254 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl
))))
10256 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10257 "%<target%> construct", decl
);
10261 else if (flags
& GOVD_LASTPRIVATE
)
10262 code
= OMP_CLAUSE_LASTPRIVATE
;
10263 else if (flags
& (GOVD_ALIGNED
| GOVD_NONTEMPORAL
))
10265 else if (flags
& GOVD_CONDTEMP
)
10267 code
= OMP_CLAUSE__CONDTEMP_
;
10268 gimple_add_tmp_var (decl
);
10271 gcc_unreachable ();
10273 if (((flags
& GOVD_LASTPRIVATE
)
10274 || (code
== OMP_CLAUSE_SHARED
&& (flags
& GOVD_WRITTEN
)))
10275 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10276 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10278 tree chain
= *list_p
;
10279 clause
= build_omp_clause (input_location
, code
);
10280 OMP_CLAUSE_DECL (clause
) = decl
;
10281 OMP_CLAUSE_CHAIN (clause
) = chain
;
10283 OMP_CLAUSE_PRIVATE_DEBUG (clause
) = 1;
10284 else if (code
== OMP_CLAUSE_PRIVATE
&& (flags
& GOVD_PRIVATE_OUTER_REF
))
10285 OMP_CLAUSE_PRIVATE_OUTER_REF (clause
) = 1;
10286 else if (code
== OMP_CLAUSE_SHARED
10287 && (flags
& GOVD_WRITTEN
) == 0
10288 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10289 OMP_CLAUSE_SHARED_READONLY (clause
) = 1;
10290 else if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_EXPLICIT
) == 0)
10291 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause
) = 1;
10292 else if (code
== OMP_CLAUSE_MAP
&& (flags
& GOVD_MAP_0LEN_ARRAY
) != 0)
10294 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_MAP
);
10295 OMP_CLAUSE_DECL (nc
) = decl
;
10296 if (TREE_CODE (TREE_TYPE (decl
)) == REFERENCE_TYPE
10297 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl
))) == POINTER_TYPE
)
10298 OMP_CLAUSE_DECL (clause
)
10299 = build_simple_mem_ref_loc (input_location
, decl
);
10300 OMP_CLAUSE_DECL (clause
)
10301 = build2 (MEM_REF
, char_type_node
, OMP_CLAUSE_DECL (clause
),
10302 build_int_cst (build_pointer_type (char_type_node
), 0));
10303 OMP_CLAUSE_SIZE (clause
) = size_zero_node
;
10304 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10305 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_ALLOC
);
10306 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause
) = 1;
10307 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10308 OMP_CLAUSE_CHAIN (nc
) = chain
;
10309 OMP_CLAUSE_CHAIN (clause
) = nc
;
10310 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10311 gimplify_omp_ctxp
= ctx
->outer_context
;
10312 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause
), 0),
10313 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10314 gimplify_omp_ctxp
= ctx
;
10316 else if (code
== OMP_CLAUSE_MAP
)
10319 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10320 switch (flags
& (GOVD_MAP_TO_ONLY
10322 | GOVD_MAP_FORCE_PRESENT
10323 | GOVD_MAP_ALLOC_ONLY
10324 | GOVD_MAP_FROM_ONLY
))
10327 kind
= GOMP_MAP_TOFROM
;
10329 case GOVD_MAP_FORCE
:
10330 kind
= GOMP_MAP_TOFROM
| GOMP_MAP_FLAG_FORCE
;
10332 case GOVD_MAP_TO_ONLY
:
10333 kind
= GOMP_MAP_TO
;
10335 case GOVD_MAP_FROM_ONLY
:
10336 kind
= GOMP_MAP_FROM
;
10338 case GOVD_MAP_ALLOC_ONLY
:
10339 kind
= GOMP_MAP_ALLOC
;
10341 case GOVD_MAP_TO_ONLY
| GOVD_MAP_FORCE
:
10342 kind
= GOMP_MAP_TO
| GOMP_MAP_FLAG_FORCE
;
10344 case GOVD_MAP_FORCE_PRESENT
:
10345 kind
= GOMP_MAP_FORCE_PRESENT
;
10348 gcc_unreachable ();
10350 OMP_CLAUSE_SET_MAP_KIND (clause
, kind
);
10351 if (DECL_SIZE (decl
)
10352 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10354 tree decl2
= DECL_VALUE_EXPR (decl
);
10355 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10356 decl2
= TREE_OPERAND (decl2
, 0);
10357 gcc_assert (DECL_P (decl2
));
10358 tree mem
= build_simple_mem_ref (decl2
);
10359 OMP_CLAUSE_DECL (clause
) = mem
;
10360 OMP_CLAUSE_SIZE (clause
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10361 if (gimplify_omp_ctxp
->outer_context
)
10363 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
->outer_context
;
10364 omp_notice_variable (ctx
, decl2
, true);
10365 omp_notice_variable (ctx
, OMP_CLAUSE_SIZE (clause
), true);
10367 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10369 OMP_CLAUSE_DECL (nc
) = decl
;
10370 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10371 if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
)
10372 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_POINTER
);
10374 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10375 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10376 OMP_CLAUSE_CHAIN (clause
) = nc
;
10378 else if (gimplify_omp_ctxp
->target_firstprivatize_array_bases
10379 && lang_hooks
.decls
.omp_privatize_by_reference (decl
))
10381 OMP_CLAUSE_DECL (clause
) = build_simple_mem_ref (decl
);
10382 OMP_CLAUSE_SIZE (clause
)
10383 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl
))));
10384 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10385 gimplify_omp_ctxp
= ctx
->outer_context
;
10386 gimplify_expr (&OMP_CLAUSE_SIZE (clause
),
10387 pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
10388 gimplify_omp_ctxp
= ctx
;
10389 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
),
10391 OMP_CLAUSE_DECL (nc
) = decl
;
10392 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10393 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_FIRSTPRIVATE_REFERENCE
);
10394 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (clause
);
10395 OMP_CLAUSE_CHAIN (clause
) = nc
;
10398 OMP_CLAUSE_SIZE (clause
) = DECL_SIZE_UNIT (decl
);
10400 if (code
== OMP_CLAUSE_FIRSTPRIVATE
&& (flags
& GOVD_LASTPRIVATE
) != 0)
10402 tree nc
= build_omp_clause (input_location
, OMP_CLAUSE_LASTPRIVATE
);
10403 OMP_CLAUSE_DECL (nc
) = decl
;
10404 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc
) = 1;
10405 OMP_CLAUSE_CHAIN (nc
) = chain
;
10406 OMP_CLAUSE_CHAIN (clause
) = nc
;
10407 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10408 gimplify_omp_ctxp
= ctx
->outer_context
;
10409 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
10410 (ctx
->region_type
& ORT_ACC
) != 0);
10411 gimplify_omp_ctxp
= ctx
;
10414 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10415 gimplify_omp_ctxp
= ctx
->outer_context
;
10416 lang_hooks
.decls
.omp_finish_clause (clause
, pre_p
,
10417 (ctx
->region_type
& ORT_ACC
) != 0);
10418 if (gimplify_omp_ctxp
)
10419 for (; clause
!= chain
; clause
= OMP_CLAUSE_CHAIN (clause
))
10420 if (OMP_CLAUSE_CODE (clause
) == OMP_CLAUSE_MAP
10421 && DECL_P (OMP_CLAUSE_SIZE (clause
)))
10422 omp_notice_variable (gimplify_omp_ctxp
, OMP_CLAUSE_SIZE (clause
),
10424 gimplify_omp_ctxp
= ctx
;
10429 gimplify_adjust_omp_clauses (gimple_seq
*pre_p
, gimple_seq body
, tree
*list_p
,
10430 enum tree_code code
)
10432 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
10433 tree
*orig_list_p
= list_p
;
10435 bool has_inscan_reductions
= false;
10439 struct gimplify_omp_ctx
*octx
;
10440 for (octx
= ctx
; octx
; octx
= octx
->outer_context
)
10441 if ((octx
->region_type
& (ORT_PARALLEL
| ORT_TASK
| ORT_TEAMS
)) != 0)
10445 struct walk_stmt_info wi
;
10446 memset (&wi
, 0, sizeof (wi
));
10447 walk_gimple_seq (body
, omp_find_stores_stmt
,
10448 omp_find_stores_op
, &wi
);
10452 if (ctx
->add_safelen1
)
10454 /* If there are VLAs in the body of simd loop, prevent
10456 gcc_assert (ctx
->region_type
== ORT_SIMD
);
10457 c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_SAFELEN
);
10458 OMP_CLAUSE_SAFELEN_EXPR (c
) = integer_one_node
;
10459 OMP_CLAUSE_CHAIN (c
) = *list_p
;
10461 list_p
= &OMP_CLAUSE_CHAIN (c
);
10464 if (ctx
->region_type
== ORT_WORKSHARE
10465 && ctx
->outer_context
10466 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
)
10468 for (c
= ctx
->outer_context
->clauses
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10469 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10470 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
10472 decl
= OMP_CLAUSE_DECL (c
);
10474 = splay_tree_lookup (ctx
->outer_context
->variables
,
10475 (splay_tree_key
) decl
);
10476 gcc_checking_assert (!splay_tree_lookup (ctx
->variables
,
10477 (splay_tree_key
) decl
));
10478 omp_add_variable (ctx
, decl
, n
->value
);
10479 tree c2
= copy_node (c
);
10480 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10482 if ((n
->value
& GOVD_FIRSTPRIVATE
) == 0)
10484 c2
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10485 OMP_CLAUSE_FIRSTPRIVATE
);
10486 OMP_CLAUSE_DECL (c2
) = decl
;
10487 OMP_CLAUSE_CHAIN (c2
) = *list_p
;
10491 while ((c
= *list_p
) != NULL
)
10494 bool remove
= false;
10496 switch (OMP_CLAUSE_CODE (c
))
10498 case OMP_CLAUSE_FIRSTPRIVATE
:
10499 if ((ctx
->region_type
& ORT_TARGET
)
10500 && (ctx
->region_type
& ORT_ACC
) == 0
10501 && TYPE_ATOMIC (strip_array_types
10502 (TREE_TYPE (OMP_CLAUSE_DECL (c
)))))
10504 error_at (OMP_CLAUSE_LOCATION (c
),
10505 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10506 "%<target%> construct", OMP_CLAUSE_DECL (c
));
10511 case OMP_CLAUSE_PRIVATE
:
10512 case OMP_CLAUSE_SHARED
:
10513 case OMP_CLAUSE_LINEAR
:
10514 decl
= OMP_CLAUSE_DECL (c
);
10515 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10516 remove
= !(n
->value
& GOVD_SEEN
);
10517 if ((n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
) != 0
10518 && code
== OMP_PARALLEL
10519 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_FIRSTPRIVATE
)
10523 bool shared
= OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
;
10524 if ((n
->value
& GOVD_DEBUG_PRIVATE
)
10525 || lang_hooks
.decls
.omp_private_debug_clause (decl
, shared
))
10527 gcc_assert ((n
->value
& GOVD_DEBUG_PRIVATE
) == 0
10528 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
10530 OMP_CLAUSE_SET_CODE (c
, OMP_CLAUSE_PRIVATE
);
10531 OMP_CLAUSE_PRIVATE_DEBUG (c
) = 1;
10533 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10536 n
->value
|= GOVD_WRITTEN
;
10537 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10538 && (n
->value
& GOVD_WRITTEN
) == 0
10540 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10541 OMP_CLAUSE_SHARED_READONLY (c
) = 1;
10542 else if (DECL_P (decl
)
10543 && ((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_SHARED
10544 && (n
->value
& GOVD_WRITTEN
) != 0)
10545 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10546 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)))
10547 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10548 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10551 n
->value
&= ~GOVD_EXPLICIT
;
10554 case OMP_CLAUSE_LASTPRIVATE
:
10555 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10556 accurately reflect the presence of a FIRSTPRIVATE clause. */
10557 decl
= OMP_CLAUSE_DECL (c
);
10558 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10559 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
)
10560 = (n
->value
& GOVD_FIRSTPRIVATE
) != 0;
10561 if (code
== OMP_DISTRIBUTE
10562 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
10565 error_at (OMP_CLAUSE_LOCATION (c
),
10566 "same variable used in %<firstprivate%> and "
10567 "%<lastprivate%> clauses on %<distribute%> "
10571 && OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
10573 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10574 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10575 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
) && code
== OMP_PARALLEL
)
10579 case OMP_CLAUSE_ALIGNED
:
10580 decl
= OMP_CLAUSE_DECL (c
);
10581 if (!is_global_var (decl
))
10583 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10584 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10585 if (!remove
&& TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
)
10587 struct gimplify_omp_ctx
*octx
;
10589 && (n
->value
& (GOVD_DATA_SHARE_CLASS
10590 & ~GOVD_FIRSTPRIVATE
)))
10593 for (octx
= ctx
->outer_context
; octx
;
10594 octx
= octx
->outer_context
)
10596 n
= splay_tree_lookup (octx
->variables
,
10597 (splay_tree_key
) decl
);
10600 if (n
->value
& GOVD_LOCAL
)
10602 /* We have to avoid assigning a shared variable
10603 to itself when trying to add
10604 __builtin_assume_aligned. */
10605 if (n
->value
& GOVD_SHARED
)
10613 else if (TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
)
10615 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10616 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
10621 case OMP_CLAUSE_NONTEMPORAL
:
10622 decl
= OMP_CLAUSE_DECL (c
);
10623 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10624 remove
= n
== NULL
|| !(n
->value
& GOVD_SEEN
);
10627 case OMP_CLAUSE_MAP
:
10628 if (code
== OMP_TARGET_EXIT_DATA
10629 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_ALWAYS_POINTER
)
10634 decl
= OMP_CLAUSE_DECL (c
);
10635 /* Data clauses associated with reductions must be
10636 compatible with present_or_copy. Warn and adjust the clause
10637 if that is not the case. */
10638 if (ctx
->region_type
== ORT_ACC_PARALLEL
10639 || ctx
->region_type
== ORT_ACC_SERIAL
)
10641 tree t
= DECL_P (decl
) ? decl
: TREE_OPERAND (decl
, 0);
10645 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) t
);
10647 if (n
&& (n
->value
& GOVD_REDUCTION
))
10649 enum gomp_map_kind kind
= OMP_CLAUSE_MAP_KIND (c
);
10651 OMP_CLAUSE_MAP_IN_REDUCTION (c
) = 1;
10652 if ((kind
& GOMP_MAP_TOFROM
) != GOMP_MAP_TOFROM
10653 && kind
!= GOMP_MAP_FORCE_PRESENT
10654 && kind
!= GOMP_MAP_POINTER
)
10656 warning_at (OMP_CLAUSE_LOCATION (c
), 0,
10657 "incompatible data clause with reduction "
10658 "on %qE; promoting to %<present_or_copy%>",
10660 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TOFROM
);
10664 if (!DECL_P (decl
))
10666 if ((ctx
->region_type
& ORT_TARGET
) != 0
10667 && OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_FIRSTPRIVATE_POINTER
)
10669 if (TREE_CODE (decl
) == INDIRECT_REF
10670 && TREE_CODE (TREE_OPERAND (decl
, 0)) == COMPONENT_REF
10671 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl
, 0)))
10672 == REFERENCE_TYPE
))
10673 decl
= TREE_OPERAND (decl
, 0);
10674 if (TREE_CODE (decl
) == COMPONENT_REF
)
10676 while (TREE_CODE (decl
) == COMPONENT_REF
)
10677 decl
= TREE_OPERAND (decl
, 0);
10680 n
= splay_tree_lookup (ctx
->variables
,
10681 (splay_tree_key
) decl
);
10682 if (!(n
->value
& GOVD_SEEN
))
10689 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10690 if ((ctx
->region_type
& ORT_TARGET
) != 0
10691 && !(n
->value
& GOVD_SEEN
)
10692 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c
)) == 0
10693 && (!is_global_var (decl
)
10694 || !lookup_attribute ("omp declare target link",
10695 DECL_ATTRIBUTES (decl
))))
10698 /* For struct element mapping, if struct is never referenced
10699 in target block and none of the mapping has always modifier,
10700 remove all the struct element mappings, which immediately
10701 follow the GOMP_MAP_STRUCT map clause. */
10702 if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
)
10704 HOST_WIDE_INT cnt
= tree_to_shwi (OMP_CLAUSE_SIZE (c
));
10706 OMP_CLAUSE_CHAIN (c
)
10707 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c
));
10710 else if (OMP_CLAUSE_MAP_KIND (c
) == GOMP_MAP_STRUCT
10711 && (code
== OMP_TARGET_EXIT_DATA
10712 || code
== OACC_EXIT_DATA
))
10714 else if (DECL_SIZE (decl
)
10715 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
10716 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_POINTER
10717 && OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FIRSTPRIVATE_POINTER
10718 && (OMP_CLAUSE_MAP_KIND (c
)
10719 != GOMP_MAP_FIRSTPRIVATE_REFERENCE
))
10721 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10722 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10724 gcc_assert (OMP_CLAUSE_MAP_KIND (c
) != GOMP_MAP_FORCE_DEVICEPTR
);
10726 tree decl2
= DECL_VALUE_EXPR (decl
);
10727 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10728 decl2
= TREE_OPERAND (decl2
, 0);
10729 gcc_assert (DECL_P (decl2
));
10730 tree mem
= build_simple_mem_ref (decl2
);
10731 OMP_CLAUSE_DECL (c
) = mem
;
10732 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10733 if (ctx
->outer_context
)
10735 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10736 omp_notice_variable (ctx
->outer_context
,
10737 OMP_CLAUSE_SIZE (c
), true);
10739 if (((ctx
->region_type
& ORT_TARGET
) != 0
10740 || !ctx
->target_firstprivatize_array_bases
)
10741 && ((n
->value
& GOVD_SEEN
) == 0
10742 || (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
)) == 0))
10744 tree nc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
10746 OMP_CLAUSE_DECL (nc
) = decl
;
10747 OMP_CLAUSE_SIZE (nc
) = size_zero_node
;
10748 if (ctx
->target_firstprivatize_array_bases
)
10749 OMP_CLAUSE_SET_MAP_KIND (nc
,
10750 GOMP_MAP_FIRSTPRIVATE_POINTER
);
10752 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_POINTER
);
10753 OMP_CLAUSE_CHAIN (nc
) = OMP_CLAUSE_CHAIN (c
);
10754 OMP_CLAUSE_CHAIN (c
) = nc
;
10760 if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10761 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10762 gcc_assert ((n
->value
& GOVD_SEEN
) == 0
10763 || ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10768 case OMP_CLAUSE_TO
:
10769 case OMP_CLAUSE_FROM
:
10770 case OMP_CLAUSE__CACHE_
:
10771 decl
= OMP_CLAUSE_DECL (c
);
10772 if (!DECL_P (decl
))
10774 if (DECL_SIZE (decl
)
10775 && TREE_CODE (DECL_SIZE (decl
)) != INTEGER_CST
)
10777 tree decl2
= DECL_VALUE_EXPR (decl
);
10778 gcc_assert (TREE_CODE (decl2
) == INDIRECT_REF
);
10779 decl2
= TREE_OPERAND (decl2
, 0);
10780 gcc_assert (DECL_P (decl2
));
10781 tree mem
= build_simple_mem_ref (decl2
);
10782 OMP_CLAUSE_DECL (c
) = mem
;
10783 OMP_CLAUSE_SIZE (c
) = TYPE_SIZE_UNIT (TREE_TYPE (decl
));
10784 if (ctx
->outer_context
)
10786 omp_notice_variable (ctx
->outer_context
, decl2
, true);
10787 omp_notice_variable (ctx
->outer_context
,
10788 OMP_CLAUSE_SIZE (c
), true);
10791 else if (OMP_CLAUSE_SIZE (c
) == NULL_TREE
)
10792 OMP_CLAUSE_SIZE (c
) = DECL_SIZE_UNIT (decl
);
10795 case OMP_CLAUSE_REDUCTION
:
10796 if (OMP_CLAUSE_REDUCTION_INSCAN (c
))
10798 decl
= OMP_CLAUSE_DECL (c
);
10799 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10800 if ((n
->value
& GOVD_REDUCTION_INSCAN
) == 0)
10803 error_at (OMP_CLAUSE_LOCATION (c
),
10804 "%qD specified in %<inscan%> %<reduction%> clause "
10805 "but not in %<scan%> directive clause", decl
);
10808 has_inscan_reductions
= true;
10811 case OMP_CLAUSE_IN_REDUCTION
:
10812 case OMP_CLAUSE_TASK_REDUCTION
:
10813 decl
= OMP_CLAUSE_DECL (c
);
10814 /* OpenACC reductions need a present_or_copy data clause.
10815 Add one if necessary. Emit error when the reduction is private. */
10816 if (ctx
->region_type
== ORT_ACC_PARALLEL
10817 || ctx
->region_type
== ORT_ACC_SERIAL
)
10819 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10820 if (n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
))
10823 error_at (OMP_CLAUSE_LOCATION (c
), "invalid private "
10824 "reduction on %qE", DECL_NAME (decl
));
10826 else if ((n
->value
& GOVD_MAP
) == 0)
10828 tree next
= OMP_CLAUSE_CHAIN (c
);
10829 tree nc
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_MAP
);
10830 OMP_CLAUSE_SET_MAP_KIND (nc
, GOMP_MAP_TOFROM
);
10831 OMP_CLAUSE_DECL (nc
) = decl
;
10832 OMP_CLAUSE_CHAIN (c
) = nc
;
10833 lang_hooks
.decls
.omp_finish_clause (nc
, pre_p
,
10838 OMP_CLAUSE_MAP_IN_REDUCTION (nc
) = 1;
10839 if (OMP_CLAUSE_CHAIN (nc
) == NULL
)
10841 nc
= OMP_CLAUSE_CHAIN (nc
);
10843 OMP_CLAUSE_CHAIN (nc
) = next
;
10844 n
->value
|= GOVD_MAP
;
10848 && omp_shared_to_firstprivate_optimizable_decl_p (decl
))
10849 omp_mark_stores (gimplify_omp_ctxp
->outer_context
, decl
);
10852 case OMP_CLAUSE_ALLOCATE
:
10853 decl
= OMP_CLAUSE_DECL (c
);
10854 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) decl
);
10855 if (n
!= NULL
&& !(n
->value
& GOVD_SEEN
))
10857 if ((n
->value
& (GOVD_PRIVATE
| GOVD_FIRSTPRIVATE
| GOVD_LINEAR
))
10859 && (n
->value
& (GOVD_REDUCTION
| GOVD_LASTPRIVATE
)) == 0)
10863 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
10864 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)) != INTEGER_CST
10865 && ((ctx
->region_type
& (ORT_PARALLEL
| ORT_TARGET
)) != 0
10866 || (ctx
->region_type
& ORT_TASKLOOP
) == ORT_TASK
10867 || (ctx
->region_type
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
))
10869 tree allocator
= OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
10870 n
= splay_tree_lookup (ctx
->variables
, (splay_tree_key
) allocator
);
10873 enum omp_clause_default_kind default_kind
10874 = ctx
->default_kind
;
10875 ctx
->default_kind
= OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
;
10876 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
10878 ctx
->default_kind
= default_kind
;
10881 omp_notice_variable (ctx
, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
),
10886 case OMP_CLAUSE_COPYIN
:
10887 case OMP_CLAUSE_COPYPRIVATE
:
10888 case OMP_CLAUSE_IF
:
10889 case OMP_CLAUSE_NUM_THREADS
:
10890 case OMP_CLAUSE_NUM_TEAMS
:
10891 case OMP_CLAUSE_THREAD_LIMIT
:
10892 case OMP_CLAUSE_DIST_SCHEDULE
:
10893 case OMP_CLAUSE_DEVICE
:
10894 case OMP_CLAUSE_SCHEDULE
:
10895 case OMP_CLAUSE_NOWAIT
:
10896 case OMP_CLAUSE_ORDERED
:
10897 case OMP_CLAUSE_DEFAULT
:
10898 case OMP_CLAUSE_UNTIED
:
10899 case OMP_CLAUSE_COLLAPSE
:
10900 case OMP_CLAUSE_FINAL
:
10901 case OMP_CLAUSE_MERGEABLE
:
10902 case OMP_CLAUSE_PROC_BIND
:
10903 case OMP_CLAUSE_SAFELEN
:
10904 case OMP_CLAUSE_SIMDLEN
:
10905 case OMP_CLAUSE_DEPEND
:
10906 case OMP_CLAUSE_PRIORITY
:
10907 case OMP_CLAUSE_GRAINSIZE
:
10908 case OMP_CLAUSE_NUM_TASKS
:
10909 case OMP_CLAUSE_NOGROUP
:
10910 case OMP_CLAUSE_THREADS
:
10911 case OMP_CLAUSE_SIMD
:
10912 case OMP_CLAUSE_HINT
:
10913 case OMP_CLAUSE_DEFAULTMAP
:
10914 case OMP_CLAUSE_ORDER
:
10915 case OMP_CLAUSE_BIND
:
10916 case OMP_CLAUSE_DETACH
:
10917 case OMP_CLAUSE_USE_DEVICE_PTR
:
10918 case OMP_CLAUSE_USE_DEVICE_ADDR
:
10919 case OMP_CLAUSE_IS_DEVICE_PTR
:
10920 case OMP_CLAUSE_ASYNC
:
10921 case OMP_CLAUSE_WAIT
:
10922 case OMP_CLAUSE_INDEPENDENT
:
10923 case OMP_CLAUSE_NUM_GANGS
:
10924 case OMP_CLAUSE_NUM_WORKERS
:
10925 case OMP_CLAUSE_VECTOR_LENGTH
:
10926 case OMP_CLAUSE_GANG
:
10927 case OMP_CLAUSE_WORKER
:
10928 case OMP_CLAUSE_VECTOR
:
10929 case OMP_CLAUSE_AUTO
:
10930 case OMP_CLAUSE_SEQ
:
10931 case OMP_CLAUSE_TILE
:
10932 case OMP_CLAUSE_IF_PRESENT
:
10933 case OMP_CLAUSE_FINALIZE
:
10934 case OMP_CLAUSE_INCLUSIVE
:
10935 case OMP_CLAUSE_EXCLUSIVE
:
10939 gcc_unreachable ();
10943 *list_p
= OMP_CLAUSE_CHAIN (c
);
10945 list_p
= &OMP_CLAUSE_CHAIN (c
);
10948 /* Add in any implicit data sharing. */
10949 struct gimplify_adjust_omp_clauses_data data
;
10950 data
.list_p
= list_p
;
10951 data
.pre_p
= pre_p
;
10952 splay_tree_foreach (ctx
->variables
, gimplify_adjust_omp_clauses_1
, &data
);
10954 if (has_inscan_reductions
)
10955 for (c
= *orig_list_p
; c
; c
= OMP_CLAUSE_CHAIN (c
))
10956 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
10957 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c
))
10959 error_at (OMP_CLAUSE_LOCATION (c
),
10960 "%<inscan%> %<reduction%> clause used together with "
10961 "%<linear%> clause for a variable other than loop "
10966 gimplify_omp_ctxp
= ctx
->outer_context
;
10967 delete_omp_context (ctx
);
10970 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
10971 -1 if unknown yet (simd is involved, won't be known until vectorization)
10972 and 1 if they do. If SCORES is non-NULL, it should point to an array
10973 of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
10974 of the CONSTRUCTS (position -1 if it will never match) followed by
10975 number of constructs in the OpenMP context construct trait. If the
10976 score depends on whether it will be in a declare simd clone or not,
10977 the function returns 2 and there will be two sets of the scores, the first
10978 one for the case that it is not in a declare simd clone, the other
10979 that it is in a declare simd clone. */
10982 omp_construct_selector_matches (enum tree_code
*constructs
, int nconstructs
,
10985 int matched
= 0, cnt
= 0;
10986 bool simd_seen
= false;
10987 bool target_seen
= false;
10988 int declare_simd_cnt
= -1;
10989 auto_vec
<enum tree_code
, 16> codes
;
10990 for (struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
; ctx
;)
10992 if (((ctx
->region_type
& ORT_PARALLEL
) && ctx
->code
== OMP_PARALLEL
)
10993 || ((ctx
->region_type
& (ORT_TARGET
| ORT_IMPLICIT_TARGET
| ORT_ACC
))
10994 == ORT_TARGET
&& ctx
->code
== OMP_TARGET
)
10995 || ((ctx
->region_type
& ORT_TEAMS
) && ctx
->code
== OMP_TEAMS
)
10996 || (ctx
->region_type
== ORT_WORKSHARE
&& ctx
->code
== OMP_FOR
)
10997 || (ctx
->region_type
== ORT_SIMD
10998 && ctx
->code
== OMP_SIMD
10999 && !omp_find_clause (ctx
->clauses
, OMP_CLAUSE_BIND
)))
11003 codes
.safe_push (ctx
->code
);
11004 else if (matched
< nconstructs
&& ctx
->code
== constructs
[matched
])
11006 if (ctx
->code
== OMP_SIMD
)
11014 if (ctx
->code
== OMP_TARGET
)
11016 if (scores
== NULL
)
11017 return matched
< nconstructs
? 0 : simd_seen
? -1 : 1;
11018 target_seen
= true;
11022 else if (ctx
->region_type
== ORT_WORKSHARE
11023 && ctx
->code
== OMP_LOOP
11024 && ctx
->outer_context
11025 && ctx
->outer_context
->region_type
== ORT_COMBINED_PARALLEL
11026 && ctx
->outer_context
->outer_context
11027 && ctx
->outer_context
->outer_context
->code
== OMP_LOOP
11028 && ctx
->outer_context
->outer_context
->distribute
)
11029 ctx
= ctx
->outer_context
->outer_context
;
11030 ctx
= ctx
->outer_context
;
11033 && lookup_attribute ("omp declare simd",
11034 DECL_ATTRIBUTES (current_function_decl
)))
11036 /* Declare simd is a maybe case, it is supposed to be added only to the
11037 omp-simd-clone.c added clones and not to the base function. */
11038 declare_simd_cnt
= cnt
++;
11040 codes
.safe_push (OMP_SIMD
);
11042 && constructs
[0] == OMP_SIMD
)
11044 gcc_assert (matched
== 0);
11046 if (++matched
== nconstructs
)
11050 if (tree attr
= lookup_attribute ("omp declare variant variant",
11051 DECL_ATTRIBUTES (current_function_decl
)))
11053 enum tree_code variant_constructs
[5];
11054 int variant_nconstructs
= 0;
11056 variant_nconstructs
11057 = omp_constructor_traits_to_codes (TREE_VALUE (attr
),
11058 variant_constructs
);
11059 for (int i
= 0; i
< variant_nconstructs
; i
++)
11063 codes
.safe_push (variant_constructs
[i
]);
11064 else if (matched
< nconstructs
11065 && variant_constructs
[i
] == constructs
[matched
])
11067 if (variant_constructs
[i
] == OMP_SIMD
)
11078 && lookup_attribute ("omp declare target block",
11079 DECL_ATTRIBUTES (current_function_decl
)))
11082 codes
.safe_push (OMP_TARGET
);
11083 else if (matched
< nconstructs
&& constructs
[matched
] == OMP_TARGET
)
11088 for (int pass
= 0; pass
< (declare_simd_cnt
== -1 ? 1 : 2); pass
++)
11090 int j
= codes
.length () - 1;
11091 for (int i
= nconstructs
- 1; i
>= 0; i
--)
11094 && (pass
!= 0 || declare_simd_cnt
!= j
)
11095 && constructs
[i
] != codes
[j
])
11097 if (pass
== 0 && declare_simd_cnt
!= -1 && j
> declare_simd_cnt
)
11102 *scores
++ = ((pass
== 0 && declare_simd_cnt
!= -1)
11103 ? codes
.length () - 1 : codes
.length ());
11105 return declare_simd_cnt
== -1 ? 1 : 2;
11107 if (matched
== nconstructs
)
11108 return simd_seen
? -1 : 1;
11112 /* Gimplify OACC_CACHE. */
11115 gimplify_oacc_cache (tree
*expr_p
, gimple_seq
*pre_p
)
11117 tree expr
= *expr_p
;
11119 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr
), pre_p
, ORT_ACC
,
11121 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OACC_CACHE_CLAUSES (expr
),
11124 /* TODO: Do something sensible with this information. */
11126 *expr_p
= NULL_TREE
;
11129 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
11130 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
11131 kind. The entry kind will replace the one in CLAUSE, while the exit
11132 kind will be used in a new omp_clause and returned to the caller. */
11135 gimplify_oacc_declare_1 (tree clause
)
11137 HOST_WIDE_INT kind
, new_op
;
11141 kind
= OMP_CLAUSE_MAP_KIND (clause
);
11145 case GOMP_MAP_ALLOC
:
11146 new_op
= GOMP_MAP_RELEASE
;
11150 case GOMP_MAP_FROM
:
11151 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_FORCE_ALLOC
);
11152 new_op
= GOMP_MAP_FROM
;
11156 case GOMP_MAP_TOFROM
:
11157 OMP_CLAUSE_SET_MAP_KIND (clause
, GOMP_MAP_TO
);
11158 new_op
= GOMP_MAP_FROM
;
11162 case GOMP_MAP_DEVICE_RESIDENT
:
11163 case GOMP_MAP_FORCE_DEVICEPTR
:
11164 case GOMP_MAP_FORCE_PRESENT
:
11165 case GOMP_MAP_LINK
:
11166 case GOMP_MAP_POINTER
:
11171 gcc_unreachable ();
11177 c
= build_omp_clause (OMP_CLAUSE_LOCATION (clause
), OMP_CLAUSE_MAP
);
11178 OMP_CLAUSE_SET_MAP_KIND (c
, new_op
);
11179 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (clause
);
11185 /* Gimplify OACC_DECLARE. */
11188 gimplify_oacc_declare (tree
*expr_p
, gimple_seq
*pre_p
)
11190 tree expr
= *expr_p
;
11192 tree clauses
, t
, decl
;
11194 clauses
= OACC_DECLARE_CLAUSES (expr
);
11196 gimplify_scan_omp_clauses (&clauses
, pre_p
, ORT_TARGET_DATA
, OACC_DECLARE
);
11197 gimplify_adjust_omp_clauses (pre_p
, NULL
, &clauses
, OACC_DECLARE
);
11199 for (t
= clauses
; t
; t
= OMP_CLAUSE_CHAIN (t
))
11201 decl
= OMP_CLAUSE_DECL (t
);
11203 if (TREE_CODE (decl
) == MEM_REF
)
11204 decl
= TREE_OPERAND (decl
, 0);
11206 if (VAR_P (decl
) && !is_oacc_declared (decl
))
11208 tree attr
= get_identifier ("oacc declare target");
11209 DECL_ATTRIBUTES (decl
) = tree_cons (attr
, NULL_TREE
,
11210 DECL_ATTRIBUTES (decl
));
11214 && !is_global_var (decl
)
11215 && DECL_CONTEXT (decl
) == current_function_decl
)
11217 tree c
= gimplify_oacc_declare_1 (t
);
11220 if (oacc_declare_returns
== NULL
)
11221 oacc_declare_returns
= new hash_map
<tree
, tree
>;
11223 oacc_declare_returns
->put (decl
, c
);
11227 if (gimplify_omp_ctxp
)
11228 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_SEEN
);
11231 stmt
= gimple_build_omp_target (NULL
, GF_OMP_TARGET_KIND_OACC_DECLARE
,
11234 gimplify_seq_add_stmt (pre_p
, stmt
);
11236 *expr_p
= NULL_TREE
;
11239 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11240 gimplification of the body, as well as scanning the body for used
11241 variables. We need to do this scan now, because variable-sized
11242 decls will be decomposed during gimplification. */
11245 gimplify_omp_parallel (tree
*expr_p
, gimple_seq
*pre_p
)
11247 tree expr
= *expr_p
;
11249 gimple_seq body
= NULL
;
11251 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr
), pre_p
,
11252 OMP_PARALLEL_COMBINED (expr
)
11253 ? ORT_COMBINED_PARALLEL
11254 : ORT_PARALLEL
, OMP_PARALLEL
);
11256 push_gimplify_context ();
11258 g
= gimplify_and_return_first (OMP_PARALLEL_BODY (expr
), &body
);
11259 if (gimple_code (g
) == GIMPLE_BIND
)
11260 pop_gimplify_context (g
);
11262 pop_gimplify_context (NULL
);
11264 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_PARALLEL_CLAUSES (expr
),
11267 g
= gimple_build_omp_parallel (body
,
11268 OMP_PARALLEL_CLAUSES (expr
),
11269 NULL_TREE
, NULL_TREE
);
11270 if (OMP_PARALLEL_COMBINED (expr
))
11271 gimple_omp_set_subcode (g
, GF_OMP_PARALLEL_COMBINED
);
11272 gimplify_seq_add_stmt (pre_p
, g
);
11273 *expr_p
= NULL_TREE
;
11276 /* Gimplify the contents of an OMP_TASK statement. This involves
11277 gimplification of the body, as well as scanning the body for used
11278 variables. We need to do this scan now, because variable-sized
11279 decls will be decomposed during gimplification. */
11282 gimplify_omp_task (tree
*expr_p
, gimple_seq
*pre_p
)
11284 tree expr
= *expr_p
;
11286 gimple_seq body
= NULL
;
11288 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
11289 for (tree c
= OMP_TASK_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11290 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
11291 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET
)
11293 error_at (OMP_CLAUSE_LOCATION (c
),
11294 "%<mutexinoutset%> kind in %<depend%> clause on a "
11295 "%<taskwait%> construct");
11299 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr
), pre_p
,
11300 omp_find_clause (OMP_TASK_CLAUSES (expr
),
11302 ? ORT_UNTIED_TASK
: ORT_TASK
, OMP_TASK
);
11304 if (OMP_TASK_BODY (expr
))
11306 push_gimplify_context ();
11308 g
= gimplify_and_return_first (OMP_TASK_BODY (expr
), &body
);
11309 if (gimple_code (g
) == GIMPLE_BIND
)
11310 pop_gimplify_context (g
);
11312 pop_gimplify_context (NULL
);
11315 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_TASK_CLAUSES (expr
),
11318 g
= gimple_build_omp_task (body
,
11319 OMP_TASK_CLAUSES (expr
),
11320 NULL_TREE
, NULL_TREE
,
11321 NULL_TREE
, NULL_TREE
, NULL_TREE
);
11322 if (OMP_TASK_BODY (expr
) == NULL_TREE
)
11323 gimple_omp_task_set_taskwait_p (g
, true);
11324 gimplify_seq_add_stmt (pre_p
, g
);
11325 *expr_p
= NULL_TREE
;
11328 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
11329 force it into a temporary initialized in PRE_P and add firstprivate clause
11330 to ORIG_FOR_STMT. */
11333 gimplify_omp_taskloop_expr (tree type
, tree
*tp
, gimple_seq
*pre_p
,
11334 tree orig_for_stmt
)
11336 if (*tp
== NULL
|| is_gimple_constant (*tp
))
11339 *tp
= get_initialized_tmp_var (*tp
, pre_p
, NULL
, false);
11340 /* Reference to pointer conversion is considered useless,
11341 but is significant for firstprivate clause. Force it
11344 && TREE_CODE (type
) == POINTER_TYPE
11345 && TREE_CODE (TREE_TYPE (*tp
)) == REFERENCE_TYPE
)
11347 tree v
= create_tmp_var (TYPE_MAIN_VARIANT (type
));
11348 tree m
= build2 (INIT_EXPR
, TREE_TYPE (v
), v
, *tp
);
11349 gimplify_and_add (m
, pre_p
);
11353 tree c
= build_omp_clause (input_location
, OMP_CLAUSE_FIRSTPRIVATE
);
11354 OMP_CLAUSE_DECL (c
) = *tp
;
11355 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (orig_for_stmt
);
11356 OMP_FOR_CLAUSES (orig_for_stmt
) = c
;
11359 /* Gimplify the gross structure of an OMP_FOR statement. */
11361 static enum gimplify_status
11362 gimplify_omp_for (tree
*expr_p
, gimple_seq
*pre_p
)
11364 tree for_stmt
, orig_for_stmt
, inner_for_stmt
= NULL_TREE
, decl
, var
, t
;
11365 enum gimplify_status ret
= GS_ALL_DONE
;
11366 enum gimplify_status tret
;
11368 gimple_seq for_body
, for_pre_body
;
11370 bitmap has_decl_expr
= NULL
;
11371 enum omp_region_type ort
= ORT_WORKSHARE
;
11372 bool openacc
= TREE_CODE (*expr_p
) == OACC_LOOP
;
11374 orig_for_stmt
= for_stmt
= *expr_p
;
11376 bool loop_p
= (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_BIND
)
11378 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11380 tree
*data
[4] = { NULL
, NULL
, NULL
, NULL
};
11381 gcc_assert (TREE_CODE (for_stmt
) != OACC_LOOP
);
11382 inner_for_stmt
= walk_tree (&OMP_FOR_BODY (for_stmt
),
11383 find_combined_omp_for
, data
, NULL
);
11384 if (inner_for_stmt
== NULL_TREE
)
11386 gcc_assert (seen_error ());
11387 *expr_p
= NULL_TREE
;
11390 if (data
[2] && OMP_FOR_PRE_BODY (*data
[2]))
11392 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data
[2]),
11393 &OMP_FOR_PRE_BODY (for_stmt
));
11394 OMP_FOR_PRE_BODY (*data
[2]) = NULL_TREE
;
11396 if (OMP_FOR_PRE_BODY (inner_for_stmt
))
11398 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt
),
11399 &OMP_FOR_PRE_BODY (for_stmt
));
11400 OMP_FOR_PRE_BODY (inner_for_stmt
) = NULL_TREE
;
11405 /* We have some statements or variable declarations in between
11406 the composite construct directives. Move them around the
11409 for (i
= 0; i
< 3; i
++)
11413 if (i
< 2 && data
[i
+ 1] == &OMP_BODY (t
))
11414 data
[i
+ 1] = data
[i
];
11415 *data
[i
] = OMP_BODY (t
);
11416 tree body
= build3 (BIND_EXPR
, void_type_node
, NULL_TREE
,
11417 NULL_TREE
, make_node (BLOCK
));
11418 OMP_BODY (t
) = body
;
11419 append_to_statement_list_force (inner_for_stmt
,
11420 &BIND_EXPR_BODY (body
));
11422 data
[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body
)));
11423 gcc_assert (*data
[3] == inner_for_stmt
);
11428 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11430 && OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11431 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11433 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11436 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11437 /* Class iterators aren't allowed on OMP_SIMD, so the only
11438 case we need to solve is distribute parallel for. They are
11439 allowed on the loop construct, but that is already handled
11440 in gimplify_omp_loop. */
11441 gcc_assert (TREE_CODE (inner_for_stmt
) == OMP_FOR
11442 && TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
11444 tree orig_decl
= TREE_PURPOSE (orig
);
11445 tree last
= TREE_VALUE (orig
);
11447 for (pc
= &OMP_FOR_CLAUSES (inner_for_stmt
);
11448 *pc
; pc
= &OMP_CLAUSE_CHAIN (*pc
))
11449 if ((OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
11450 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_LASTPRIVATE
)
11451 && OMP_CLAUSE_DECL (*pc
) == orig_decl
)
11453 if (*pc
== NULL_TREE
)
11456 for (spc
= &OMP_PARALLEL_CLAUSES (*data
[1]);
11457 *spc
; spc
= &OMP_CLAUSE_CHAIN (*spc
))
11458 if (OMP_CLAUSE_CODE (*spc
) == OMP_CLAUSE_PRIVATE
11459 && OMP_CLAUSE_DECL (*spc
) == orig_decl
)
11464 *spc
= OMP_CLAUSE_CHAIN (c
);
11465 OMP_CLAUSE_CHAIN (c
) = NULL_TREE
;
11469 if (*pc
== NULL_TREE
)
11471 else if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_PRIVATE
)
11473 /* private clause will appear only on inner_for_stmt.
11474 Change it into firstprivate, and add private clause
11476 tree c
= copy_node (*pc
);
11477 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11478 OMP_FOR_CLAUSES (for_stmt
) = c
;
11479 OMP_CLAUSE_CODE (*pc
) = OMP_CLAUSE_FIRSTPRIVATE
;
11480 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
11484 /* lastprivate clause will appear on both inner_for_stmt
11485 and for_stmt. Add firstprivate clause to
11487 tree c
= build_omp_clause (OMP_CLAUSE_LOCATION (*pc
),
11488 OMP_CLAUSE_FIRSTPRIVATE
);
11489 OMP_CLAUSE_DECL (c
) = OMP_CLAUSE_DECL (*pc
);
11490 OMP_CLAUSE_CHAIN (c
) = *pc
;
11492 lang_hooks
.decls
.omp_finish_clause (*pc
, pre_p
, openacc
);
11494 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11495 OMP_CLAUSE_FIRSTPRIVATE
);
11496 OMP_CLAUSE_DECL (c
) = last
;
11497 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11498 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11499 c
= build_omp_clause (UNKNOWN_LOCATION
,
11500 *pc
? OMP_CLAUSE_SHARED
11501 : OMP_CLAUSE_FIRSTPRIVATE
);
11502 OMP_CLAUSE_DECL (c
) = orig_decl
;
11503 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11504 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11506 /* Similarly, take care of C++ range for temporaries, those should
11507 be firstprivate on OMP_PARALLEL if any. */
11509 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt
)); i
++)
11510 if (OMP_FOR_ORIG_DECLS (inner_for_stmt
)
11511 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11513 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
),
11517 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt
), i
);
11518 tree v
= TREE_CHAIN (orig
);
11519 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
11520 OMP_CLAUSE_FIRSTPRIVATE
);
11521 /* First add firstprivate clause for the __for_end artificial
11523 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 1);
11524 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11526 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11527 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11528 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11529 if (TREE_VEC_ELT (v
, 0))
11531 /* And now the same for __for_range artificial decl if it
11533 c
= build_omp_clause (UNKNOWN_LOCATION
,
11534 OMP_CLAUSE_FIRSTPRIVATE
);
11535 OMP_CLAUSE_DECL (c
) = TREE_VEC_ELT (v
, 0);
11536 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c
)))
11538 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c
) = 1;
11539 OMP_CLAUSE_CHAIN (c
) = OMP_PARALLEL_CLAUSES (*data
[1]);
11540 OMP_PARALLEL_CLAUSES (*data
[1]) = c
;
11545 switch (TREE_CODE (for_stmt
))
11548 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
))
11550 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11551 OMP_CLAUSE_SCHEDULE
))
11552 error_at (EXPR_LOCATION (for_stmt
),
11553 "%qs clause may not appear on non-rectangular %qs",
11554 "schedule", "for");
11555 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
))
11556 error_at (EXPR_LOCATION (for_stmt
),
11557 "%qs clause may not appear on non-rectangular %qs",
11561 case OMP_DISTRIBUTE
:
11562 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt
? inner_for_stmt
: for_stmt
)
11563 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11564 OMP_CLAUSE_DIST_SCHEDULE
))
11565 error_at (EXPR_LOCATION (for_stmt
),
11566 "%qs clause may not appear on non-rectangular %qs",
11567 "dist_schedule", "distribute");
11573 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_UNTIED
))
11574 ort
= ORT_UNTIED_TASKLOOP
;
11576 ort
= ORT_TASKLOOP
;
11582 gcc_unreachable ();
11585 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11586 clause for the IV. */
11587 if (ort
== ORT_SIMD
&& TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
11589 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), 0);
11590 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11591 decl
= TREE_OPERAND (t
, 0);
11592 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
11593 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
11594 && OMP_CLAUSE_DECL (c
) == decl
)
11596 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11601 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
)
11602 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt
), pre_p
, ort
,
11603 loop_p
&& TREE_CODE (for_stmt
) != OMP_SIMD
11604 ? OMP_LOOP
: TREE_CODE (for_stmt
));
11606 if (TREE_CODE (for_stmt
) == OMP_DISTRIBUTE
)
11607 gimplify_omp_ctxp
->distribute
= true;
11609 /* Handle OMP_FOR_INIT. */
11610 for_pre_body
= NULL
;
11611 if ((ort
== ORT_SIMD
11612 || (inner_for_stmt
&& TREE_CODE (inner_for_stmt
) == OMP_SIMD
))
11613 && OMP_FOR_PRE_BODY (for_stmt
))
11615 has_decl_expr
= BITMAP_ALLOC (NULL
);
11616 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == DECL_EXPR
11617 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt
)))
11620 t
= OMP_FOR_PRE_BODY (for_stmt
);
11621 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11623 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt
)) == STATEMENT_LIST
)
11625 tree_stmt_iterator si
;
11626 for (si
= tsi_start (OMP_FOR_PRE_BODY (for_stmt
)); !tsi_end_p (si
);
11630 if (TREE_CODE (t
) == DECL_EXPR
11631 && TREE_CODE (DECL_EXPR_DECL (t
)) == VAR_DECL
)
11632 bitmap_set_bit (has_decl_expr
, DECL_UID (DECL_EXPR_DECL (t
)));
11636 if (OMP_FOR_PRE_BODY (for_stmt
))
11638 if (TREE_CODE (for_stmt
) != OMP_TASKLOOP
|| gimplify_omp_ctxp
)
11639 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11642 struct gimplify_omp_ctx ctx
;
11643 memset (&ctx
, 0, sizeof (ctx
));
11644 ctx
.region_type
= ORT_NONE
;
11645 gimplify_omp_ctxp
= &ctx
;
11646 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt
), &for_pre_body
);
11647 gimplify_omp_ctxp
= NULL
;
11650 OMP_FOR_PRE_BODY (for_stmt
) = NULL_TREE
;
11652 if (OMP_FOR_INIT (for_stmt
) == NULL_TREE
)
11653 for_stmt
= inner_for_stmt
;
11655 /* For taskloop, need to gimplify the start, end and step before the
11656 taskloop, outside of the taskloop omp context. */
11657 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
11659 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11661 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11662 gimple_seq
*for_pre_p
= (gimple_seq_empty_p (for_pre_body
)
11663 ? pre_p
: &for_pre_body
);
11664 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
11665 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
11667 tree v
= TREE_OPERAND (t
, 1);
11668 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
11669 for_pre_p
, orig_for_stmt
);
11670 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
11671 for_pre_p
, orig_for_stmt
);
11674 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
11677 /* Handle OMP_FOR_COND. */
11678 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
11679 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
11681 tree v
= TREE_OPERAND (t
, 1);
11682 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 1),
11683 for_pre_p
, orig_for_stmt
);
11684 gimplify_omp_taskloop_expr (type
, &TREE_VEC_ELT (v
, 2),
11685 for_pre_p
, orig_for_stmt
);
11688 gimplify_omp_taskloop_expr (type
, &TREE_OPERAND (t
, 1), for_pre_p
,
11691 /* Handle OMP_FOR_INCR. */
11692 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
11693 if (TREE_CODE (t
) == MODIFY_EXPR
)
11695 decl
= TREE_OPERAND (t
, 0);
11696 t
= TREE_OPERAND (t
, 1);
11697 tree
*tp
= &TREE_OPERAND (t
, 1);
11698 if (TREE_CODE (t
) == PLUS_EXPR
&& *tp
== decl
)
11699 tp
= &TREE_OPERAND (t
, 0);
11701 gimplify_omp_taskloop_expr (NULL_TREE
, tp
, for_pre_p
,
11706 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt
), pre_p
, ort
,
11710 if (orig_for_stmt
!= for_stmt
)
11711 gimplify_omp_ctxp
->combined_loop
= true;
11714 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11715 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt
)));
11716 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11717 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt
)));
11719 tree c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ORDERED
);
11720 bool is_doacross
= false;
11721 if (c
&& OMP_CLAUSE_ORDERED_EXPR (c
))
11723 is_doacross
= true;
11724 gimplify_omp_ctxp
->loop_iter_var
.create (TREE_VEC_LENGTH
11725 (OMP_FOR_INIT (for_stmt
))
11728 int collapse
= 1, tile
= 0;
11729 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_COLLAPSE
);
11731 collapse
= tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c
));
11732 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_TILE
);
11734 tile
= list_length (OMP_CLAUSE_TILE_LIST (c
));
11735 c
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
), OMP_CLAUSE_ALLOCATE
);
11736 hash_set
<tree
> *allocate_uids
= NULL
;
11739 allocate_uids
= new hash_set
<tree
>;
11740 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
11741 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_ALLOCATE
)
11742 allocate_uids
->add (OMP_CLAUSE_DECL (c
));
11744 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
11746 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
11747 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
11748 decl
= TREE_OPERAND (t
, 0);
11749 gcc_assert (DECL_P (decl
));
11750 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl
))
11751 || POINTER_TYPE_P (TREE_TYPE (decl
)));
11754 if (TREE_CODE (for_stmt
) == OMP_FOR
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11756 tree orig_decl
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11757 if (TREE_CODE (orig_decl
) == TREE_LIST
)
11759 orig_decl
= TREE_PURPOSE (orig_decl
);
11763 gimplify_omp_ctxp
->loop_iter_var
.quick_push (orig_decl
);
11766 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11767 gimplify_omp_ctxp
->loop_iter_var
.quick_push (decl
);
11770 /* Make sure the iteration variable is private. */
11771 tree c
= NULL_TREE
;
11772 tree c2
= NULL_TREE
;
11773 if (orig_for_stmt
!= for_stmt
)
11775 /* Preserve this information until we gimplify the inner simd. */
11777 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11778 TREE_PRIVATE (t
) = 1;
11780 else if (ort
== ORT_SIMD
)
11782 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
11783 (splay_tree_key
) decl
);
11784 omp_is_private (gimplify_omp_ctxp
, decl
,
11785 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
))
11787 if (n
!= NULL
&& (n
->value
& GOVD_DATA_SHARE_CLASS
) != 0)
11789 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
11790 if (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
)
11791 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
11792 OMP_CLAUSE_LASTPRIVATE
);
11793 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
11794 OMP_CLAUSE_LASTPRIVATE
))
11795 if (OMP_CLAUSE_DECL (c3
) == decl
)
11797 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
11798 "conditional %<lastprivate%> on loop "
11799 "iterator %qD ignored", decl
);
11800 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
11801 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
11804 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1 && !loop_p
)
11806 c
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
11807 OMP_CLAUSE_LINEAR_NO_COPYIN (c
) = 1;
11808 unsigned int flags
= GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
;
11810 && bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)))
11811 || TREE_PRIVATE (t
))
11813 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11814 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11816 struct gimplify_omp_ctx
*outer
11817 = gimplify_omp_ctxp
->outer_context
;
11818 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11820 if (outer
->region_type
== ORT_WORKSHARE
11821 && outer
->combined_loop
)
11823 n
= splay_tree_lookup (outer
->variables
,
11824 (splay_tree_key
)decl
);
11825 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11827 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11828 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11832 struct gimplify_omp_ctx
*octx
= outer
->outer_context
;
11834 && octx
->region_type
== ORT_COMBINED_PARALLEL
11835 && octx
->outer_context
11836 && (octx
->outer_context
->region_type
11838 && octx
->outer_context
->combined_loop
)
11840 octx
= octx
->outer_context
;
11841 n
= splay_tree_lookup (octx
->variables
,
11842 (splay_tree_key
)decl
);
11843 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11845 OMP_CLAUSE_LINEAR_NO_COPYOUT (c
) = 1;
11846 flags
|= GOVD_LINEAR_LASTPRIVATE_NO_OUTER
;
11853 OMP_CLAUSE_DECL (c
) = decl
;
11854 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
11855 OMP_FOR_CLAUSES (for_stmt
) = c
;
11856 omp_add_variable (gimplify_omp_ctxp
, decl
, flags
);
11857 if (outer
&& !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
))
11859 if (outer
->region_type
== ORT_WORKSHARE
11860 && outer
->combined_loop
)
11862 if (outer
->outer_context
11863 && (outer
->outer_context
->region_type
11864 == ORT_COMBINED_PARALLEL
))
11865 outer
= outer
->outer_context
;
11866 else if (omp_check_private (outer
, decl
, false))
11869 else if (((outer
->region_type
& ORT_TASKLOOP
)
11871 && outer
->combined_loop
11872 && !omp_check_private (gimplify_omp_ctxp
,
11875 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11877 omp_notice_variable (outer
, decl
, true);
11882 n
= splay_tree_lookup (outer
->variables
,
11883 (splay_tree_key
)decl
);
11884 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11886 omp_add_variable (outer
, decl
,
11887 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11888 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11889 && outer
->outer_context
11890 && (outer
->outer_context
->region_type
11892 && outer
->outer_context
->combined_loop
)
11894 outer
= outer
->outer_context
;
11895 n
= splay_tree_lookup (outer
->variables
,
11896 (splay_tree_key
)decl
);
11897 if (omp_check_private (outer
, decl
, false))
11900 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
11902 omp_add_variable (outer
, decl
,
11908 if (outer
&& outer
->outer_context
11909 && ((outer
->outer_context
->region_type
11910 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
11911 || (((outer
->region_type
& ORT_TASKLOOP
)
11913 && (outer
->outer_context
->region_type
11914 == ORT_COMBINED_PARALLEL
))))
11916 outer
= outer
->outer_context
;
11917 n
= splay_tree_lookup (outer
->variables
,
11918 (splay_tree_key
)decl
);
11920 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11921 omp_add_variable (outer
, decl
,
11922 GOVD_SHARED
| GOVD_SEEN
);
11926 if (outer
&& outer
->outer_context
)
11927 omp_notice_variable (outer
->outer_context
, decl
,
11937 || !bitmap_bit_p (has_decl_expr
, DECL_UID (decl
)));
11938 if (TREE_PRIVATE (t
))
11939 lastprivate
= false;
11940 if (loop_p
&& OMP_FOR_ORIG_DECLS (for_stmt
))
11942 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
11943 if (TREE_CODE (elt
) == TREE_LIST
&& TREE_PURPOSE (elt
))
11944 lastprivate
= false;
11947 struct gimplify_omp_ctx
*outer
11948 = gimplify_omp_ctxp
->outer_context
;
11949 if (outer
&& lastprivate
)
11951 if (outer
->region_type
== ORT_WORKSHARE
11952 && outer
->combined_loop
)
11954 n
= splay_tree_lookup (outer
->variables
,
11955 (splay_tree_key
)decl
);
11956 if (n
!= NULL
&& (n
->value
& GOVD_LOCAL
) != 0)
11958 lastprivate
= false;
11961 else if (outer
->outer_context
11962 && (outer
->outer_context
->region_type
11963 == ORT_COMBINED_PARALLEL
))
11964 outer
= outer
->outer_context
;
11965 else if (omp_check_private (outer
, decl
, false))
11968 else if (((outer
->region_type
& ORT_TASKLOOP
)
11970 && outer
->combined_loop
11971 && !omp_check_private (gimplify_omp_ctxp
,
11974 else if (outer
->region_type
!= ORT_COMBINED_PARALLEL
)
11976 omp_notice_variable (outer
, decl
, true);
11981 n
= splay_tree_lookup (outer
->variables
,
11982 (splay_tree_key
)decl
);
11983 if (n
== NULL
|| (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
11985 omp_add_variable (outer
, decl
,
11986 GOVD_LASTPRIVATE
| GOVD_SEEN
);
11987 if (outer
->region_type
== ORT_COMBINED_PARALLEL
11988 && outer
->outer_context
11989 && (outer
->outer_context
->region_type
11991 && outer
->outer_context
->combined_loop
)
11993 outer
= outer
->outer_context
;
11994 n
= splay_tree_lookup (outer
->variables
,
11995 (splay_tree_key
)decl
);
11996 if (omp_check_private (outer
, decl
, false))
11999 || ((n
->value
& GOVD_DATA_SHARE_CLASS
)
12001 omp_add_variable (outer
, decl
,
12007 if (outer
&& outer
->outer_context
12008 && ((outer
->outer_context
->region_type
12009 & ORT_COMBINED_TEAMS
) == ORT_COMBINED_TEAMS
12010 || (((outer
->region_type
& ORT_TASKLOOP
)
12012 && (outer
->outer_context
->region_type
12013 == ORT_COMBINED_PARALLEL
))))
12015 outer
= outer
->outer_context
;
12016 n
= splay_tree_lookup (outer
->variables
,
12017 (splay_tree_key
)decl
);
12019 || (n
->value
& GOVD_DATA_SHARE_CLASS
) == 0)
12020 omp_add_variable (outer
, decl
,
12021 GOVD_SHARED
| GOVD_SEEN
);
12025 if (outer
&& outer
->outer_context
)
12026 omp_notice_variable (outer
->outer_context
, decl
,
12032 c
= build_omp_clause (input_location
,
12033 lastprivate
? OMP_CLAUSE_LASTPRIVATE
12034 : OMP_CLAUSE_PRIVATE
);
12035 OMP_CLAUSE_DECL (c
) = decl
;
12036 OMP_CLAUSE_CHAIN (c
) = OMP_FOR_CLAUSES (for_stmt
);
12037 OMP_FOR_CLAUSES (for_stmt
) = c
;
12038 omp_add_variable (gimplify_omp_ctxp
, decl
,
12039 (lastprivate
? GOVD_LASTPRIVATE
: GOVD_PRIVATE
)
12040 | GOVD_EXPLICIT
| GOVD_SEEN
);
12044 else if (omp_is_private (gimplify_omp_ctxp
, decl
, 0))
12046 omp_notice_variable (gimplify_omp_ctxp
, decl
, true);
12047 splay_tree_node n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
12048 (splay_tree_key
) decl
);
12049 if (n
&& (n
->value
& GOVD_LASTPRIVATE_CONDITIONAL
))
12050 for (tree c3
= omp_find_clause (OMP_FOR_CLAUSES (for_stmt
),
12051 OMP_CLAUSE_LASTPRIVATE
);
12052 c3
; c3
= omp_find_clause (OMP_CLAUSE_CHAIN (c3
),
12053 OMP_CLAUSE_LASTPRIVATE
))
12054 if (OMP_CLAUSE_DECL (c3
) == decl
)
12056 warning_at (OMP_CLAUSE_LOCATION (c3
), 0,
12057 "conditional %<lastprivate%> on loop "
12058 "iterator %qD ignored", decl
);
12059 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3
) = 0;
12060 n
->value
&= ~GOVD_LASTPRIVATE_CONDITIONAL
;
12064 omp_add_variable (gimplify_omp_ctxp
, decl
, GOVD_PRIVATE
| GOVD_SEEN
);
12066 /* If DECL is not a gimple register, create a temporary variable to act
12067 as an iteration counter. This is valid, since DECL cannot be
12068 modified in the body of the loop. Similarly for any iteration vars
12069 in simd with collapse > 1 where the iterator vars must be
12070 lastprivate. And similarly for vars mentioned in allocate clauses. */
12071 if (orig_for_stmt
!= for_stmt
)
12073 else if (!is_gimple_reg (decl
)
12074 || (ort
== ORT_SIMD
12075 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) > 1)
12076 || (allocate_uids
&& allocate_uids
->contains (decl
)))
12078 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12079 /* Make sure omp_add_variable is not called on it prematurely.
12080 We call it ourselves a few lines later. */
12081 gimplify_omp_ctxp
= NULL
;
12082 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12083 gimplify_omp_ctxp
= ctx
;
12084 TREE_OPERAND (t
, 0) = var
;
12086 gimplify_seq_add_stmt (&for_body
, gimple_build_assign (decl
, var
));
12088 if (ort
== ORT_SIMD
12089 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)) == 1)
12091 c2
= build_omp_clause (input_location
, OMP_CLAUSE_LINEAR
);
12092 OMP_CLAUSE_LINEAR_NO_COPYIN (c2
) = 1;
12093 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2
) = 1;
12094 OMP_CLAUSE_DECL (c2
) = var
;
12095 OMP_CLAUSE_CHAIN (c2
) = OMP_FOR_CLAUSES (for_stmt
);
12096 OMP_FOR_CLAUSES (for_stmt
) = c2
;
12097 omp_add_variable (gimplify_omp_ctxp
, var
,
12098 GOVD_LINEAR
| GOVD_EXPLICIT
| GOVD_SEEN
);
12099 if (c
== NULL_TREE
)
12106 omp_add_variable (gimplify_omp_ctxp
, var
,
12107 GOVD_PRIVATE
| GOVD_SEEN
);
12112 gimplify_omp_ctxp
->in_for_exprs
= true;
12113 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12115 tree lb
= TREE_OPERAND (t
, 1);
12116 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 1), &for_pre_body
, NULL
,
12117 is_gimple_val
, fb_rvalue
, false);
12118 ret
= MIN (ret
, tret
);
12119 tret
= gimplify_expr (&TREE_VEC_ELT (lb
, 2), &for_pre_body
, NULL
,
12120 is_gimple_val
, fb_rvalue
, false);
12123 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12124 is_gimple_val
, fb_rvalue
, false);
12125 gimplify_omp_ctxp
->in_for_exprs
= false;
12126 ret
= MIN (ret
, tret
);
12127 if (ret
== GS_ERROR
)
12130 /* Handle OMP_FOR_COND. */
12131 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12132 gcc_assert (COMPARISON_CLASS_P (t
));
12133 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12135 gimplify_omp_ctxp
->in_for_exprs
= true;
12136 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
)
12138 tree ub
= TREE_OPERAND (t
, 1);
12139 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 1), &for_pre_body
, NULL
,
12140 is_gimple_val
, fb_rvalue
, false);
12141 ret
= MIN (ret
, tret
);
12142 tret
= gimplify_expr (&TREE_VEC_ELT (ub
, 2), &for_pre_body
, NULL
,
12143 is_gimple_val
, fb_rvalue
, false);
12146 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12147 is_gimple_val
, fb_rvalue
, false);
12148 gimplify_omp_ctxp
->in_for_exprs
= false;
12149 ret
= MIN (ret
, tret
);
12151 /* Handle OMP_FOR_INCR. */
12152 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12153 switch (TREE_CODE (t
))
12155 case PREINCREMENT_EXPR
:
12156 case POSTINCREMENT_EXPR
:
12158 tree decl
= TREE_OPERAND (t
, 0);
12159 /* c_omp_for_incr_canonicalize_ptr() should have been
12160 called to massage things appropriately. */
12161 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
12163 if (orig_for_stmt
!= for_stmt
)
12165 t
= build_int_cst (TREE_TYPE (decl
), 1);
12167 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
12168 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
12169 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
12170 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
12174 case PREDECREMENT_EXPR
:
12175 case POSTDECREMENT_EXPR
:
12176 /* c_omp_for_incr_canonicalize_ptr() should have been
12177 called to massage things appropriately. */
12178 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl
)));
12179 if (orig_for_stmt
!= for_stmt
)
12181 t
= build_int_cst (TREE_TYPE (decl
), -1);
12183 OMP_CLAUSE_LINEAR_STEP (c
) = t
;
12184 t
= build2 (PLUS_EXPR
, TREE_TYPE (decl
), var
, t
);
12185 t
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, t
);
12186 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
) = t
;
12190 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12191 TREE_OPERAND (t
, 0) = var
;
12193 t
= TREE_OPERAND (t
, 1);
12194 switch (TREE_CODE (t
))
12197 if (TREE_OPERAND (t
, 1) == decl
)
12199 TREE_OPERAND (t
, 1) = TREE_OPERAND (t
, 0);
12200 TREE_OPERAND (t
, 0) = var
;
12206 case POINTER_PLUS_EXPR
:
12207 gcc_assert (TREE_OPERAND (t
, 0) == decl
);
12208 TREE_OPERAND (t
, 0) = var
;
12211 gcc_unreachable ();
12214 gimplify_omp_ctxp
->in_for_exprs
= true;
12215 tret
= gimplify_expr (&TREE_OPERAND (t
, 1), &for_pre_body
, NULL
,
12216 is_gimple_val
, fb_rvalue
, false);
12217 ret
= MIN (ret
, tret
);
12220 tree step
= TREE_OPERAND (t
, 1);
12221 tree stept
= TREE_TYPE (decl
);
12222 if (POINTER_TYPE_P (stept
))
12224 step
= fold_convert (stept
, step
);
12225 if (TREE_CODE (t
) == MINUS_EXPR
)
12226 step
= fold_build1 (NEGATE_EXPR
, stept
, step
);
12227 OMP_CLAUSE_LINEAR_STEP (c
) = step
;
12228 if (step
!= TREE_OPERAND (t
, 1))
12230 tret
= gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c
),
12231 &for_pre_body
, NULL
,
12232 is_gimple_val
, fb_rvalue
, false);
12233 ret
= MIN (ret
, tret
);
12236 gimplify_omp_ctxp
->in_for_exprs
= false;
12240 gcc_unreachable ();
12246 OMP_CLAUSE_LINEAR_STEP (c2
) = OMP_CLAUSE_LINEAR_STEP (c
);
12249 if ((var
!= decl
|| collapse
> 1 || tile
) && orig_for_stmt
== for_stmt
)
12251 for (c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12252 if (((OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12253 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
) == NULL
)
12254 || (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LINEAR
12255 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c
)
12256 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
) == NULL
))
12257 && OMP_CLAUSE_DECL (c
) == decl
)
12259 if (is_doacross
&& (collapse
== 1 || i
>= collapse
))
12263 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12264 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12265 gcc_assert (TREE_OPERAND (t
, 0) == var
);
12266 t
= TREE_OPERAND (t
, 1);
12267 gcc_assert (TREE_CODE (t
) == PLUS_EXPR
12268 || TREE_CODE (t
) == MINUS_EXPR
12269 || TREE_CODE (t
) == POINTER_PLUS_EXPR
);
12270 gcc_assert (TREE_OPERAND (t
, 0) == var
);
12271 t
= build2 (TREE_CODE (t
), TREE_TYPE (decl
),
12272 is_doacross
? var
: decl
,
12273 TREE_OPERAND (t
, 1));
12276 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
)
12277 seq
= &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c
);
12279 seq
= &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c
);
12280 push_gimplify_context ();
12281 gimplify_assign (decl
, t
, seq
);
12282 gimple
*bind
= NULL
;
12283 if (gimplify_ctxp
->temps
)
12285 bind
= gimple_build_bind (NULL_TREE
, *seq
, NULL_TREE
);
12287 gimplify_seq_add_stmt (seq
, bind
);
12289 pop_gimplify_context (bind
);
12292 if (OMP_FOR_NON_RECTANGULAR (for_stmt
) && var
!= decl
)
12293 for (int j
= i
+ 1; j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
12295 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
12296 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12297 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12298 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12299 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12300 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
12301 gcc_assert (COMPARISON_CLASS_P (t
));
12302 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12303 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12304 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12308 BITMAP_FREE (has_decl_expr
);
12309 delete allocate_uids
;
12311 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
12312 || (loop_p
&& orig_for_stmt
== for_stmt
))
12314 push_gimplify_context ();
12315 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt
)) != BIND_EXPR
)
12317 OMP_FOR_BODY (orig_for_stmt
)
12318 = build3 (BIND_EXPR
, void_type_node
, NULL
,
12319 OMP_FOR_BODY (orig_for_stmt
), NULL
);
12320 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt
)) = 1;
12324 gimple
*g
= gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt
),
12327 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
12328 || (loop_p
&& orig_for_stmt
== for_stmt
))
12330 if (gimple_code (g
) == GIMPLE_BIND
)
12331 pop_gimplify_context (g
);
12333 pop_gimplify_context (NULL
);
12336 if (orig_for_stmt
!= for_stmt
)
12337 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12339 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12340 decl
= TREE_OPERAND (t
, 0);
12341 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12342 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12343 gimplify_omp_ctxp
= ctx
->outer_context
;
12344 var
= create_tmp_var (TREE_TYPE (decl
), get_name (decl
));
12345 gimplify_omp_ctxp
= ctx
;
12346 omp_add_variable (gimplify_omp_ctxp
, var
, GOVD_PRIVATE
| GOVD_SEEN
);
12347 TREE_OPERAND (t
, 0) = var
;
12348 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12349 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12350 TREE_OPERAND (TREE_OPERAND (t
, 1), 0) = var
;
12351 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
12352 for (int j
= i
+ 1;
12353 j
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); j
++)
12355 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), j
);
12356 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12357 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12358 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12360 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12361 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12363 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), j
);
12364 gcc_assert (COMPARISON_CLASS_P (t
));
12365 if (TREE_CODE (TREE_OPERAND (t
, 1)) == TREE_VEC
12366 && TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) == decl
)
12368 TREE_OPERAND (t
, 1) = copy_node (TREE_OPERAND (t
, 1));
12369 TREE_VEC_ELT (TREE_OPERAND (t
, 1), 0) = var
;
12374 gimplify_adjust_omp_clauses (pre_p
, for_body
,
12375 &OMP_FOR_CLAUSES (orig_for_stmt
),
12376 TREE_CODE (orig_for_stmt
));
12379 switch (TREE_CODE (orig_for_stmt
))
12381 case OMP_FOR
: kind
= GF_OMP_FOR_KIND_FOR
; break;
12382 case OMP_SIMD
: kind
= GF_OMP_FOR_KIND_SIMD
; break;
12383 case OMP_DISTRIBUTE
: kind
= GF_OMP_FOR_KIND_DISTRIBUTE
; break;
12384 case OMP_TASKLOOP
: kind
= GF_OMP_FOR_KIND_TASKLOOP
; break;
12385 case OACC_LOOP
: kind
= GF_OMP_FOR_KIND_OACC_LOOP
; break;
12387 gcc_unreachable ();
12389 if (loop_p
&& kind
== GF_OMP_FOR_KIND_SIMD
)
12391 gimplify_seq_add_seq (pre_p
, for_pre_body
);
12392 for_pre_body
= NULL
;
12394 gfor
= gimple_build_omp_for (for_body
, kind
, OMP_FOR_CLAUSES (orig_for_stmt
),
12395 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)),
12397 if (orig_for_stmt
!= for_stmt
)
12398 gimple_omp_for_set_combined_p (gfor
, true);
12399 if (gimplify_omp_ctxp
12400 && (gimplify_omp_ctxp
->combined_loop
12401 || (gimplify_omp_ctxp
->region_type
== ORT_COMBINED_PARALLEL
12402 && gimplify_omp_ctxp
->outer_context
12403 && gimplify_omp_ctxp
->outer_context
->combined_loop
)))
12405 gimple_omp_for_set_combined_into_p (gfor
, true);
12406 if (gimplify_omp_ctxp
->combined_loop
)
12407 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_SIMD
);
12409 gcc_assert (TREE_CODE (orig_for_stmt
) == OMP_FOR
);
12412 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12414 t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12415 gimple_omp_for_set_index (gfor
, i
, TREE_OPERAND (t
, 0));
12416 gimple_omp_for_set_initial (gfor
, i
, TREE_OPERAND (t
, 1));
12417 t
= TREE_VEC_ELT (OMP_FOR_COND (for_stmt
), i
);
12418 gimple_omp_for_set_cond (gfor
, i
, TREE_CODE (t
));
12419 gimple_omp_for_set_final (gfor
, i
, TREE_OPERAND (t
, 1));
12420 t
= TREE_VEC_ELT (OMP_FOR_INCR (for_stmt
), i
);
12421 gimple_omp_for_set_incr (gfor
, i
, TREE_OPERAND (t
, 1));
12424 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12425 constructs with GIMPLE_OMP_TASK sandwiched in between them.
12426 The outer taskloop stands for computing the number of iterations,
12427 counts for collapsed loops and holding taskloop specific clauses.
12428 The task construct stands for the effect of data sharing on the
12429 explicit task it creates and the inner taskloop stands for expansion
12430 of the static loop inside of the explicit task construct. */
12431 if (TREE_CODE (orig_for_stmt
) == OMP_TASKLOOP
)
12433 tree
*gfor_clauses_ptr
= gimple_omp_for_clauses_ptr (gfor
);
12434 tree task_clauses
= NULL_TREE
;
12435 tree c
= *gfor_clauses_ptr
;
12436 tree
*gtask_clauses_ptr
= &task_clauses
;
12437 tree outer_for_clauses
= NULL_TREE
;
12438 tree
*gforo_clauses_ptr
= &outer_for_clauses
;
12439 bitmap lastprivate_uids
= NULL
;
12440 if (omp_find_clause (c
, OMP_CLAUSE_ALLOCATE
))
12442 c
= omp_find_clause (c
, OMP_CLAUSE_LASTPRIVATE
);
12445 lastprivate_uids
= BITMAP_ALLOC (NULL
);
12446 for (; c
; c
= omp_find_clause (OMP_CLAUSE_CHAIN (c
),
12447 OMP_CLAUSE_LASTPRIVATE
))
12448 bitmap_set_bit (lastprivate_uids
,
12449 DECL_UID (OMP_CLAUSE_DECL (c
)));
12451 c
= *gfor_clauses_ptr
;
12453 for (; c
; c
= OMP_CLAUSE_CHAIN (c
))
12454 switch (OMP_CLAUSE_CODE (c
))
12456 /* These clauses are allowed on task, move them there. */
12457 case OMP_CLAUSE_SHARED
:
12458 case OMP_CLAUSE_FIRSTPRIVATE
:
12459 case OMP_CLAUSE_DEFAULT
:
12460 case OMP_CLAUSE_IF
:
12461 case OMP_CLAUSE_UNTIED
:
12462 case OMP_CLAUSE_FINAL
:
12463 case OMP_CLAUSE_MERGEABLE
:
12464 case OMP_CLAUSE_PRIORITY
:
12465 case OMP_CLAUSE_REDUCTION
:
12466 case OMP_CLAUSE_IN_REDUCTION
:
12467 *gtask_clauses_ptr
= c
;
12468 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12470 case OMP_CLAUSE_PRIVATE
:
12471 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c
))
12473 /* We want private on outer for and firstprivate
12476 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12477 OMP_CLAUSE_FIRSTPRIVATE
);
12478 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12479 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
12481 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12482 *gforo_clauses_ptr
= c
;
12483 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12487 *gtask_clauses_ptr
= c
;
12488 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12491 /* These clauses go into outer taskloop clauses. */
12492 case OMP_CLAUSE_GRAINSIZE
:
12493 case OMP_CLAUSE_NUM_TASKS
:
12494 case OMP_CLAUSE_NOGROUP
:
12495 *gforo_clauses_ptr
= c
;
12496 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12498 /* Collapse clause we duplicate on both taskloops. */
12499 case OMP_CLAUSE_COLLAPSE
:
12500 *gfor_clauses_ptr
= c
;
12501 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12502 *gforo_clauses_ptr
= copy_node (c
);
12503 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12505 /* For lastprivate, keep the clause on inner taskloop, and add
12506 a shared clause on task. If the same decl is also firstprivate,
12507 add also firstprivate clause on the inner taskloop. */
12508 case OMP_CLAUSE_LASTPRIVATE
:
12509 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12511 /* For taskloop C++ lastprivate IVs, we want:
12512 1) private on outer taskloop
12513 2) firstprivate and shared on task
12514 3) lastprivate on inner taskloop */
12516 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12517 OMP_CLAUSE_FIRSTPRIVATE
);
12518 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12519 lang_hooks
.decls
.omp_finish_clause (*gtask_clauses_ptr
, NULL
,
12521 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12522 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
) = 1;
12523 *gforo_clauses_ptr
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12524 OMP_CLAUSE_PRIVATE
);
12525 OMP_CLAUSE_DECL (*gforo_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12526 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr
) = 1;
12527 TREE_TYPE (*gforo_clauses_ptr
) = TREE_TYPE (c
);
12528 gforo_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr
);
12530 *gfor_clauses_ptr
= c
;
12531 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12533 = build_omp_clause (OMP_CLAUSE_LOCATION (c
), OMP_CLAUSE_SHARED
);
12534 OMP_CLAUSE_DECL (*gtask_clauses_ptr
) = OMP_CLAUSE_DECL (c
);
12535 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c
))
12536 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr
) = 1;
12538 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12540 /* Allocate clause we duplicate on task and inner taskloop
12541 if the decl is lastprivate, otherwise just put on task. */
12542 case OMP_CLAUSE_ALLOCATE
:
12543 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)
12544 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
)))
12546 /* Additionally, put firstprivate clause on task
12547 for the allocator if it is not constant. */
12549 = build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12550 OMP_CLAUSE_FIRSTPRIVATE
);
12551 OMP_CLAUSE_DECL (*gtask_clauses_ptr
)
12552 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c
);
12553 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12555 if (lastprivate_uids
12556 && bitmap_bit_p (lastprivate_uids
,
12557 DECL_UID (OMP_CLAUSE_DECL (c
))))
12559 *gfor_clauses_ptr
= c
;
12560 gfor_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12561 *gtask_clauses_ptr
= copy_node (c
);
12562 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr
);
12566 *gtask_clauses_ptr
= c
;
12567 gtask_clauses_ptr
= &OMP_CLAUSE_CHAIN (c
);
12571 gcc_unreachable ();
12573 *gfor_clauses_ptr
= NULL_TREE
;
12574 *gtask_clauses_ptr
= NULL_TREE
;
12575 *gforo_clauses_ptr
= NULL_TREE
;
12576 BITMAP_FREE (lastprivate_uids
);
12577 g
= gimple_build_bind (NULL_TREE
, gfor
, NULL_TREE
);
12578 g
= gimple_build_omp_task (g
, task_clauses
, NULL_TREE
, NULL_TREE
,
12579 NULL_TREE
, NULL_TREE
, NULL_TREE
);
12580 gimple_omp_task_set_taskloop_p (g
, true);
12581 g
= gimple_build_bind (NULL_TREE
, g
, NULL_TREE
);
12583 = gimple_build_omp_for (g
, GF_OMP_FOR_KIND_TASKLOOP
, outer_for_clauses
,
12584 gimple_omp_for_collapse (gfor
),
12585 gimple_omp_for_pre_body (gfor
));
12586 gimple_omp_for_set_pre_body (gfor
, NULL
);
12587 gimple_omp_for_set_combined_p (gforo
, true);
12588 gimple_omp_for_set_combined_into_p (gfor
, true);
12589 for (i
= 0; i
< (int) gimple_omp_for_collapse (gfor
); i
++)
12591 tree type
= TREE_TYPE (gimple_omp_for_index (gfor
, i
));
12592 tree v
= create_tmp_var (type
);
12593 gimple_omp_for_set_index (gforo
, i
, v
);
12594 t
= unshare_expr (gimple_omp_for_initial (gfor
, i
));
12595 gimple_omp_for_set_initial (gforo
, i
, t
);
12596 gimple_omp_for_set_cond (gforo
, i
,
12597 gimple_omp_for_cond (gfor
, i
));
12598 t
= unshare_expr (gimple_omp_for_final (gfor
, i
));
12599 gimple_omp_for_set_final (gforo
, i
, t
);
12600 t
= unshare_expr (gimple_omp_for_incr (gfor
, i
));
12601 gcc_assert (TREE_OPERAND (t
, 0) == gimple_omp_for_index (gfor
, i
));
12602 TREE_OPERAND (t
, 0) = v
;
12603 gimple_omp_for_set_incr (gforo
, i
, t
);
12604 t
= build_omp_clause (input_location
, OMP_CLAUSE_PRIVATE
);
12605 OMP_CLAUSE_DECL (t
) = v
;
12606 OMP_CLAUSE_CHAIN (t
) = gimple_omp_for_clauses (gforo
);
12607 gimple_omp_for_set_clauses (gforo
, t
);
12608 if (OMP_FOR_NON_RECTANGULAR (for_stmt
))
12610 tree
*p1
= NULL
, *p2
= NULL
;
12611 t
= gimple_omp_for_initial (gforo
, i
);
12612 if (TREE_CODE (t
) == TREE_VEC
)
12613 p1
= &TREE_VEC_ELT (t
, 0);
12614 t
= gimple_omp_for_final (gforo
, i
);
12615 if (TREE_CODE (t
) == TREE_VEC
)
12618 p2
= &TREE_VEC_ELT (t
, 0);
12620 p1
= &TREE_VEC_ELT (t
, 0);
12625 for (j
= 0; j
< i
; j
++)
12626 if (*p1
== gimple_omp_for_index (gfor
, j
))
12628 *p1
= gimple_omp_for_index (gforo
, j
);
12633 gcc_assert (j
< i
);
12637 gimplify_seq_add_stmt (pre_p
, gforo
);
12640 gimplify_seq_add_stmt (pre_p
, gfor
);
12642 if (TREE_CODE (orig_for_stmt
) == OMP_FOR
)
12644 struct gimplify_omp_ctx
*ctx
= gimplify_omp_ctxp
;
12645 unsigned lastprivate_conditional
= 0;
12647 && (ctx
->region_type
== ORT_TARGET_DATA
12648 || ctx
->region_type
== ORT_TASKGROUP
))
12649 ctx
= ctx
->outer_context
;
12650 if (ctx
&& (ctx
->region_type
& ORT_PARALLEL
) != 0)
12651 for (tree c
= gimple_omp_for_clauses (gfor
);
12652 c
; c
= OMP_CLAUSE_CHAIN (c
))
12653 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12654 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12655 ++lastprivate_conditional
;
12656 if (lastprivate_conditional
)
12658 struct omp_for_data fd
;
12659 omp_extract_for_data (gfor
, &fd
, NULL
);
12660 tree type
= build_array_type_nelts (unsigned_type_for (fd
.iter_type
),
12661 lastprivate_conditional
);
12662 tree var
= create_tmp_var_raw (type
);
12663 tree c
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE__CONDTEMP_
);
12664 OMP_CLAUSE_DECL (c
) = var
;
12665 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12666 gimple_omp_for_set_clauses (gfor
, c
);
12667 omp_add_variable (ctx
, var
, GOVD_CONDTEMP
| GOVD_SEEN
);
12670 else if (TREE_CODE (orig_for_stmt
) == OMP_SIMD
)
12672 unsigned lastprivate_conditional
= 0;
12673 for (tree c
= gimple_omp_for_clauses (gfor
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12674 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_LASTPRIVATE
12675 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c
))
12676 ++lastprivate_conditional
;
12677 if (lastprivate_conditional
)
12679 struct omp_for_data fd
;
12680 omp_extract_for_data (gfor
, &fd
, NULL
);
12681 tree type
= unsigned_type_for (fd
.iter_type
);
12682 while (lastprivate_conditional
--)
12684 tree c
= build_omp_clause (UNKNOWN_LOCATION
,
12685 OMP_CLAUSE__CONDTEMP_
);
12686 OMP_CLAUSE_DECL (c
) = create_tmp_var (type
);
12687 OMP_CLAUSE_CHAIN (c
) = gimple_omp_for_clauses (gfor
);
12688 gimple_omp_for_set_clauses (gfor
, c
);
12693 if (ret
!= GS_ALL_DONE
)
12695 *expr_p
= NULL_TREE
;
12696 return GS_ALL_DONE
;
/* NOTE(review): this region is a line-shredded extraction of GCC's
   gimplify.c; original line numbers are fused into the text and whole
   lines (braces, the trailing "return NULL_TREE;") appear to have been
   dropped.  Code below is reproduced byte-for-byte; only comments are
   added.  Restore from the upstream file before compiling.  */
/* walk_tree callback.  DATA is a two-element tree array of OMP
   reduction clauses; occurrences of d[0]'s (decl) placeholder trees are
   rewritten to d[1]'s, and *walk_subtrees is cleared so the replaced
   node is not walked into.  */
12699 /* Helper for gimplify_omp_loop, called through walk_tree. */
12702 replace_reduction_placeholders (tree
*tp
, int *walk_subtrees
, void *data
)
12706 tree
*d
= (tree
*) data
;
12707 if (*tp
== OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[0]))
12709 *tp
= OMP_CLAUSE_REDUCTION_PLACEHOLDER (d
[1]);
12710 *walk_subtrees
= 0;
12712 else if (*tp
== OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[0]))
12714 *tp
= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d
[1]);
12715 *walk_subtrees
= 0;
/* NOTE(review): shredded extraction — original line numbers are fused
   into the text and many structural lines (braces, break statements,
   declarations such as "int i;", "tree nc") were dropped, so this body
   is not compilable as-is.  Reproduced byte-for-byte with comments
   only; restore from the upstream gimplify.c.  */
/* Lowers an OMP_LOOP: resolves the implicit order(concurrent) and the
   bind clause (thread/parallel/teams, validated against the enclosing
   gimplify_omp_ctxp region), sanity-checks reduction/lastprivate
   clauses, then rewrites the construct as OMP_SIMD optionally wrapped
   in generated OMP_FOR / OMP_PARALLEL / OMP_DISTRIBUTE levels (one
   pass per level, per `last`), duplicating/splitting clauses onto the
   appropriate level, and finally recurses into gimplify_omp_for.  */
12721 /* Gimplify the gross structure of an OMP_LOOP statement. */
12723 static enum gimplify_status
12724 gimplify_omp_loop (tree
*expr_p
, gimple_seq
*pre_p
)
12726 tree for_stmt
= *expr_p
;
12727 tree clauses
= OMP_FOR_CLAUSES (for_stmt
);
12728 struct gimplify_omp_ctx
*octx
= gimplify_omp_ctxp
;
12729 enum omp_clause_bind_kind kind
= OMP_CLAUSE_BIND_THREAD
;
12732 /* If order is not present, the behavior is as if order(concurrent)
12734 tree order
= omp_find_clause (clauses
, OMP_CLAUSE_ORDER
);
12735 if (order
== NULL_TREE
)
12737 order
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_ORDER
);
12738 OMP_CLAUSE_CHAIN (order
) = clauses
;
12739 OMP_FOR_CLAUSES (for_stmt
) = clauses
= order
;
12742 tree bind
= omp_find_clause (clauses
, OMP_CLAUSE_BIND
);
12743 if (bind
== NULL_TREE
)
12745 if (!flag_openmp
) /* flag_openmp_simd */
12747 else if (octx
&& (octx
->region_type
& ORT_TEAMS
) != 0)
12748 kind
= OMP_CLAUSE_BIND_TEAMS
;
12749 else if (octx
&& (octx
->region_type
& ORT_PARALLEL
) != 0)
12750 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12753 for (; octx
; octx
= octx
->outer_context
)
12755 if ((octx
->region_type
& ORT_ACC
) != 0
12756 || octx
->region_type
== ORT_NONE
12757 || octx
->region_type
== ORT_IMPLICIT_TARGET
)
12761 if (octx
== NULL
&& !in_omp_construct
)
12762 error_at (EXPR_LOCATION (for_stmt
),
12763 "%<bind%> clause not specified on a %<loop%> "
12764 "construct not nested inside another OpenMP construct");
12766 bind
= build_omp_clause (UNKNOWN_LOCATION
, OMP_CLAUSE_BIND
);
12767 OMP_CLAUSE_CHAIN (bind
) = clauses
;
12768 OMP_CLAUSE_BIND_KIND (bind
) = kind
;
12769 OMP_FOR_CLAUSES (for_stmt
) = bind
;
12772 switch (OMP_CLAUSE_BIND_KIND (bind
))
12774 case OMP_CLAUSE_BIND_THREAD
:
12776 case OMP_CLAUSE_BIND_PARALLEL
:
12777 if (!flag_openmp
) /* flag_openmp_simd */
12779 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12782 for (; octx
; octx
= octx
->outer_context
)
12783 if (octx
->region_type
== ORT_SIMD
12784 && omp_find_clause (octx
->clauses
, OMP_CLAUSE_BIND
) == NULL_TREE
)
12786 error_at (EXPR_LOCATION (for_stmt
),
12787 "%<bind(parallel)%> on a %<loop%> construct nested "
12788 "inside %<simd%> construct");
12789 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12792 kind
= OMP_CLAUSE_BIND_PARALLEL
;
12794 case OMP_CLAUSE_BIND_TEAMS
:
12795 if (!flag_openmp
) /* flag_openmp_simd */
12797 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12801 && octx
->region_type
!= ORT_IMPLICIT_TARGET
12802 && octx
->region_type
!= ORT_NONE
12803 && (octx
->region_type
& ORT_TEAMS
) == 0)
12804 || in_omp_construct
)
12806 error_at (EXPR_LOCATION (for_stmt
),
12807 "%<bind(teams)%> on a %<loop%> region not strictly "
12808 "nested inside of a %<teams%> region");
12809 OMP_CLAUSE_BIND_KIND (bind
) = OMP_CLAUSE_BIND_THREAD
;
12812 kind
= OMP_CLAUSE_BIND_TEAMS
;
12815 gcc_unreachable ();
/* Clause validation: inscan/task reduction modifiers are diagnosed and
   cleared; a lastprivate decl must name one of the loop iterators
   (checked against OMP_FOR_INIT / OMP_FOR_ORIG_DECLS) or the clause is
   removed with an error.  */
12818 for (tree
*pc
= &OMP_FOR_CLAUSES (for_stmt
); *pc
; )
12819 switch (OMP_CLAUSE_CODE (*pc
))
12821 case OMP_CLAUSE_REDUCTION
:
12822 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc
))
12824 error_at (OMP_CLAUSE_LOCATION (*pc
),
12825 "%<inscan%> %<reduction%> clause on "
12826 "%qs construct", "loop");
12827 OMP_CLAUSE_REDUCTION_INSCAN (*pc
) = 0;
12829 if (OMP_CLAUSE_REDUCTION_TASK (*pc
))
12831 error_at (OMP_CLAUSE_LOCATION (*pc
),
12832 "invalid %<task%> reduction modifier on construct "
12833 "other than %<parallel%>, %qs or %<sections%>",
12834 lang_GNU_Fortran () ? "do" : "for");
12835 OMP_CLAUSE_REDUCTION_TASK (*pc
) = 0;
12837 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12839 case OMP_CLAUSE_LASTPRIVATE
:
12840 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12842 tree t
= TREE_VEC_ELT (OMP_FOR_INIT (for_stmt
), i
);
12843 gcc_assert (TREE_CODE (t
) == MODIFY_EXPR
);
12844 if (OMP_CLAUSE_DECL (*pc
) == TREE_OPERAND (t
, 0))
12846 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12847 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12849 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
),
12852 tree orig
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12853 if (OMP_CLAUSE_DECL (*pc
) == TREE_PURPOSE (orig
))
12857 if (i
== TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)))
12859 error_at (OMP_CLAUSE_LOCATION (*pc
),
12860 "%<lastprivate%> clause on a %<loop%> construct refers "
12861 "to a variable %qD which is not the loop iterator",
12862 OMP_CLAUSE_DECL (*pc
));
12863 *pc
= OMP_CLAUSE_CHAIN (*pc
);
12866 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12869 pc
= &OMP_CLAUSE_CHAIN (*pc
);
/* The loop itself becomes an OMP_SIMD; `last` selects how many wrapper
   levels to synthesize below (0 = none, 1 = parallel for, 2 = teams
   distribute).  */
12873 TREE_SET_CODE (for_stmt
, OMP_SIMD
);
12878 case OMP_CLAUSE_BIND_THREAD
: last
= 0; break;
12879 case OMP_CLAUSE_BIND_PARALLEL
: last
= 1; break;
12880 case OMP_CLAUSE_BIND_TEAMS
: last
= 2; break;
12882 for (int pass
= 1; pass
<= last
; pass
++)
12886 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, NULL
, NULL
);
12887 append_to_statement_list (*expr_p
, &BIND_EXPR_BODY (bind
));
12888 *expr_p
= make_node (OMP_PARALLEL
);
12889 TREE_TYPE (*expr_p
) = void_type_node
;
12890 OMP_PARALLEL_BODY (*expr_p
) = bind
;
12891 OMP_PARALLEL_COMBINED (*expr_p
) = 1;
12892 SET_EXPR_LOCATION (*expr_p
, EXPR_LOCATION (for_stmt
));
12893 tree
*pc
= &OMP_PARALLEL_CLAUSES (*expr_p
);
12894 for (i
= 0; i
< TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt
)); i
++)
12895 if (OMP_FOR_ORIG_DECLS (for_stmt
)
12896 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
))
12899 tree elt
= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt
), i
);
12900 if (TREE_PURPOSE (elt
) && TREE_VALUE (elt
))
12902 *pc
= build_omp_clause (UNKNOWN_LOCATION
,
12903 OMP_CLAUSE_FIRSTPRIVATE
);
12904 OMP_CLAUSE_DECL (*pc
) = TREE_VALUE (elt
);
12905 pc
= &OMP_CLAUSE_CHAIN (*pc
);
/* Build the wrapper construct for this pass and distribute/duplicate
   the original clauses onto it (bind/order/collapse are copied;
   lastprivate and reduction get unshared copies with placeholder trees
   re-pointed via replace_reduction_placeholders).  */
12909 tree t
= make_node (pass
== 2 ? OMP_DISTRIBUTE
: OMP_FOR
);
12910 tree
*pc
= &OMP_FOR_CLAUSES (t
);
12911 TREE_TYPE (t
) = void_type_node
;
12912 OMP_FOR_BODY (t
) = *expr_p
;
12913 SET_EXPR_LOCATION (t
, EXPR_LOCATION (for_stmt
));
12914 for (tree c
= OMP_FOR_CLAUSES (for_stmt
); c
; c
= OMP_CLAUSE_CHAIN (c
))
12915 switch (OMP_CLAUSE_CODE (c
))
12917 case OMP_CLAUSE_BIND
:
12918 case OMP_CLAUSE_ORDER
:
12919 case OMP_CLAUSE_COLLAPSE
:
12920 *pc
= copy_node (c
);
12921 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12923 case OMP_CLAUSE_PRIVATE
:
12924 case OMP_CLAUSE_FIRSTPRIVATE
:
12925 /* Only needed on innermost. */
12927 case OMP_CLAUSE_LASTPRIVATE
:
12928 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
) && pass
!= last
)
12930 *pc
= build_omp_clause (OMP_CLAUSE_LOCATION (c
),
12931 OMP_CLAUSE_FIRSTPRIVATE
);
12932 OMP_CLAUSE_DECL (*pc
) = OMP_CLAUSE_DECL (c
);
12933 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
12934 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12936 *pc
= copy_node (c
);
12937 OMP_CLAUSE_LASTPRIVATE_STMT (*pc
) = NULL_TREE
;
12938 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12939 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c
))
12942 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc
) = 1;
12944 lang_hooks
.decls
.omp_finish_clause (*pc
, NULL
, false);
12945 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc
) = 0;
12947 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12949 case OMP_CLAUSE_REDUCTION
:
12950 *pc
= copy_node (c
);
12951 OMP_CLAUSE_DECL (*pc
) = unshare_expr (OMP_CLAUSE_DECL (c
));
12952 TREE_TYPE (*pc
) = unshare_expr (TREE_TYPE (c
));
12953 OMP_CLAUSE_REDUCTION_INIT (*pc
)
12954 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c
));
12955 OMP_CLAUSE_REDUCTION_MERGE (*pc
)
12956 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c
));
12957 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
))
12959 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc
)
12960 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c
));
12961 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
))
12962 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc
)
12963 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c
));
12965 tree data
[2] = { c
, nc
};
12966 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc
),
12967 replace_reduction_placeholders
,
12969 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc
),
12970 replace_reduction_placeholders
,
12973 pc
= &OMP_CLAUSE_CHAIN (*pc
);
12976 gcc_unreachable ();
/* Hand the rewritten construct to the regular OMP for gimplifier.  */
12981 return gimplify_omp_for (expr_p
, pre_p
);
/* NOTE(review): shredded extraction (dropped lines include the return
   type and the OMP_TEAMS case returning *tp); reproduced byte-for-byte
   with comments only.  */
/* walk_tree callback: walks only through STATEMENT_LISTs (and, in the
   dropped cases, BIND_EXPR bodies) looking for an OMP_TEAMS node in an
   OMP_TARGET body; subtree walking is off by default.  */
12985 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12986 of OMP_TARGET's body. */
12989 find_omp_teams (tree
*tp
, int *walk_subtrees
, void *)
12991 *walk_subtrees
= 0;
12992 switch (TREE_CODE (*tp
))
12997 case STATEMENT_LIST
:
12998 *walk_subtrees
= 1;
/* NOTE(review): shredded extraction — case labels such as VAR_DECL /
   TARGET_EXPR and most return statements were dropped; reproduced
   byte-for-byte with comments only.  */
/* walk_tree predicate: returns non-NULL (reject) for any subexpression
   that cannot be safely evaluated on the host before entering the
   target region — non-integral types, value-exprs, TLS, volatile or
   side-effecting trees, non-firstprivate/non-always-mapped decls —
   while allowing integral decls known firstprivate/mapped-always-to in
   gimplify_omp_ctxp, plain integral arithmetic, conversions and
   comparisons.  */
13006 /* Helper function of optimize_target_teams, determine if the expression
13007 can be computed safely before the target construct on the host. */
13010 computable_teams_clause (tree
*tp
, int *walk_subtrees
, void *)
13016 *walk_subtrees
= 0;
13019 switch (TREE_CODE (*tp
))
13024 *walk_subtrees
= 0;
13025 if (error_operand_p (*tp
)
13026 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp
))
13027 || DECL_HAS_VALUE_EXPR_P (*tp
)
13028 || DECL_THREAD_LOCAL_P (*tp
)
13029 || TREE_SIDE_EFFECTS (*tp
)
13030 || TREE_THIS_VOLATILE (*tp
))
13032 if (is_global_var (*tp
)
13033 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp
))
13034 || lookup_attribute ("omp declare target link",
13035 DECL_ATTRIBUTES (*tp
))))
13038 && !DECL_SEEN_IN_BIND_EXPR_P (*tp
)
13039 && !is_global_var (*tp
)
13040 && decl_function_context (*tp
) == current_function_decl
)
13042 n
= splay_tree_lookup (gimplify_omp_ctxp
->variables
,
13043 (splay_tree_key
) *tp
);
13046 if (gimplify_omp_ctxp
->defaultmap
[GDMK_SCALAR
] & GOVD_FIRSTPRIVATE
)
13050 else if (n
->value
& GOVD_LOCAL
)
13052 else if (n
->value
& GOVD_FIRSTPRIVATE
)
13054 else if ((n
->value
& (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13055 == (GOVD_MAP
| GOVD_MAP_ALWAYS_TO
))
13059 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13063 if (TARGET_EXPR_INITIAL (*tp
)
13064 || TREE_CODE (TARGET_EXPR_SLOT (*tp
)) != VAR_DECL
)
13066 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp
),
13067 walk_subtrees
, NULL
);
13068 /* Allow some reasonable subset of integral arithmetics. */
13072 case TRUNC_DIV_EXPR
:
13073 case CEIL_DIV_EXPR
:
13074 case FLOOR_DIV_EXPR
:
13075 case ROUND_DIV_EXPR
:
13076 case TRUNC_MOD_EXPR
:
13077 case CEIL_MOD_EXPR
:
13078 case FLOOR_MOD_EXPR
:
13079 case ROUND_MOD_EXPR
:
13081 case EXACT_DIV_EXPR
:
13092 case NON_LVALUE_EXPR
:
13094 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp
)))
13097 /* And disallow anything else, except for comparisons. */
13099 if (COMPARISON_CLASS_P (*tp
))
/* NOTE(review): shredded extraction — the function's return type,
   several declarations (c, expr, p) and loop/branch structure lines
   were dropped; reproduced byte-for-byte with comments only.  */
13105 /* Try to determine if the num_teams and/or thread_limit expressions
13106 can have their values determined already before entering the
13108 INTEGER_CSTs trivially are,
13109 integral decls that are firstprivate (explicitly or implicitly)
13110 or explicitly map(always, to:) or map(always, tofrom:) on the target
13111 region too, and expressions involving simple arithmetics on those
13112 too, function calls are not ok, dereferencing something neither etc.
13113 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13114 EXPR based on what we find:
13115 0 stands for clause not specified at all, use implementation default
13116 -1 stands for value that can't be determined easily before entering
13117 the target construct.
13118 If teams construct is not present at all, use 1 for num_teams
13119 and 0 for thread_limit (only one team is involved, and the thread
13120 limit is implementation defined. */
13123 optimize_target_teams (tree target
, gimple_seq
*pre_p
)
13125 tree body
= OMP_BODY (target
);
13126 tree teams
= walk_tree (&body
, find_omp_teams
, NULL
, NULL
);
13127 tree num_teams
= integer_zero_node
;
13128 tree thread_limit
= integer_zero_node
;
13129 location_t num_teams_loc
= EXPR_LOCATION (target
);
13130 location_t thread_limit_loc
= EXPR_LOCATION (target
);
13132 struct gimplify_omp_ctx
*target_ctx
= gimplify_omp_ctxp
;
13134 if (teams
== NULL_TREE
)
13135 num_teams
= integer_one_node
;
/* Scan the teams clauses; `p` (declaration dropped by extraction)
   points at num_teams or thread_limit as appropriate.  */
13137 for (c
= OMP_TEAMS_CLAUSES (teams
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13139 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_NUM_TEAMS
)
13142 num_teams_loc
= OMP_CLAUSE_LOCATION (c
);
13144 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_THREAD_LIMIT
)
13147 thread_limit_loc
= OMP_CLAUSE_LOCATION (c
);
13151 expr
= OMP_CLAUSE_OPERAND (c
, 0);
13152 if (TREE_CODE (expr
) == INTEGER_CST
)
/* Non-NULL from computable_teams_clause means "not host-computable":
   record -1.  Otherwise gimplify the expression in the enclosing
   (outer) context so it is evaluated before the target region.  */
13157 if (walk_tree (&expr
, computable_teams_clause
, NULL
, NULL
))
13159 *p
= integer_minus_one_node
;
13163 gimplify_omp_ctxp
= gimplify_omp_ctxp
->outer_context
;
13164 if (gimplify_expr (p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
, false)
13167 gimplify_omp_ctxp
= target_ctx
;
13168 *p
= integer_minus_one_node
;
13171 gimplify_omp_ctxp
= target_ctx
;
13172 if (!DECL_P (expr
) && TREE_CODE (expr
) != TARGET_EXPR
)
13173 OMP_CLAUSE_OPERAND (c
, 0) = *p
;
/* Attach the computed NUM_TEAMS / THREAD_LIMIT clauses to the target
   construct itself.  */
13175 c
= build_omp_clause (thread_limit_loc
, OMP_CLAUSE_THREAD_LIMIT
);
13176 OMP_CLAUSE_THREAD_LIMIT_EXPR (c
) = thread_limit
;
13177 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
13178 OMP_TARGET_CLAUSES (target
) = c
;
13179 c
= build_omp_clause (num_teams_loc
, OMP_CLAUSE_NUM_TEAMS
);
13180 OMP_CLAUSE_NUM_TEAMS_EXPR (c
) = num_teams
;
13181 OMP_CLAUSE_CHAIN (c
) = OMP_TARGET_CLAUSES (target
);
13182 OMP_TARGET_CLAUSES (target
) = c
;
/* NOTE(review): shredded extraction — case labels (OMP_SECTIONS,
   OMP_SINGLE, OMP_TARGET, OMP_TEAMS, OACC_* in the first switch),
   break statements and braces were dropped; reproduced byte-for-byte
   with comments only.  */
/* Common gimplifier for the "workshare-like" OMP/OACC constructs:
   maps the tree code to an omp_region_type, scans/adjusts clauses,
   gimplifies the body (in its own gimplify context for target/teams
   regions, with a GIMPLE_TRY_FINALLY end-data cleanup call for data
   regions), then builds the corresponding GIMPLE_OMP_* statement.  */
13185 /* Gimplify the gross structure of several OMP constructs. */
13188 gimplify_omp_workshare (tree
*expr_p
, gimple_seq
*pre_p
)
13190 tree expr
= *expr_p
;
13192 gimple_seq body
= NULL
;
13193 enum omp_region_type ort
;
13195 switch (TREE_CODE (expr
))
13199 ort
= ORT_WORKSHARE
;
13202 ort
= OMP_TARGET_COMBINED (expr
) ? ORT_COMBINED_TARGET
: ORT_TARGET
;
13205 ort
= ORT_ACC_KERNELS
;
13207 case OACC_PARALLEL
:
13208 ort
= ORT_ACC_PARALLEL
;
13211 ort
= ORT_ACC_SERIAL
;
13214 ort
= ORT_ACC_DATA
;
13216 case OMP_TARGET_DATA
:
13217 ort
= ORT_TARGET_DATA
;
13220 ort
= OMP_TEAMS_COMBINED (expr
) ? ORT_COMBINED_TEAMS
: ORT_TEAMS
;
13221 if (gimplify_omp_ctxp
== NULL
13222 || gimplify_omp_ctxp
->region_type
== ORT_IMPLICIT_TARGET
)
13223 ort
= (enum omp_region_type
) (ort
| ORT_HOST_TEAMS
);
13225 case OACC_HOST_DATA
:
13226 ort
= ORT_ACC_HOST_DATA
;
13229 gcc_unreachable ();
/* in_omp_construct is reset for non-ACC regions and restored below.  */
13232 bool save_in_omp_construct
= in_omp_construct
;
13233 if ((ort
& ORT_ACC
) == 0)
13234 in_omp_construct
= false;
13235 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr
), pre_p
, ort
,
13237 if (TREE_CODE (expr
) == OMP_TARGET
)
13238 optimize_target_teams (expr
, pre_p
);
13239 if ((ort
& (ORT_TARGET
| ORT_TARGET_DATA
)) != 0
13240 || (ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
13242 push_gimplify_context ();
13243 gimple
*g
= gimplify_and_return_first (OMP_BODY (expr
), &body
);
13244 if (gimple_code (g
) == GIMPLE_BIND
)
13245 pop_gimplify_context (g
);
13247 pop_gimplify_context (NULL
);
13248 if ((ort
& ORT_TARGET_DATA
) != 0)
13250 enum built_in_function end_ix
;
13251 switch (TREE_CODE (expr
))
13254 case OACC_HOST_DATA
:
13255 end_ix
= BUILT_IN_GOACC_DATA_END
;
13257 case OMP_TARGET_DATA
:
13258 end_ix
= BUILT_IN_GOMP_TARGET_END_DATA
;
13261 gcc_unreachable ();
/* Wrap the data-region body in try/finally so the runtime end-data
   call runs on every exit path.  */
13263 tree fn
= builtin_decl_explicit (end_ix
);
13264 g
= gimple_build_call (fn
, 0);
13265 gimple_seq cleanup
= NULL
;
13266 gimple_seq_add_stmt (&cleanup
, g
);
13267 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
13269 gimple_seq_add_stmt (&body
, g
);
13273 gimplify_and_add (OMP_BODY (expr
), &body
);
13274 gimplify_adjust_omp_clauses (pre_p
, body
, &OMP_CLAUSES (expr
),
13276 in_omp_construct
= save_in_omp_construct
;
13278 switch (TREE_CODE (expr
))
13281 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_DATA
,
13282 OMP_CLAUSES (expr
));
13284 case OACC_HOST_DATA
:
13285 if (omp_find_clause (OMP_CLAUSES (expr
), OMP_CLAUSE_IF_PRESENT
))
13287 for (tree c
= OMP_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13288 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_USE_DEVICE_PTR
)
13289 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c
) = 1;
13292 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_HOST_DATA
,
13293 OMP_CLAUSES (expr
));
13296 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_KERNELS
,
13297 OMP_CLAUSES (expr
));
13299 case OACC_PARALLEL
:
13300 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_PARALLEL
,
13301 OMP_CLAUSES (expr
));
13304 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_OACC_SERIAL
,
13305 OMP_CLAUSES (expr
));
13308 stmt
= gimple_build_omp_sections (body
, OMP_CLAUSES (expr
));
13311 stmt
= gimple_build_omp_single (body
, OMP_CLAUSES (expr
));
13314 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_REGION
,
13315 OMP_CLAUSES (expr
));
13317 case OMP_TARGET_DATA
:
13318 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13319 to be evaluated before the use_device_{ptr,addr} clauses if they
13320 refer to the same variables. */
13322 tree use_device_clauses
;
13323 tree
*pc
, *uc
= &use_device_clauses
;
13324 for (pc
= &OMP_CLAUSES (expr
); *pc
; )
13325 if (OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_PTR
13326 || OMP_CLAUSE_CODE (*pc
) == OMP_CLAUSE_USE_DEVICE_ADDR
)
13329 *pc
= OMP_CLAUSE_CHAIN (*pc
);
13330 uc
= &OMP_CLAUSE_CHAIN (*uc
);
13333 pc
= &OMP_CLAUSE_CHAIN (*pc
);
13335 *pc
= use_device_clauses
;
13336 stmt
= gimple_build_omp_target (body
, GF_OMP_TARGET_KIND_DATA
,
13337 OMP_CLAUSES (expr
));
13341 stmt
= gimple_build_omp_teams (body
, OMP_CLAUSES (expr
));
13342 if ((ort
& ORT_HOST_TEAMS
) == ORT_HOST_TEAMS
)
13343 gimple_omp_teams_set_host (as_a
<gomp_teams
*> (stmt
), true);
13346 gcc_unreachable ();
13349 gimplify_seq_add_stmt (pre_p
, stmt
);
13350 *expr_p
= NULL_TREE
;
/* NOTE(review): shredded extraction — case labels (OACC_UPDATE), the
   `kind` declaration, break statements and braces were dropped;
   reproduced byte-for-byte with comments only.  */
/* Gimplifies the standalone data-movement constructs (OpenACC
   enter/exit data, OpenACC/OpenMP update, OpenMP target
   enter/exit data): selects a GF_OMP_TARGET_KIND_*, scans/adjusts the
   standalone clauses, rewrites map-kinds for OpenACC if_present and
   exit-data finalize semantics, and emits a body-less
   GIMPLE_OMP_TARGET statement.  */
13353 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
13354 target update constructs. */
13357 gimplify_omp_target_update (tree
*expr_p
, gimple_seq
*pre_p
)
13359 tree expr
= *expr_p
;
13362 enum omp_region_type ort
= ORT_WORKSHARE
;
13364 switch (TREE_CODE (expr
))
13366 case OACC_ENTER_DATA
:
13367 case OACC_EXIT_DATA
:
13368 kind
= GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA
;
13372 kind
= GF_OMP_TARGET_KIND_OACC_UPDATE
;
13375 case OMP_TARGET_UPDATE
:
13376 kind
= GF_OMP_TARGET_KIND_UPDATE
;
13378 case OMP_TARGET_ENTER_DATA
:
13379 kind
= GF_OMP_TARGET_KIND_ENTER_DATA
;
13381 case OMP_TARGET_EXIT_DATA
:
13382 kind
= GF_OMP_TARGET_KIND_EXIT_DATA
;
13385 gcc_unreachable ();
13387 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr
), pre_p
,
13388 ort
, TREE_CODE (expr
));
13389 gimplify_adjust_omp_clauses (pre_p
, NULL
, &OMP_STANDALONE_CLAUSES (expr
),
13391 if (TREE_CODE (expr
) == OACC_UPDATE
13392 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
13393 OMP_CLAUSE_IF_PRESENT
))
13395 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13397 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13398 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
13399 switch (OMP_CLAUSE_MAP_KIND (c
))
13401 case GOMP_MAP_FORCE_TO
:
13402 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_TO
);
13404 case GOMP_MAP_FORCE_FROM
:
13405 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FROM
);
13411 else if (TREE_CODE (expr
) == OACC_EXIT_DATA
13412 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr
),
13413 OMP_CLAUSE_FINALIZE
))
13415 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13417 bool have_clause
= false;
13418 for (tree c
= OMP_STANDALONE_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
13419 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_MAP
)
13420 switch (OMP_CLAUSE_MAP_KIND (c
))
13422 case GOMP_MAP_FROM
:
13423 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_FROM
);
13424 have_clause
= true;
13426 case GOMP_MAP_RELEASE
:
13427 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_DELETE
);
13428 have_clause
= true;
13430 case GOMP_MAP_TO_PSET
:
13431 /* Fortran arrays with descriptors must map that descriptor when
13432 doing standalone "attach" operations (in OpenACC). In that
13433 case GOMP_MAP_TO_PSET appears by itself with no preceding
13434 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13436 case GOMP_MAP_POINTER
:
13437 /* TODO PR92929: we may see these here, but they'll always follow
13438 one of the clauses above, and will be handled by libgomp as
13439 one group, so no handling required here. */
13440 gcc_assert (have_clause
);
13442 case GOMP_MAP_DETACH
:
13443 OMP_CLAUSE_SET_MAP_KIND (c
, GOMP_MAP_FORCE_DETACH
);
13444 have_clause
= false;
13446 case GOMP_MAP_STRUCT
:
13447 have_clause
= false;
13450 gcc_unreachable ();
13453 stmt
= gimple_build_omp_target (NULL
, kind
, OMP_STANDALONE_CLAUSES (expr
));
13455 gimplify_seq_add_stmt (pre_p
, stmt
);
13456 *expr_p
= NULL_TREE
;
/* NOTE(review): shredded extraction — the `static bool` line, braces
   and the trailing return statements were dropped; reproduced
   byte-for-byte with comments only.  */
/* Predicate: does EXPR denote exactly the object *ADDR that the
   front end stabilized for the atomic operation?  Strips useless type
   conversions, then matches either an INDIRECT_REF of ADDR (peeling
   matched conversion layers off both sides) or a direct ADDR_EXPR
   operand match.  */
13459 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13460 stabilized the lhs of the atomic operation as *ADDR. Return true if
13461 EXPR is this stabilized form. */
13464 goa_lhs_expr_p (tree expr
, tree addr
)
13466 /* Also include casts to other type variants. The C front end is fond
13467 of adding these for e.g. volatile variables. This is like
13468 STRIP_TYPE_NOPS but includes the main variant lookup. */
13469 STRIP_USELESS_TYPE_CONVERSION (expr
);
13471 if (TREE_CODE (expr
) == INDIRECT_REF
)
13473 expr
= TREE_OPERAND (expr
, 0);
13474 while (expr
!= addr
13475 && (CONVERT_EXPR_P (expr
)
13476 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
13477 && TREE_CODE (expr
) == TREE_CODE (addr
)
13478 && types_compatible_p (TREE_TYPE (expr
), TREE_TYPE (addr
)))
13480 expr
= TREE_OPERAND (expr
, 0);
13481 addr
= TREE_OPERAND (addr
, 0);
13485 return (TREE_CODE (addr
) == ADDR_EXPR
13486 && TREE_CODE (expr
) == ADDR_EXPR
13487 && TREE_OPERAND (addr
, 0) == TREE_OPERAND (expr
, 0));
13489 if (TREE_CODE (addr
) == ADDR_EXPR
&& expr
== TREE_OPERAND (addr
, 0))
/* NOTE(review): shredded extraction — the return type, the `lhs_var`
   parameter line, `saw_lhs` declaration, braces and several returns
   were dropped; reproduced byte-for-byte with comments only.  */
13494 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13495 expression does not involve the lhs, evaluate it into a temporary.
13496 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13497 or -1 if an error was encountered. */
13500 goa_stabilize_expr (tree
*expr_p
, gimple_seq
*pre_p
, tree lhs_addr
,
13503 tree expr
= *expr_p
;
/* Base cases: the lhs itself, or an expression already a gimple
   value, need no further work.  */
13506 if (goa_lhs_expr_p (expr
, lhs_addr
))
13511 if (is_gimple_val (expr
))
/* Recurse structurally by tree-code class: binary-ish nodes stabilize
   operand 1 then operand 0; selected tcc_expression codes recurse on
   their operands; COMPOUND_EXPR flushes pre-evaluations into PRE_P
   and retries; BIT_FIELD_REF recurses on its base.  */
13515 switch (TREE_CODE_CLASS (TREE_CODE (expr
)))
13518 case tcc_comparison
:
13519 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
, lhs_addr
,
13523 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
, lhs_addr
,
13526 case tcc_expression
:
13527 switch (TREE_CODE (expr
))
13529 case TRUTH_ANDIF_EXPR
:
13530 case TRUTH_ORIF_EXPR
:
13531 case TRUTH_AND_EXPR
:
13532 case TRUTH_OR_EXPR
:
13533 case TRUTH_XOR_EXPR
:
13534 case BIT_INSERT_EXPR
:
13535 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 1), pre_p
,
13536 lhs_addr
, lhs_var
);
13538 case TRUTH_NOT_EXPR
:
13539 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13540 lhs_addr
, lhs_var
);
13542 case COMPOUND_EXPR
:
13543 /* Break out any preevaluations from cp_build_modify_expr. */
13544 for (; TREE_CODE (expr
) == COMPOUND_EXPR
;
13545 expr
= TREE_OPERAND (expr
, 1))
13546 gimplify_stmt (&TREE_OPERAND (expr
, 0), pre_p
);
13548 return goa_stabilize_expr (expr_p
, pre_p
, lhs_addr
, lhs_var
);
13553 case tcc_reference
:
13554 if (TREE_CODE (expr
) == BIT_FIELD_REF
)
13555 saw_lhs
|= goa_stabilize_expr (&TREE_OPERAND (expr
, 0), pre_p
,
13556 lhs_addr
, lhs_var
);
/* If the lhs did not appear, force the whole expression into a
   gimple value (a temporary) evaluated in PRE_P.  */
13564 enum gimplify_status gs
;
13565 gs
= gimplify_expr (expr_p
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
13566 if (gs
!= GS_ALL_DONE
)
/* NOTE(review): shredded extraction — declarations (tmp_load, bitsize,
   storestmt assignment head), error returns and braces were dropped;
   reproduced byte-for-byte with comments only.  */
/* Lowers OMP_ATOMIC / OMP_ATOMIC_READ / OMP_ATOMIC_CAPTURE_{OLD,NEW}
   into a GIMPLE_OMP_ATOMIC_LOAD of *ADDR into a fresh temporary,
   stabilization of the rhs (replacing lhs occurrences with that
   temporary via goa_stabilize_expr), and a GIMPLE_OMP_ATOMIC_STORE;
   capture forms additionally mark the load/store as needing a value
   and hand the temporary back through *EXPR_P.  */
13573 /* Gimplify an OMP_ATOMIC statement. */
13575 static enum gimplify_status
13576 gimplify_omp_atomic (tree
*expr_p
, gimple_seq
*pre_p
)
13578 tree addr
= TREE_OPERAND (*expr_p
, 0);
13579 tree rhs
= TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
13580 ? NULL
: TREE_OPERAND (*expr_p
, 1);
13581 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr
)));
13583 gomp_atomic_load
*loadstmt
;
13584 gomp_atomic_store
*storestmt
;
13586 tmp_load
= create_tmp_reg (type
);
13587 if (rhs
&& goa_stabilize_expr (&rhs
, pre_p
, addr
, tmp_load
) < 0)
13590 if (gimplify_expr (&addr
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13594 loadstmt
= gimple_build_omp_atomic_load (tmp_load
, addr
,
13595 OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13596 gimplify_seq_add_stmt (pre_p
, loadstmt
);
13599 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13600 representatives. Use BIT_FIELD_REF on the lhs instead. */
13601 if (TREE_CODE (rhs
) == BIT_INSERT_EXPR
13602 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load
)))
13604 tree bitpos
= TREE_OPERAND (rhs
, 2);
13605 tree op1
= TREE_OPERAND (rhs
, 1);
13607 tree tmp_store
= tmp_load
;
13608 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_CAPTURE_OLD
)
13609 tmp_store
= get_initialized_tmp_var (tmp_load
, pre_p
);
13610 if (INTEGRAL_TYPE_P (TREE_TYPE (op1
)))
13611 bitsize
= bitsize_int (TYPE_PRECISION (TREE_TYPE (op1
)));
13613 bitsize
= TYPE_SIZE (TREE_TYPE (op1
));
13614 gcc_assert (TREE_OPERAND (rhs
, 0) == tmp_load
);
13615 tree t
= build2_loc (EXPR_LOCATION (rhs
),
13616 MODIFY_EXPR
, void_type_node
,
13617 build3_loc (EXPR_LOCATION (rhs
), BIT_FIELD_REF
,
13618 TREE_TYPE (op1
), tmp_store
, bitsize
,
13620 gimplify_and_add (t
, pre_p
);
13623 if (gimplify_expr (&rhs
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
)
13628 if (TREE_CODE (*expr_p
) == OMP_ATOMIC_READ
)
13631 = gimple_build_omp_atomic_store (rhs
, OMP_ATOMIC_MEMORY_ORDER (*expr_p
));
13632 gimplify_seq_add_stmt (pre_p
, storestmt
);
13633 switch (TREE_CODE (*expr_p
))
13635 case OMP_ATOMIC_READ
:
13636 case OMP_ATOMIC_CAPTURE_OLD
:
13637 *expr_p
= tmp_load
;
13638 gimple_omp_atomic_set_need_value (loadstmt
);
13640 case OMP_ATOMIC_CAPTURE_NEW
:
13642 gimple_omp_atomic_set_need_value (storestmt
);
13649 return GS_ALL_DONE
;
13652 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13653 body, and adding some EH bits. */
/* NOTE(review): line-sampled extract; tokens unchanged, some original
   lines (braces, declarations such as subcode/body_stmt) are elided.  */
13655 static enum gimplify_status
13656 gimplify_transaction (tree
*expr_p
, gimple_seq
*pre_p
)
13658 tree expr
= *expr_p
, temp
, tbody
= TRANSACTION_EXPR_BODY (expr
);
13660 gtransaction
*trans_stmt
;
13661 gimple_seq body
= NULL
;
13664 /* Wrap the transaction body in a BIND_EXPR so we have a context
13665 where to put decls for OMP. */
13666 if (TREE_CODE (tbody
) != BIND_EXPR
)
13668 tree bind
= build3 (BIND_EXPR
, void_type_node
, NULL
, tbody
, NULL
);
13669 TREE_SIDE_EFFECTS (bind
) = 1;
13670 SET_EXPR_LOCATION (bind
, EXPR_LOCATION (tbody
));
13671 TRANSACTION_EXPR_BODY (expr
) = bind
;
/* Gimplify the body inside its own gimplification context; TEMP
   receives a temporary if the transaction yields a value.  */
13674 push_gimplify_context ();
13675 temp
= voidify_wrapper_expr (*expr_p
, NULL
);
13677 body_stmt
= gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr
), &body
);
13678 pop_gimplify_context (body_stmt
);
/* Wrap the gimplified body in a GIMPLE_TRANSACTION and encode the
   outer/relaxed flags as its subcode.  */
13680 trans_stmt
= gimple_build_transaction (body
);
13681 if (TRANSACTION_EXPR_OUTER (expr
))
13682 subcode
= GTMA_IS_OUTER
;
13683 else if (TRANSACTION_EXPR_RELAXED (expr
))
13684 subcode
= GTMA_IS_RELAXED
;
13685 gimple_transaction_set_subcode (trans_stmt
, subcode
);
13687 gimplify_seq_add_stmt (pre_p
, trans_stmt
);
/* The statement has been emitted; the expression itself is spent
   (a value-returning path via TEMP is elided from this extract).  */
13695 *expr_p
= NULL_TREE
;
13696 return GS_ALL_DONE
;
13699 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13700 is the OMP_BODY of the original EXPR (which has already been
13701 gimplified so it's not present in the EXPR).
13703 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
/* NOTE(review): line-sampled extract; several lines (return type,
   declarations of c/decls/i/fail, braces) are elided -- all visible
   tokens left byte-identical.  */
13706 gimplify_omp_ordered (tree expr
, gimple_seq body
)
/* Track the first depend(source) and depend(sink) clauses seen so a
   conflict between them can be diagnosed after the scan.  */
13711 tree source_c
= NULL_TREE
;
13712 tree sink_c
= NULL_TREE
;
/* Clause validation only makes sense inside a gimplify OMP context.  */
13714 if (gimplify_omp_ctxp
)
/* Walk every clause on the ordered construct.  */
13716 for (c
= OMP_ORDERED_CLAUSES (expr
); c
; c
= OMP_CLAUSE_CHAIN (c
))
/* depend(sink/source) requires an enclosing loop with ordered(n):
   an empty loop_iter_var vector means there is none.  */
13717 if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13718 && gimplify_omp_ctxp
->loop_iter_var
.is_empty ()
13719 && (OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
13720 || OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
))
13722 error_at (OMP_CLAUSE_LOCATION (c
),
13723 "%<ordered%> construct with %<depend%> clause must be "
13724 "closely nested inside a loop with %<ordered%> clause "
13725 "with a parameter");
/* For depend(sink:...), check each listed variable against the
   recorded iteration variables of the enclosing loop nest.  */
13728 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13729 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SINK
)
/* loop_iter_var stores pairs, hence the "/ 2" and "2 * i"
   indexing below.  */
13732 for (decls
= OMP_CLAUSE_DECL (c
), i
= 0;
13733 decls
&& TREE_CODE (decls
) == TREE_LIST
;
13734 decls
= TREE_CHAIN (decls
), ++i
)
13735 if (i
>= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
/* Mismatched variable: not the iteration variable of the
   corresponding outermost loop level.  */
13737 else if (TREE_VALUE (decls
)
13738 != gimplify_omp_ctxp
->loop_iter_var
[2 * i
])
13740 error_at (OMP_CLAUSE_LOCATION (c
),
13741 "variable %qE is not an iteration "
13742 "of outermost loop %d, expected %qE",
13743 TREE_VALUE (decls
), i
+ 1,
13744 gimplify_omp_ctxp
->loop_iter_var
[2 * i
]);
/* Matching case (lhs elided): substitute the second element of
   the pair for the clause's variable.  */
13750 = gimplify_omp_ctxp
->loop_iter_var
[2 * i
+ 1];
/* Too few sink variables for the loop nest depth.  */
13751 if (!fail
&& i
!= gimplify_omp_ctxp
->loop_iter_var
.length () / 2)
13753 error_at (OMP_CLAUSE_LOCATION (c
),
13754 "number of variables in %<depend%> clause with "
13755 "%<sink%> modifier does not match number of "
13756 "iteration variables");
/* At most one depend(source) clause is allowed.  */
13761 else if (OMP_CLAUSE_CODE (c
) == OMP_CLAUSE_DEPEND
13762 && OMP_CLAUSE_DEPEND_KIND (c
) == OMP_CLAUSE_DEPEND_SOURCE
)
13766 error_at (OMP_CLAUSE_LOCATION (c
),
13767 "more than one %<depend%> clause with %<source%> "
13768 "modifier on an %<ordered%> construct");
/* source and sink modifiers are mutually exclusive on one construct.  */
13775 if (source_c
&& sink_c
)
13777 error_at (OMP_CLAUSE_LOCATION (source_c
),
13778 "%<depend%> clause with %<source%> modifier specified "
13779 "together with %<depend%> clauses with %<sink%> modifier "
13780 "on the same construct");
/* On error (condition elided), emit a no-op instead of the tuple.  */
13785 return gimple_build_nop ();
13786 return gimple_build_omp_ordered (body
, OMP_ORDERED_CLAUSES (expr
));
13789 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13790 expression produces a value to be used as an operand inside a GIMPLE
13791 statement, the value will be stored back in *EXPR_P. This value will
13792 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13793 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13794 emitted in PRE_P and POST_P.
13796 Additionally, this process may overwrite parts of the input
13797 expression during gimplification. Ideally, it should be
13798 possible to do non-destructive gimplification.
13800 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13801 the expression needs to evaluate to a value to be used as
13802 an operand in a GIMPLE statement, this value will be stored in
13803 *EXPR_P on exit. This happens when the caller specifies one
13804 of fb_lvalue or fb_rvalue fallback flags.
13806 PRE_P will contain the sequence of GIMPLE statements corresponding
13807 to the evaluation of EXPR and all the side-effects that must
13808 be executed before the main expression. On exit, the last
13809 statement of PRE_P is the core statement being gimplified. For
13810 instance, when gimplifying 'if (++a)' the last statement in
13811 PRE_P will be 'if (t.1)' where t.1 is the result of
13812 pre-incrementing 'a'.
13814 POST_P will contain the sequence of GIMPLE statements corresponding
13815 to the evaluation of all the side-effects that must be executed
13816 after the main expression. If this is NULL, the post
13817 side-effects are stored at the end of PRE_P.
13819 The reason why the output is split in two is to handle post
13820 side-effects explicitly. In some cases, an expression may have
13821 inner and outer post side-effects which need to be emitted in
13822 an order different from the one given by the recursive
13823 traversal. For instance, for the expression (*p--)++ the post
13824 side-effects of '--' must actually occur *after* the post
13825 side-effects of '++'. However, gimplification will first visit
13826 the inner expression, so if a separate POST sequence was not
13827 used, the resulting sequence would be:
13834 However, the post-decrement operation in line #2 must not be
13835 evaluated until after the store to *p at line #4, so the
13836 correct sequence should be:
13843 So, by specifying a separate post queue, it is possible
13844 to emit the post side-effects in the correct order.
13845 If POST_P is NULL, an internal queue will be used. Before
13846 returning to the caller, the sequence POST_P is appended to
13847 the main output sequence PRE_P.
13849 GIMPLE_TEST_F points to a function that takes a tree T and
13850 returns nonzero if T is in the GIMPLE form requested by the
13851 caller. The GIMPLE predicates are in gimple.c.
13853 FALLBACK tells the function what sort of a temporary we want if
13854 gimplification cannot produce an expression that complies with
13857 fb_none means that no temporary should be generated
13858 fb_rvalue means that an rvalue is OK to generate
13859 fb_lvalue means that an lvalue is OK to generate
13860 fb_either means that either is OK, but an lvalue is preferable.
13861 fb_mayfail means that gimplification may fail (in which case
13862 GS_ERROR will be returned)
13864 The return value is either GS_ERROR or GS_ALL_DONE, since this
13865 function iterates until EXPR is completely gimplified or an error
13868 enum gimplify_status
13869 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
13870 bool (*gimple_test_f
) (tree
), fallback_t fallback
)
13873 gimple_seq internal_pre
= NULL
;
13874 gimple_seq internal_post
= NULL
;
13877 location_t saved_location
;
13878 enum gimplify_status ret
;
13879 gimple_stmt_iterator pre_last_gsi
, post_last_gsi
;
13882 save_expr
= *expr_p
;
13883 if (save_expr
== NULL_TREE
)
13884 return GS_ALL_DONE
;
13886 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13887 is_statement
= gimple_test_f
== is_gimple_stmt
;
13889 gcc_assert (pre_p
);
13891 /* Consistency checks. */
13892 if (gimple_test_f
== is_gimple_reg
)
13893 gcc_assert (fallback
& (fb_rvalue
| fb_lvalue
));
13894 else if (gimple_test_f
== is_gimple_val
13895 || gimple_test_f
== is_gimple_call_addr
13896 || gimple_test_f
== is_gimple_condexpr
13897 || gimple_test_f
== is_gimple_condexpr_for_cond
13898 || gimple_test_f
== is_gimple_mem_rhs
13899 || gimple_test_f
== is_gimple_mem_rhs_or_call
13900 || gimple_test_f
== is_gimple_reg_rhs
13901 || gimple_test_f
== is_gimple_reg_rhs_or_call
13902 || gimple_test_f
== is_gimple_asm_val
13903 || gimple_test_f
== is_gimple_mem_ref_addr
)
13904 gcc_assert (fallback
& fb_rvalue
);
13905 else if (gimple_test_f
== is_gimple_min_lval
13906 || gimple_test_f
== is_gimple_lvalue
)
13907 gcc_assert (fallback
& fb_lvalue
);
13908 else if (gimple_test_f
== is_gimple_addressable
)
13909 gcc_assert (fallback
& fb_either
);
13910 else if (gimple_test_f
== is_gimple_stmt
)
13911 gcc_assert (fallback
== fb_none
);
13914 /* We should have recognized the GIMPLE_TEST_F predicate to
13915 know what kind of fallback to use in case a temporary is
13916 needed to hold the value or address of *EXPR_P. */
13917 gcc_unreachable ();
13920 /* We used to check the predicate here and return immediately if it
13921 succeeds. This is wrong; the design is for gimplification to be
13922 idempotent, and for the predicates to only test for valid forms, not
13923 whether they are fully simplified. */
13925 pre_p
= &internal_pre
;
13927 if (post_p
== NULL
)
13928 post_p
= &internal_post
;
13930 /* Remember the last statements added to PRE_P and POST_P. Every
13931 new statement added by the gimplification helpers needs to be
13932 annotated with location information. To centralize the
13933 responsibility, we remember the last statement that had been
13934 added to both queues before gimplifying *EXPR_P. If
13935 gimplification produces new statements in PRE_P and POST_P, those
13936 statements will be annotated with the same location information
13938 pre_last_gsi
= gsi_last (*pre_p
);
13939 post_last_gsi
= gsi_last (*post_p
);
13941 saved_location
= input_location
;
13942 if (save_expr
!= error_mark_node
13943 && EXPR_HAS_LOCATION (*expr_p
))
13944 input_location
= EXPR_LOCATION (*expr_p
);
13946 /* Loop over the specific gimplifiers until the toplevel node
13947 remains the same. */
13950 /* Strip away as many useless type conversions as possible
13951 at the toplevel. */
13952 STRIP_USELESS_TYPE_CONVERSION (*expr_p
);
13954 /* Remember the expr. */
13955 save_expr
= *expr_p
;
13957 /* Die, die, die, my darling. */
13958 if (error_operand_p (save_expr
))
13964 /* Do any language-specific gimplification. */
13965 ret
= ((enum gimplify_status
)
13966 lang_hooks
.gimplify_expr (expr_p
, pre_p
, post_p
));
13969 if (*expr_p
== NULL_TREE
)
13971 if (*expr_p
!= save_expr
)
13974 else if (ret
!= GS_UNHANDLED
)
13977 /* Make sure that all the cases set 'ret' appropriately. */
13978 ret
= GS_UNHANDLED
;
13979 switch (TREE_CODE (*expr_p
))
13981 /* First deal with the special cases. */
13983 case POSTINCREMENT_EXPR
:
13984 case POSTDECREMENT_EXPR
:
13985 case PREINCREMENT_EXPR
:
13986 case PREDECREMENT_EXPR
:
13987 ret
= gimplify_self_mod_expr (expr_p
, pre_p
, post_p
,
13988 fallback
!= fb_none
,
13989 TREE_TYPE (*expr_p
));
13992 case VIEW_CONVERT_EXPR
:
13993 if ((fallback
& fb_rvalue
)
13994 && is_gimple_reg_type (TREE_TYPE (*expr_p
))
13995 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p
, 0))))
13997 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
13998 post_p
, is_gimple_val
, fb_rvalue
);
13999 recalculate_side_effects (*expr_p
);
14005 case ARRAY_RANGE_REF
:
14006 case REALPART_EXPR
:
14007 case IMAGPART_EXPR
:
14008 case COMPONENT_REF
:
14009 ret
= gimplify_compound_lval (expr_p
, pre_p
, post_p
,
14010 fallback
? fallback
: fb_rvalue
);
14014 ret
= gimplify_cond_expr (expr_p
, pre_p
, fallback
);
14016 /* C99 code may assign to an array in a structure value of a
14017 conditional expression, and this has undefined behavior
14018 only on execution, so create a temporary if an lvalue is
14020 if (fallback
== fb_lvalue
)
14022 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14023 mark_addressable (*expr_p
);
14029 ret
= gimplify_call_expr (expr_p
, pre_p
, fallback
!= fb_none
);
14031 /* C99 code may assign to an array in a structure returned
14032 from a function, and this has undefined behavior only on
14033 execution, so create a temporary if an lvalue is
14035 if (fallback
== fb_lvalue
)
14037 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14038 mark_addressable (*expr_p
);
14044 gcc_unreachable ();
14046 case COMPOUND_EXPR
:
14047 ret
= gimplify_compound_expr (expr_p
, pre_p
, fallback
!= fb_none
);
14050 case COMPOUND_LITERAL_EXPR
:
14051 ret
= gimplify_compound_literal_expr (expr_p
, pre_p
,
14052 gimple_test_f
, fallback
);
14057 ret
= gimplify_modify_expr (expr_p
, pre_p
, post_p
,
14058 fallback
!= fb_none
);
14061 case TRUTH_ANDIF_EXPR
:
14062 case TRUTH_ORIF_EXPR
:
14064 /* Preserve the original type of the expression and the
14065 source location of the outer expression. */
14066 tree org_type
= TREE_TYPE (*expr_p
);
14067 *expr_p
= gimple_boolify (*expr_p
);
14068 *expr_p
= build3_loc (input_location
, COND_EXPR
,
14072 org_type
, boolean_true_node
),
14075 org_type
, boolean_false_node
));
14080 case TRUTH_NOT_EXPR
:
14082 tree type
= TREE_TYPE (*expr_p
);
14083 /* The parsers are careful to generate TRUTH_NOT_EXPR
14084 only with operands that are always zero or one.
14085 We do not fold here but handle the only interesting case
14086 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
14087 *expr_p
= gimple_boolify (*expr_p
);
14088 if (TYPE_PRECISION (TREE_TYPE (*expr_p
)) == 1)
14089 *expr_p
= build1_loc (input_location
, BIT_NOT_EXPR
,
14090 TREE_TYPE (*expr_p
),
14091 TREE_OPERAND (*expr_p
, 0));
14093 *expr_p
= build2_loc (input_location
, BIT_XOR_EXPR
,
14094 TREE_TYPE (*expr_p
),
14095 TREE_OPERAND (*expr_p
, 0),
14096 build_int_cst (TREE_TYPE (*expr_p
), 1));
14097 if (!useless_type_conversion_p (type
, TREE_TYPE (*expr_p
)))
14098 *expr_p
= fold_convert_loc (input_location
, type
, *expr_p
);
14104 ret
= gimplify_addr_expr (expr_p
, pre_p
, post_p
);
14107 case ANNOTATE_EXPR
:
14109 tree cond
= TREE_OPERAND (*expr_p
, 0);
14110 tree kind
= TREE_OPERAND (*expr_p
, 1);
14111 tree data
= TREE_OPERAND (*expr_p
, 2);
14112 tree type
= TREE_TYPE (cond
);
14113 if (!INTEGRAL_TYPE_P (type
))
14119 tree tmp
= create_tmp_var (type
);
14120 gimplify_arg (&cond
, pre_p
, EXPR_LOCATION (*expr_p
));
14122 = gimple_build_call_internal (IFN_ANNOTATE
, 3, cond
, kind
, data
);
14123 gimple_call_set_lhs (call
, tmp
);
14124 gimplify_seq_add_stmt (pre_p
, call
);
14131 ret
= gimplify_va_arg_expr (expr_p
, pre_p
, post_p
);
14135 if (IS_EMPTY_STMT (*expr_p
))
14141 if (VOID_TYPE_P (TREE_TYPE (*expr_p
))
14142 || fallback
== fb_none
)
14144 /* Just strip a conversion to void (or in void context) and
14146 *expr_p
= TREE_OPERAND (*expr_p
, 0);
14151 ret
= gimplify_conversion (expr_p
);
14152 if (ret
== GS_ERROR
)
14154 if (*expr_p
!= save_expr
)
14158 case FIX_TRUNC_EXPR
:
14159 /* unary_expr: ... | '(' cast ')' val | ... */
14160 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14161 is_gimple_val
, fb_rvalue
);
14162 recalculate_side_effects (*expr_p
);
14167 bool volatilep
= TREE_THIS_VOLATILE (*expr_p
);
14168 bool notrap
= TREE_THIS_NOTRAP (*expr_p
);
14169 tree saved_ptr_type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 0));
14171 *expr_p
= fold_indirect_ref_loc (input_location
, *expr_p
);
14172 if (*expr_p
!= save_expr
)
14178 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14179 is_gimple_reg
, fb_rvalue
);
14180 if (ret
== GS_ERROR
)
14183 recalculate_side_effects (*expr_p
);
14184 *expr_p
= fold_build2_loc (input_location
, MEM_REF
,
14185 TREE_TYPE (*expr_p
),
14186 TREE_OPERAND (*expr_p
, 0),
14187 build_int_cst (saved_ptr_type
, 0));
14188 TREE_THIS_VOLATILE (*expr_p
) = volatilep
;
14189 TREE_THIS_NOTRAP (*expr_p
) = notrap
;
14194 /* We arrive here through the various re-gimplifcation paths. */
14196 /* First try re-folding the whole thing. */
14197 tmp
= fold_binary (MEM_REF
, TREE_TYPE (*expr_p
),
14198 TREE_OPERAND (*expr_p
, 0),
14199 TREE_OPERAND (*expr_p
, 1));
14202 REF_REVERSE_STORAGE_ORDER (tmp
)
14203 = REF_REVERSE_STORAGE_ORDER (*expr_p
);
14205 recalculate_side_effects (*expr_p
);
14209 /* Avoid re-gimplifying the address operand if it is already
14210 in suitable form. Re-gimplifying would mark the address
14211 operand addressable. Always gimplify when not in SSA form
14212 as we still may have to gimplify decls with value-exprs. */
14213 if (!gimplify_ctxp
|| !gimple_in_ssa_p (cfun
)
14214 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p
, 0)))
14216 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14217 is_gimple_mem_ref_addr
, fb_rvalue
);
14218 if (ret
== GS_ERROR
)
14221 recalculate_side_effects (*expr_p
);
14225 /* Constants need not be gimplified. */
14232 /* Drop the overflow flag on constants, we do not want
14233 that in the GIMPLE IL. */
14234 if (TREE_OVERFLOW_P (*expr_p
))
14235 *expr_p
= drop_tree_overflow (*expr_p
);
14240 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14241 CONST_DECL node. Otherwise the decl is replaceable by its
14243 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14244 if (fallback
& fb_lvalue
)
14248 *expr_p
= DECL_INITIAL (*expr_p
);
14254 ret
= gimplify_decl_expr (expr_p
, pre_p
);
14258 ret
= gimplify_bind_expr (expr_p
, pre_p
);
14262 ret
= gimplify_loop_expr (expr_p
, pre_p
);
14266 ret
= gimplify_switch_expr (expr_p
, pre_p
);
14270 ret
= gimplify_exit_expr (expr_p
);
14274 /* If the target is not LABEL, then it is a computed jump
14275 and the target needs to be gimplified. */
14276 if (TREE_CODE (GOTO_DESTINATION (*expr_p
)) != LABEL_DECL
)
14278 ret
= gimplify_expr (&GOTO_DESTINATION (*expr_p
), pre_p
,
14279 NULL
, is_gimple_val
, fb_rvalue
);
14280 if (ret
== GS_ERROR
)
14283 gimplify_seq_add_stmt (pre_p
,
14284 gimple_build_goto (GOTO_DESTINATION (*expr_p
)));
14289 gimplify_seq_add_stmt (pre_p
,
14290 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p
),
14291 PREDICT_EXPR_OUTCOME (*expr_p
)));
14296 ret
= gimplify_label_expr (expr_p
, pre_p
);
14297 label
= LABEL_EXPR_LABEL (*expr_p
);
14298 gcc_assert (decl_function_context (label
) == current_function_decl
);
14300 /* If the label is used in a goto statement, or address of the label
14301 is taken, we need to unpoison all variables that were seen so far.
14302 Doing so would prevent us from reporting a false positives. */
14303 if (asan_poisoned_variables
14304 && asan_used_labels
!= NULL
14305 && asan_used_labels
->contains (label
))
14306 asan_poison_variables (asan_poisoned_variables
, false, pre_p
);
14309 case CASE_LABEL_EXPR
:
14310 ret
= gimplify_case_label_expr (expr_p
, pre_p
);
14312 if (gimplify_ctxp
->live_switch_vars
)
14313 asan_poison_variables (gimplify_ctxp
->live_switch_vars
, false,
14318 ret
= gimplify_return_expr (*expr_p
, pre_p
);
14322 /* Don't reduce this in place; let gimplify_init_constructor work its
14323 magic. Buf if we're just elaborating this for side effects, just
14324 gimplify any element that has side-effects. */
14325 if (fallback
== fb_none
)
14327 unsigned HOST_WIDE_INT ix
;
14329 tree temp
= NULL_TREE
;
14330 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p
), ix
, val
)
14331 if (TREE_SIDE_EFFECTS (val
))
14332 append_to_statement_list (val
, &temp
);
14335 ret
= temp
? GS_OK
: GS_ALL_DONE
;
14337 /* C99 code may assign to an array in a constructed
14338 structure or union, and this has undefined behavior only
14339 on execution, so create a temporary if an lvalue is
14341 else if (fallback
== fb_lvalue
)
14343 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, post_p
, false);
14344 mark_addressable (*expr_p
);
14351 /* The following are special cases that are not handled by the
14352 original GIMPLE grammar. */
14354 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14357 ret
= gimplify_save_expr (expr_p
, pre_p
, post_p
);
14360 case BIT_FIELD_REF
:
14361 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14362 post_p
, is_gimple_lvalue
, fb_either
);
14363 recalculate_side_effects (*expr_p
);
14366 case TARGET_MEM_REF
:
14368 enum gimplify_status r0
= GS_ALL_DONE
, r1
= GS_ALL_DONE
;
14370 if (TMR_BASE (*expr_p
))
14371 r0
= gimplify_expr (&TMR_BASE (*expr_p
), pre_p
,
14372 post_p
, is_gimple_mem_ref_addr
, fb_either
);
14373 if (TMR_INDEX (*expr_p
))
14374 r1
= gimplify_expr (&TMR_INDEX (*expr_p
), pre_p
,
14375 post_p
, is_gimple_val
, fb_rvalue
);
14376 if (TMR_INDEX2 (*expr_p
))
14377 r1
= gimplify_expr (&TMR_INDEX2 (*expr_p
), pre_p
,
14378 post_p
, is_gimple_val
, fb_rvalue
);
14379 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14380 ret
= MIN (r0
, r1
);
14384 case NON_LVALUE_EXPR
:
14385 /* This should have been stripped above. */
14386 gcc_unreachable ();
14389 ret
= gimplify_asm_expr (expr_p
, pre_p
, post_p
);
14392 case TRY_FINALLY_EXPR
:
14393 case TRY_CATCH_EXPR
:
14395 gimple_seq eval
, cleanup
;
14398 /* Calls to destructors are generated automatically in FINALLY/CATCH
14399 block. They should have location as UNKNOWN_LOCATION. However,
14400 gimplify_call_expr will reset these call stmts to input_location
14401 if it finds stmt's location is unknown. To prevent resetting for
14402 destructors, we set the input_location to unknown.
14403 Note that this only affects the destructor calls in FINALLY/CATCH
14404 block, and will automatically reset to its original value by the
14405 end of gimplify_expr. */
14406 input_location
= UNKNOWN_LOCATION
;
14407 eval
= cleanup
= NULL
;
14408 gimplify_and_add (TREE_OPERAND (*expr_p
, 0), &eval
);
14409 if (TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
14410 && TREE_CODE (TREE_OPERAND (*expr_p
, 1)) == EH_ELSE_EXPR
)
14412 gimple_seq n
= NULL
, e
= NULL
;
14413 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
14415 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p
, 1),
14417 if (!gimple_seq_empty_p (n
) && !gimple_seq_empty_p (e
))
14419 geh_else
*stmt
= gimple_build_eh_else (n
, e
);
14420 gimple_seq_add_stmt (&cleanup
, stmt
);
14424 gimplify_and_add (TREE_OPERAND (*expr_p
, 1), &cleanup
);
14425 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14426 if (gimple_seq_empty_p (cleanup
))
14428 gimple_seq_add_seq (pre_p
, eval
);
14432 try_
= gimple_build_try (eval
, cleanup
,
14433 TREE_CODE (*expr_p
) == TRY_FINALLY_EXPR
14434 ? GIMPLE_TRY_FINALLY
14435 : GIMPLE_TRY_CATCH
);
14436 if (EXPR_HAS_LOCATION (save_expr
))
14437 gimple_set_location (try_
, EXPR_LOCATION (save_expr
));
14438 else if (LOCATION_LOCUS (saved_location
) != UNKNOWN_LOCATION
)
14439 gimple_set_location (try_
, saved_location
);
14440 if (TREE_CODE (*expr_p
) == TRY_CATCH_EXPR
)
14441 gimple_try_set_catch_is_cleanup (try_
,
14442 TRY_CATCH_IS_CLEANUP (*expr_p
));
14443 gimplify_seq_add_stmt (pre_p
, try_
);
14448 case CLEANUP_POINT_EXPR
:
14449 ret
= gimplify_cleanup_point_expr (expr_p
, pre_p
);
14453 ret
= gimplify_target_expr (expr_p
, pre_p
, post_p
);
14459 gimple_seq handler
= NULL
;
14460 gimplify_and_add (CATCH_BODY (*expr_p
), &handler
);
14461 c
= gimple_build_catch (CATCH_TYPES (*expr_p
), handler
);
14462 gimplify_seq_add_stmt (pre_p
, c
);
14467 case EH_FILTER_EXPR
:
14470 gimple_seq failure
= NULL
;
14472 gimplify_and_add (EH_FILTER_FAILURE (*expr_p
), &failure
);
14473 ehf
= gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p
), failure
);
14474 gimple_set_no_warning (ehf
, TREE_NO_WARNING (*expr_p
));
14475 gimplify_seq_add_stmt (pre_p
, ehf
);
14482 enum gimplify_status r0
, r1
;
14483 r0
= gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p
), pre_p
,
14484 post_p
, is_gimple_val
, fb_rvalue
);
14485 r1
= gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p
), pre_p
,
14486 post_p
, is_gimple_val
, fb_rvalue
);
14487 TREE_SIDE_EFFECTS (*expr_p
) = 0;
14488 ret
= MIN (r0
, r1
);
14493 /* We get here when taking the address of a label. We mark
14494 the label as "forced"; meaning it can never be removed and
14495 it is a potential target for any computed goto. */
14496 FORCED_LABEL (*expr_p
) = 1;
14500 case STATEMENT_LIST
:
14501 ret
= gimplify_statement_list (expr_p
, pre_p
);
14504 case WITH_SIZE_EXPR
:
14506 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14507 post_p
== &internal_post
? NULL
: post_p
,
14508 gimple_test_f
, fallback
);
14509 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
14510 is_gimple_val
, fb_rvalue
);
14517 ret
= gimplify_var_or_parm_decl (expr_p
);
14521 /* When within an OMP context, notice uses of variables. */
14522 if (gimplify_omp_ctxp
)
14523 omp_notice_variable (gimplify_omp_ctxp
, *expr_p
, true);
14527 case DEBUG_EXPR_DECL
:
14528 gcc_unreachable ();
14530 case DEBUG_BEGIN_STMT
:
14531 gimplify_seq_add_stmt (pre_p
,
14532 gimple_build_debug_begin_stmt
14533 (TREE_BLOCK (*expr_p
),
14534 EXPR_LOCATION (*expr_p
)));
14540 /* Allow callbacks into the gimplifier during optimization. */
14545 gimplify_omp_parallel (expr_p
, pre_p
);
14550 gimplify_omp_task (expr_p
, pre_p
);
14556 case OMP_DISTRIBUTE
:
14559 ret
= gimplify_omp_for (expr_p
, pre_p
);
14563 ret
= gimplify_omp_loop (expr_p
, pre_p
);
14567 gimplify_oacc_cache (expr_p
, pre_p
);
14572 gimplify_oacc_declare (expr_p
, pre_p
);
14576 case OACC_HOST_DATA
:
14579 case OACC_PARALLEL
:
14584 case OMP_TARGET_DATA
:
14586 gimplify_omp_workshare (expr_p
, pre_p
);
14590 case OACC_ENTER_DATA
:
14591 case OACC_EXIT_DATA
:
14593 case OMP_TARGET_UPDATE
:
14594 case OMP_TARGET_ENTER_DATA
:
14595 case OMP_TARGET_EXIT_DATA
:
14596 gimplify_omp_target_update (expr_p
, pre_p
);
14606 gimple_seq body
= NULL
;
14608 bool saved_in_omp_construct
= in_omp_construct
;
14610 in_omp_construct
= true;
14611 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14612 in_omp_construct
= saved_in_omp_construct
;
14613 switch (TREE_CODE (*expr_p
))
14616 g
= gimple_build_omp_section (body
);
14619 g
= gimple_build_omp_master (body
);
14622 g
= gimplify_omp_ordered (*expr_p
, body
);
14625 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p
),
14626 pre_p
, ORT_WORKSHARE
, OMP_CRITICAL
);
14627 gimplify_adjust_omp_clauses (pre_p
, body
,
14628 &OMP_CRITICAL_CLAUSES (*expr_p
),
14630 g
= gimple_build_omp_critical (body
,
14631 OMP_CRITICAL_NAME (*expr_p
),
14632 OMP_CRITICAL_CLAUSES (*expr_p
));
14635 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p
),
14636 pre_p
, ORT_WORKSHARE
, OMP_SCAN
);
14637 gimplify_adjust_omp_clauses (pre_p
, body
,
14638 &OMP_SCAN_CLAUSES (*expr_p
),
14640 g
= gimple_build_omp_scan (body
, OMP_SCAN_CLAUSES (*expr_p
));
14643 gcc_unreachable ();
14645 gimplify_seq_add_stmt (pre_p
, g
);
14650 case OMP_TASKGROUP
:
14652 gimple_seq body
= NULL
;
14654 tree
*pclauses
= &OMP_TASKGROUP_CLAUSES (*expr_p
);
14655 bool saved_in_omp_construct
= in_omp_construct
;
14656 gimplify_scan_omp_clauses (pclauses
, pre_p
, ORT_TASKGROUP
,
14658 gimplify_adjust_omp_clauses (pre_p
, NULL
, pclauses
, OMP_TASKGROUP
);
14660 in_omp_construct
= true;
14661 gimplify_and_add (OMP_BODY (*expr_p
), &body
);
14662 in_omp_construct
= saved_in_omp_construct
;
14663 gimple_seq cleanup
= NULL
;
14664 tree fn
= builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END
);
14665 gimple
*g
= gimple_build_call (fn
, 0);
14666 gimple_seq_add_stmt (&cleanup
, g
);
14667 g
= gimple_build_try (body
, cleanup
, GIMPLE_TRY_FINALLY
);
14669 gimple_seq_add_stmt (&body
, g
);
14670 g
= gimple_build_omp_taskgroup (body
, *pclauses
);
14671 gimplify_seq_add_stmt (pre_p
, g
);
14677 case OMP_ATOMIC_READ
:
14678 case OMP_ATOMIC_CAPTURE_OLD
:
14679 case OMP_ATOMIC_CAPTURE_NEW
:
14680 ret
= gimplify_omp_atomic (expr_p
, pre_p
);
14683 case TRANSACTION_EXPR
:
14684 ret
= gimplify_transaction (expr_p
, pre_p
);
14687 case TRUTH_AND_EXPR
:
14688 case TRUTH_OR_EXPR
:
14689 case TRUTH_XOR_EXPR
:
14691 tree orig_type
= TREE_TYPE (*expr_p
);
14692 tree new_type
, xop0
, xop1
;
14693 *expr_p
= gimple_boolify (*expr_p
);
14694 new_type
= TREE_TYPE (*expr_p
);
14695 if (!useless_type_conversion_p (orig_type
, new_type
))
14697 *expr_p
= fold_convert_loc (input_location
, orig_type
, *expr_p
);
14702 /* Boolified binary truth expressions are semantically equivalent
14703 to bitwise binary expressions. Canonicalize them to the
14704 bitwise variant. */
14705 switch (TREE_CODE (*expr_p
))
14707 case TRUTH_AND_EXPR
:
14708 TREE_SET_CODE (*expr_p
, BIT_AND_EXPR
);
14710 case TRUTH_OR_EXPR
:
14711 TREE_SET_CODE (*expr_p
, BIT_IOR_EXPR
);
14713 case TRUTH_XOR_EXPR
:
14714 TREE_SET_CODE (*expr_p
, BIT_XOR_EXPR
);
14719 /* Now make sure that operands have compatible type to
14720 expression's new_type. */
14721 xop0
= TREE_OPERAND (*expr_p
, 0);
14722 xop1
= TREE_OPERAND (*expr_p
, 1);
14723 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop0
)))
14724 TREE_OPERAND (*expr_p
, 0) = fold_convert_loc (input_location
,
14727 if (!useless_type_conversion_p (new_type
, TREE_TYPE (xop1
)))
14728 TREE_OPERAND (*expr_p
, 1) = fold_convert_loc (input_location
,
14731 /* Continue classified as tcc_binary. */
14735 case VEC_COND_EXPR
:
14738 case VEC_PERM_EXPR
:
14739 /* Classified as tcc_expression. */
14742 case BIT_INSERT_EXPR
:
14743 /* Argument 3 is a constant. */
14746 case POINTER_PLUS_EXPR
:
14748 enum gimplify_status r0
, r1
;
14749 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14750 post_p
, is_gimple_val
, fb_rvalue
);
14751 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14752 post_p
, is_gimple_val
, fb_rvalue
);
14753 recalculate_side_effects (*expr_p
);
14754 ret
= MIN (r0
, r1
);
14759 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p
)))
14761 case tcc_comparison
:
14762 /* Handle comparison of objects of non scalar mode aggregates
14763 with a call to memcmp. It would be nice to only have to do
14764 this for variable-sized objects, but then we'd have to allow
14765 the same nest of reference nodes we allow for MODIFY_EXPR and
14766 that's too complex.
14768 Compare scalar mode aggregates as scalar mode values. Using
14769 memcmp for them would be very inefficient at best, and is
14770 plain wrong if bitfields are involved. */
14772 tree type
= TREE_TYPE (TREE_OPERAND (*expr_p
, 1));
14774 /* Vector comparisons need no boolification. */
14775 if (TREE_CODE (type
) == VECTOR_TYPE
)
14777 else if (!AGGREGATE_TYPE_P (type
))
14779 tree org_type
= TREE_TYPE (*expr_p
);
14780 *expr_p
= gimple_boolify (*expr_p
);
14781 if (!useless_type_conversion_p (org_type
,
14782 TREE_TYPE (*expr_p
)))
14784 *expr_p
= fold_convert_loc (input_location
,
14785 org_type
, *expr_p
);
14791 else if (TYPE_MODE (type
) != BLKmode
)
14792 ret
= gimplify_scalar_mode_aggregate_compare (expr_p
);
14794 ret
= gimplify_variable_sized_compare (expr_p
);
14799 /* If *EXPR_P does not need to be special-cased, handle it
14800 according to its class. */
14802 ret
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14803 post_p
, is_gimple_val
, fb_rvalue
);
14809 enum gimplify_status r0
, r1
;
14811 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14812 post_p
, is_gimple_val
, fb_rvalue
);
14813 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14814 post_p
, is_gimple_val
, fb_rvalue
);
14816 ret
= MIN (r0
, r1
);
14822 enum gimplify_status r0
, r1
, r2
;
14824 r0
= gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
,
14825 post_p
, is_gimple_val
, fb_rvalue
);
14826 r1
= gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
,
14827 post_p
, is_gimple_val
, fb_rvalue
);
14828 r2
= gimplify_expr (&TREE_OPERAND (*expr_p
, 2), pre_p
,
14829 post_p
, is_gimple_val
, fb_rvalue
);
14831 ret
= MIN (MIN (r0
, r1
), r2
);
14835 case tcc_declaration
:
14838 goto dont_recalculate
;
14841 gcc_unreachable ();
14844 recalculate_side_effects (*expr_p
);
14850 gcc_assert (*expr_p
|| ret
!= GS_OK
);
14852 while (ret
== GS_OK
);
14854 /* If we encountered an error_mark somewhere nested inside, either
14855 stub out the statement or propagate the error back out. */
14856 if (ret
== GS_ERROR
)
14863 /* This was only valid as a return value from the langhook, which
14864 we handled. Make sure it doesn't escape from any other context. */
14865 gcc_assert (ret
!= GS_UNHANDLED
);
14867 if (fallback
== fb_none
&& *expr_p
&& !is_gimple_stmt (*expr_p
))
14869 /* We aren't looking for a value, and we don't have a valid
14870 statement. If it doesn't have side-effects, throw it away.
14871 We can also get here with code such as "*&&L;", where L is
14872 a LABEL_DECL that is marked as FORCED_LABEL. */
14873 if (TREE_CODE (*expr_p
) == LABEL_DECL
14874 || !TREE_SIDE_EFFECTS (*expr_p
))
14876 else if (!TREE_THIS_VOLATILE (*expr_p
))
14878 /* This is probably a _REF that contains something nested that
14879 has side effects. Recurse through the operands to find it. */
14880 enum tree_code code
= TREE_CODE (*expr_p
);
14884 case COMPONENT_REF
:
14885 case REALPART_EXPR
:
14886 case IMAGPART_EXPR
:
14887 case VIEW_CONVERT_EXPR
:
14888 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14889 gimple_test_f
, fallback
);
14893 case ARRAY_RANGE_REF
:
14894 gimplify_expr (&TREE_OPERAND (*expr_p
, 0), pre_p
, post_p
,
14895 gimple_test_f
, fallback
);
14896 gimplify_expr (&TREE_OPERAND (*expr_p
, 1), pre_p
, post_p
,
14897 gimple_test_f
, fallback
);
14901 /* Anything else with side-effects must be converted to
14902 a valid statement before we get here. */
14903 gcc_unreachable ();
14908 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p
))
14909 && TYPE_MODE (TREE_TYPE (*expr_p
)) != BLKmode
)
14911 /* Historically, the compiler has treated a bare reference
14912 to a non-BLKmode volatile lvalue as forcing a load. */
14913 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p
));
14915 /* Normally, we do not want to create a temporary for a
14916 TREE_ADDRESSABLE type because such a type should not be
14917 copied by bitwise-assignment. However, we make an
14918 exception here, as all we are doing here is ensuring that
14919 we read the bytes that make up the type. We use
14920 create_tmp_var_raw because create_tmp_var will abort when
14921 given a TREE_ADDRESSABLE type. */
14922 tree tmp
= create_tmp_var_raw (type
, "vol");
14923 gimple_add_tmp_var (tmp
);
14924 gimplify_assign (tmp
, *expr_p
, pre_p
);
14928 /* We can't do anything useful with a volatile reference to
14929 an incomplete type, so just throw it away. Likewise for
14930 a BLKmode type, since any implicit inner load should
14931 already have been turned into an explicit one by the
14932 gimplification process. */
14936 /* If we are gimplifying at the statement level, we're done. Tack
14937 everything together and return. */
14938 if (fallback
== fb_none
|| is_statement
)
14940 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14941 it out for GC to reclaim it. */
14942 *expr_p
= NULL_TREE
;
14944 if (!gimple_seq_empty_p (internal_pre
)
14945 || !gimple_seq_empty_p (internal_post
))
14947 gimplify_seq_add_seq (&internal_pre
, internal_post
);
14948 gimplify_seq_add_seq (pre_p
, internal_pre
);
14951 /* The result of gimplifying *EXPR_P is going to be the last few
14952 statements in *PRE_P and *POST_P. Add location information
14953 to all the statements that were added by the gimplification
14955 if (!gimple_seq_empty_p (*pre_p
))
14956 annotate_all_with_location_after (*pre_p
, pre_last_gsi
, input_location
);
14958 if (!gimple_seq_empty_p (*post_p
))
14959 annotate_all_with_location_after (*post_p
, post_last_gsi
,
14965 #ifdef ENABLE_GIMPLE_CHECKING
14968 enum tree_code code
= TREE_CODE (*expr_p
);
14969 /* These expressions should already be in gimple IR form. */
14970 gcc_assert (code
!= MODIFY_EXPR
14971 && code
!= ASM_EXPR
14972 && code
!= BIND_EXPR
14973 && code
!= CATCH_EXPR
14974 && (code
!= COND_EXPR
|| gimplify_ctxp
->allow_rhs_cond_expr
)
14975 && code
!= EH_FILTER_EXPR
14976 && code
!= GOTO_EXPR
14977 && code
!= LABEL_EXPR
14978 && code
!= LOOP_EXPR
14979 && code
!= SWITCH_EXPR
14980 && code
!= TRY_FINALLY_EXPR
14981 && code
!= EH_ELSE_EXPR
14982 && code
!= OACC_PARALLEL
14983 && code
!= OACC_KERNELS
14984 && code
!= OACC_SERIAL
14985 && code
!= OACC_DATA
14986 && code
!= OACC_HOST_DATA
14987 && code
!= OACC_DECLARE
14988 && code
!= OACC_UPDATE
14989 && code
!= OACC_ENTER_DATA
14990 && code
!= OACC_EXIT_DATA
14991 && code
!= OACC_CACHE
14992 && code
!= OMP_CRITICAL
14994 && code
!= OACC_LOOP
14995 && code
!= OMP_MASTER
14996 && code
!= OMP_TASKGROUP
14997 && code
!= OMP_ORDERED
14998 && code
!= OMP_PARALLEL
14999 && code
!= OMP_SCAN
15000 && code
!= OMP_SECTIONS
15001 && code
!= OMP_SECTION
15002 && code
!= OMP_SINGLE
);
15006 /* Otherwise we're gimplifying a subexpression, so the resulting
15007 value is interesting. If it's a valid operand that matches
15008 GIMPLE_TEST_F, we're done. Unless we are handling some
15009 post-effects internally; if that's the case, we need to copy into
15010 a temporary before adding the post-effects to POST_P. */
15011 if (gimple_seq_empty_p (internal_post
) && (*gimple_test_f
) (*expr_p
))
15014 /* Otherwise, we need to create a new temporary for the gimplified
15017 /* We can't return an lvalue if we have an internal postqueue. The
15018 object the lvalue refers to would (probably) be modified by the
15019 postqueue; we need to copy the value out first, which means an
15021 if ((fallback
& fb_lvalue
)
15022 && gimple_seq_empty_p (internal_post
)
15023 && is_gimple_addressable (*expr_p
))
15025 /* An lvalue will do. Take the address of the expression, store it
15026 in a temporary, and replace the expression with an INDIRECT_REF of
15028 tree ref_alias_type
= reference_alias_ptr_type (*expr_p
);
15029 unsigned int ref_align
= get_object_alignment (*expr_p
);
15030 tree ref_type
= TREE_TYPE (*expr_p
);
15031 tmp
= build_fold_addr_expr_loc (input_location
, *expr_p
);
15032 gimplify_expr (&tmp
, pre_p
, post_p
, is_gimple_reg
, fb_rvalue
);
15033 if (TYPE_ALIGN (ref_type
) != ref_align
)
15034 ref_type
= build_aligned_type (ref_type
, ref_align
);
15035 *expr_p
= build2 (MEM_REF
, ref_type
,
15036 tmp
, build_zero_cst (ref_alias_type
));
15038 else if ((fallback
& fb_rvalue
) && is_gimple_reg_rhs_or_call (*expr_p
))
15040 /* An rvalue will do. Assign the gimplified expression into a
15041 new temporary TMP and replace the original expression with
15042 TMP. First, make sure that the expression has a type so that
15043 it can be assigned into a temporary. */
15044 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p
)));
15045 *expr_p
= get_formal_tmp_var (*expr_p
, pre_p
);
15049 #ifdef ENABLE_GIMPLE_CHECKING
15050 if (!(fallback
& fb_mayfail
))
15052 fprintf (stderr
, "gimplification failed:\n");
15053 print_generic_expr (stderr
, *expr_p
);
15054 debug_tree (*expr_p
);
15055 internal_error ("gimplification failed");
15058 gcc_assert (fallback
& fb_mayfail
);
15060 /* If this is an asm statement, and the user asked for the
15061 impossible, don't die. Fail and let gimplify_asm_expr
15067 /* Make sure the temporary matches our predicate. */
15068 gcc_assert ((*gimple_test_f
) (*expr_p
));
15070 if (!gimple_seq_empty_p (internal_post
))
15072 annotate_all_with_location (internal_post
, input_location
);
15073 gimplify_seq_add_seq (pre_p
, internal_post
);
15077 input_location
= saved_location
;
15081 /* Like gimplify_expr but make sure the gimplified result is not itself
15082 a SSA name (but a decl if it were). Temporaries required by
15083 evaluating *EXPR_P may be still SSA names. */
15085 static enum gimplify_status
15086 gimplify_expr (tree
*expr_p
, gimple_seq
*pre_p
, gimple_seq
*post_p
,
15087 bool (*gimple_test_f
) (tree
), fallback_t fallback
,
15090 bool was_ssa_name_p
= TREE_CODE (*expr_p
) == SSA_NAME
;
15091 enum gimplify_status ret
= gimplify_expr (expr_p
, pre_p
, post_p
,
15092 gimple_test_f
, fallback
);
15094 && TREE_CODE (*expr_p
) == SSA_NAME
)
15096 tree name
= *expr_p
;
15097 if (was_ssa_name_p
)
15098 *expr_p
= get_initialized_tmp_var (*expr_p
, pre_p
, NULL
, false);
15101 /* Avoid the extra copy if possible. */
15102 *expr_p
= create_tmp_reg (TREE_TYPE (name
));
15103 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name
)))
15104 gimple_set_lhs (SSA_NAME_DEF_STMT (name
), *expr_p
);
15105 release_ssa_name (name
);
15111 /* Look through TYPE for variable-sized objects and gimplify each such
15112 size that we find. Add to LIST_P any statements generated. */
/* NOTE(review): the return-type line, opening brace, local declarations
   (apparently T and FIELD), several case labels and closing braces are
   missing from this extraction -- confirm against upstream gimplify.c.  */
15115 gimplify_type_sizes (tree type
, gimple_seq
*list_p
)
15119 if (type
== NULL
|| type
== error_mark_node
)
15122 /* We first do the main variant, then copy into any other variants. */
15123 type
= TYPE_MAIN_VARIANT (type
);
15125 /* Avoid infinite recursion. */
15126 if (TYPE_SIZES_GIMPLIFIED (type
))
15129 TYPE_SIZES_GIMPLIFIED (type
) = 1;
/* Dispatch on the kind of type: scalar-like types gimplify their
   MIN/MAX bounds, arrays recurse on element and domain, records walk
   their fields.  */
15131 switch (TREE_CODE (type
))
15134 case ENUMERAL_TYPE
:
15137 case FIXED_POINT_TYPE
:
15138 gimplify_one_sizepos (&TYPE_MIN_VALUE (type
), list_p
);
15139 gimplify_one_sizepos (&TYPE_MAX_VALUE (type
), list_p
);
/* Propagate the gimplified bounds to every other variant of TYPE.  */
15141 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
15143 TYPE_MIN_VALUE (t
) = TYPE_MIN_VALUE (type
);
15144 TYPE_MAX_VALUE (t
) = TYPE_MAX_VALUE (type
);
15149 /* These types may not have declarations, so handle them here. */
15150 gimplify_type_sizes (TREE_TYPE (type
), list_p
);
15151 gimplify_type_sizes (TYPE_DOMAIN (type
), list_p
);
15152 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
15153 with assigned stack slots, for -O1+ -g they should be tracked
15155 if (!(TYPE_NAME (type
)
15156 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
15157 && DECL_IGNORED_P (TYPE_NAME (type
)))
15158 && TYPE_DOMAIN (type
)
15159 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type
)))
/* Un-ignore artificial bound variables so the debugger can see them.  */
15161 t
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
15162 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
15163 DECL_IGNORED_P (t
) = 0;
15164 t
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
15165 if (t
&& VAR_P (t
) && DECL_ARTIFICIAL (t
))
15166 DECL_IGNORED_P (t
) = 0;
15172 case QUAL_UNION_TYPE
:
/* Gimplify the position and size of every field of the aggregate and
   recurse into each field's own type.  */
15173 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
15174 if (TREE_CODE (field
) == FIELD_DECL
)
15176 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field
), list_p
);
15177 gimplify_one_sizepos (&DECL_SIZE (field
), list_p
);
15178 gimplify_one_sizepos (&DECL_SIZE_UNIT (field
), list_p
);
15179 gimplify_type_sizes (TREE_TYPE (field
), list_p
);
15184 case REFERENCE_TYPE
:
15185 /* We used to recurse on the pointed-to type here, which turned out to
15186 be incorrect because its definition might refer to variables not
15187 yet initialized at this point if a forward declaration is involved.
15189 It was actually useful for anonymous pointed-to types to ensure
15190 that the sizes evaluation dominates every possible later use of the
15191 values. Restricting to such types here would be safe since there
15192 is no possible forward declaration around, but would introduce an
15193 undesirable middle-end semantic to anonymity. We then defer to
15194 front-ends the responsibility of ensuring that the sizes are
15195 evaluated both early and late enough, e.g. by attaching artificial
15196 type declarations to the tree. */
/* Common tail: gimplify the overall size/size-unit and copy the result
   (and the gimplified flag) into every variant of TYPE.  */
15203 gimplify_one_sizepos (&TYPE_SIZE (type
), list_p
);
15204 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type
), list_p
);
15206 for (t
= TYPE_NEXT_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
15208 TYPE_SIZE (t
) = TYPE_SIZE (type
);
15209 TYPE_SIZE_UNIT (t
) = TYPE_SIZE_UNIT (type
);
15210 TYPE_SIZES_GIMPLIFIED (t
) = 1;
15214 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15215 a size or position, has had all of its SAVE_EXPRs evaluated.
15216 We add any required statements to *STMT_P. */
/* NOTE(review): the return-type line, braces and an early return are
   missing from this extraction -- confirm against upstream gimplify.c.  */
15219 gimplify_one_sizepos (tree
*expr_p
, gimple_seq
*stmt_p
)
15221 tree expr
= *expr_p
;
15223 /* We don't do anything if the value isn't there, is constant, or contains
15224 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15225 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
15226 will want to replace it with a new variable, but that will cause problems
15227 if this type is from outside the function. It's OK to have that here. */
15228 if (expr
== NULL_TREE
15229 || is_gimple_constant (expr
)
15230 || TREE_CODE (expr
) == VAR_DECL
15231 || CONTAINS_PLACEHOLDER_P (expr
))
/* Unshare before gimplifying so shared tree nodes are not rewritten
   behind other users' backs.  */
15234 *expr_p
= unshare_expr (expr
);
15236 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15237 if the def vanishes. */
15238 gimplify_expr (expr_p
, stmt_p
, NULL
, is_gimple_val
, fb_rvalue
, false);
15240 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15241 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
15242 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
15243 if (is_gimple_constant (*expr_p
))
15244 *expr_p
= get_initialized_tmp_var (*expr_p
, stmt_p
, NULL
, false);
15247 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
15248 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15249 is true, also gimplify the parameters. */
/* NOTE(review): the return-type line (presumably 'gbind *'), several
   local declarations (OUTER_BIND, PARM), braces and the final return are
   missing from this extraction -- confirm against upstream gimplify.c.  */
15252 gimplify_body (tree fndecl
, bool do_parms
)
15254 location_t saved_location
= input_location
;
15255 gimple_seq parm_stmts
, parm_cleanup
= NULL
, seq
;
15256 gimple
*outer_stmt
;
15259 timevar_push (TV_TREE_GIMPLIFY
);
15261 init_tree_ssa (cfun
);
15263 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15265 default_rtl_profile ();
15267 gcc_assert (gimplify_ctxp
== NULL
);
15268 push_gimplify_context (true);
/* Under OpenACC/OpenMP, functions marked "omp declare target" get an
   implicit-target OMP context for the whole body.  */
15270 if (flag_openacc
|| flag_openmp
)
15272 gcc_assert (gimplify_omp_ctxp
== NULL
);
15273 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl
)))
15274 gimplify_omp_ctxp
= new_omp_context (ORT_IMPLICIT_TARGET
);
15277 /* Unshare most shared trees in the body and in that of any nested functions.
15278 It would seem we don't have to do this for nested functions because
15279 they are supposed to be output and then the outer function gimplified
15280 first, but the g++ front end doesn't always do it that way. */
15281 unshare_body (fndecl
);
15282 unvisit_body (fndecl
);
15284 /* Make sure input_location isn't set to something weird. */
15285 input_location
= DECL_SOURCE_LOCATION (fndecl
);
15287 /* Resolve callee-copies. This has to be done before processing
15288 the body so that DECL_VALUE_EXPR gets processed correctly. */
15289 parm_stmts
= do_parms
? gimplify_parameters (&parm_cleanup
) : NULL
;
15291 /* Gimplify the function's body. */
15293 gimplify_stmt (&DECL_SAVED_TREE (fndecl
), &seq
);
15294 outer_stmt
= gimple_seq_first_nondebug_stmt (seq
);
/* An empty body is represented by a single GIMPLE_NOP.  */
15297 outer_stmt
= gimple_build_nop ();
15298 gimplify_seq_add_stmt (&seq
, outer_stmt
);
15301 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15302 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15303 if (gimple_code (outer_stmt
) == GIMPLE_BIND
15304 && (gimple_seq_first_nondebug_stmt (seq
)
15305 == gimple_seq_last_nondebug_stmt (seq
)))
15307 outer_bind
= as_a
<gbind
*> (outer_stmt
);
15308 if (gimple_seq_first_stmt (seq
) != outer_stmt
15309 || gimple_seq_last_stmt (seq
) != outer_stmt
)
15311 /* If there are debug stmts before or after outer_stmt, move them
15312 inside of outer_bind body. */
15313 gimple_stmt_iterator gsi
= gsi_for_stmt (outer_stmt
, &seq
);
15314 gimple_seq second_seq
= NULL
;
15315 if (gimple_seq_first_stmt (seq
) != outer_stmt
15316 && gimple_seq_last_stmt (seq
) != outer_stmt
)
/* Debug stmts on both sides: split the sequence after the bind and
   detach the bind itself.  */
15318 second_seq
= gsi_split_seq_after (gsi
);
15319 gsi_remove (&gsi
, false);
15321 else if (gimple_seq_first_stmt (seq
) != outer_stmt
)
15322 gsi_remove (&gsi
, false);
15325 gsi_remove (&gsi
, false);
/* Reassemble: leading debug stmts, original bind body, trailing debug
   stmts -- all inside the bind.  */
15329 gimple_seq_add_seq_without_update (&seq
,
15330 gimple_bind_body (outer_bind
));
15331 gimple_seq_add_seq_without_update (&seq
, second_seq
);
15332 gimple_bind_set_body (outer_bind
, seq
);
15336 outer_bind
= gimple_build_bind (NULL_TREE
, seq
, NULL
);
15338 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
15340 /* If we had callee-copies statements, insert them at the beginning
15341 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15342 if (!gimple_seq_empty_p (parm_stmts
))
15346 gimplify_seq_add_seq (&parm_stmts
, gimple_bind_body (outer_bind
));
/* Wrap in a try/finally so PARM_CLEANUP runs on every exit path.  */
15349 gtry
*g
= gimple_build_try (parm_stmts
, parm_cleanup
,
15350 GIMPLE_TRY_FINALLY
);
15352 gimple_seq_add_stmt (&parm_stmts
, g
);
15354 gimple_bind_set_body (outer_bind
, parm_stmts
);
15356 for (parm
= DECL_ARGUMENTS (current_function_decl
);
15357 parm
; parm
= DECL_CHAIN (parm
))
15358 if (DECL_HAS_VALUE_EXPR_P (parm
))
15360 DECL_HAS_VALUE_EXPR_P (parm
) = 0;
15361 DECL_IGNORED_P (parm
) = 0;
/* Tear down any OMP context created above (or during gimplification).  */
15365 if ((flag_openacc
|| flag_openmp
|| flag_openmp_simd
)
15366 && gimplify_omp_ctxp
)
15368 delete_omp_context (gimplify_omp_ctxp
);
15369 gimplify_omp_ctxp
= NULL
;
15372 pop_gimplify_context (outer_bind
);
15373 gcc_assert (gimplify_ctxp
== NULL
);
15375 if (flag_checking
&& !seen_error ())
15376 verify_gimple_in_seq (gimple_bind_body (outer_bind
));
15378 timevar_pop (TV_TREE_GIMPLIFY
);
15379 input_location
= saved_location
;
15384 typedef char *char_p
; /* For DEF_VEC_P. */
15386 /* Return whether we should exclude FNDECL from instrumentation. */
/* NOTE(review): the return-type line, braces, local declarations
   (apparently V, NAME, I, S) and the return statements are missing from
   this extraction -- confirm against upstream gimplify.c.  */
15389 flag_instrument_functions_exclude_p (tree fndecl
)
/* First check the -finstrument-functions-exclude-function-list entries
   against the function's printable name.  */
15393 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_functions
;
15394 if (v
&& v
->length () > 0)
15400 name
= lang_hooks
.decl_printable_name (fndecl
, 1);
/* strstr means substring matching: an entry excludes every function
   whose printable name merely contains it.  */
15401 FOR_EACH_VEC_ELT (*v
, i
, s
)
15402 if (strstr (name
, s
) != NULL
)
/* Then check -finstrument-functions-exclude-file-list against the
   declaration's source file, again by substring.  */
15406 v
= (vec
<char_p
> *) flag_instrument_functions_exclude_files
;
15407 if (v
&& v
->length () > 0)
15413 name
= DECL_SOURCE_FILE (fndecl
);
15414 FOR_EACH_VEC_ELT (*v
, i
, s
)
15415 if (strstr (name
, s
) != NULL
)
15422 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15423 node for the function we want to gimplify.
15425 Return the sequence of GIMPLE statements corresponding to the body
/* NOTE(review): the return-type line, braces and several local
   declarations (apparently BIND, SEQ, X, CALL, TF, NEW_BIND, and the
   trailing pop_cfun) are missing from this extraction -- confirm
   against upstream gimplify.c.  */
15429 gimplify_function_tree (tree fndecl
)
15434 gcc_assert (!gimple_body (fndecl
));
15436 if (DECL_STRUCT_FUNCTION (fndecl
))
15437 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
15439 push_struct_function (fndecl
);
15441 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15443 cfun
->curr_properties
|= PROP_gimple_lva
;
/* Track poisoned variables for -fsanitize-address-use-after-scope only
   for the duration of gimplifying this body.  */
15445 if (asan_sanitize_use_after_scope ())
15446 asan_poisoned_variables
= new hash_set
<tree
> ();
15447 bind
= gimplify_body (fndecl
, true);
15448 if (asan_poisoned_variables
)
15450 delete asan_poisoned_variables
;
15451 asan_poisoned_variables
= NULL
;
15454 /* The tree body of the function is no longer needed, replace it
15455 with the new GIMPLE body. */
15457 gimple_seq_add_stmt (&seq
, bind
);
15458 gimple_set_body (fndecl
, seq
);
15460 /* If we're instrumenting function entry/exit, then prepend the call to
15461 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15462 catch the exit hook. */
15463 /* ??? Add some way to ignore exceptions for this TFE. */
15464 if (flag_instrument_function_entry_exit
15465 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl
)
15466 /* Do not instrument extern inline functions. */
15467 && !(DECL_DECLARED_INLINE_P (fndecl
)
15468 && DECL_EXTERNAL (fndecl
)
15469 && DECL_DISREGARD_INLINE_LIMITS (fndecl
))
15470 && !flag_instrument_functions_exclude_p (fndecl
))
15475 gimple_seq cleanup
= NULL
, body
= NULL
;
15476 tree tmp_var
, this_fn_addr
;
15479 /* The instrumentation hooks aren't going to call the instrumented
15480 function and the address they receive is expected to be matchable
15481 against symbol addresses. Make sure we don't create a trampoline,
15482 in case the current function is nested. */
15483 this_fn_addr
= build_fold_addr_expr (current_function_decl
);
15484 TREE_NO_TRAMPOLINE (this_fn_addr
) = 1;
/* Build the CLEANUP sequence: fetch __builtin_return_address (0) and
   call __cyg_profile_func_exit (this_fn, return_addr) on exit.  */
15486 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
15487 call
= gimple_build_call (x
, 1, integer_zero_node
);
15488 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
15489 gimple_call_set_lhs (call
, tmp_var
);
15490 gimplify_seq_add_stmt (&cleanup
, call
);
15491 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT
);
15492 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
15493 gimplify_seq_add_stmt (&cleanup
, call
);
15494 tf
= gimple_build_try (seq
, cleanup
, GIMPLE_TRY_FINALLY
);
/* Build the BODY prologue: same return-address fetch followed by
   __cyg_profile_func_enter, then the try/finally wrapping the body.  */
15496 x
= builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS
);
15497 call
= gimple_build_call (x
, 1, integer_zero_node
);
15498 tmp_var
= create_tmp_var (ptr_type_node
, "return_addr");
15499 gimple_call_set_lhs (call
, tmp_var
);
15500 gimplify_seq_add_stmt (&body
, call
);
15501 x
= builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER
);
15502 call
= gimple_build_call (x
, 2, this_fn_addr
, tmp_var
);
15503 gimplify_seq_add_stmt (&body
, call
);
15504 gimplify_seq_add_stmt (&body
, tf
);
15505 new_bind
= gimple_build_bind (NULL
, body
, NULL
);
15507 /* Replace the current function body with the body
15508 wrapped in the try/finally TF. */
15510 gimple_seq_add_stmt (&seq
, new_bind
);
15511 gimple_set_body (fndecl
, seq
);
/* For ThreadSanitizer, additionally wrap the body in a try/finally
   that calls IFN_TSAN_FUNC_EXIT on every exit path.  */
15515 if (sanitize_flags_p (SANITIZE_THREAD
)
15516 && param_tsan_instrument_func_entry_exit
)
15518 gcall
*call
= gimple_build_call_internal (IFN_TSAN_FUNC_EXIT
, 0);
15519 gimple
*tf
= gimple_build_try (seq
, call
, GIMPLE_TRY_FINALLY
);
15520 gbind
*new_bind
= gimple_build_bind (NULL
, tf
, NULL
);
15521 /* Replace the current function body with the body
15522 wrapped in the try/finally TF. */
15524 gimple_seq_add_stmt (&seq
, new_bind
);
15525 gimple_set_body (fndecl
, seq
);
15528 DECL_SAVED_TREE (fndecl
) = NULL_TREE
;
15529 cfun
->curr_properties
|= PROP_gimple_any
;
15533 dump_function (TDI_gimple
, fndecl
);
15536 /* Return a dummy expression of type TYPE in order to keep going after an
/* NOTE(review): the rest of this header comment ("error.") and the
   return-type line (presumably 'static tree') are missing from this
   extraction -- confirm against upstream gimplify.c.  */
15540 dummy_object (tree type
)
/* Build *(TYPE *) 0: a MEM_REF of a null constant, giving a valid tree
   of the requested type for error recovery.  */
15542 tree t
= build_int_cst (build_pointer_type (type
), 0);
15543 return build2 (MEM_REF
, type
, t
, t
);
15546 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15547 builtin function, but a very special sort of operator. */
/* NOTE(review): braces, an early GS_ERROR return for error_mark_node,
   the declarations of WARNED/XLOC, parts of the promoted-type condition
   and the final return are missing from this extraction -- confirm
   against upstream gimplify.c.  */
15549 enum gimplify_status
15550 gimplify_va_arg_expr (tree
*expr_p
, gimple_seq
*pre_p
,
15551 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
15553 tree promoted_type
, have_va_type
;
15554 tree valist
= TREE_OPERAND (*expr_p
, 0);
15555 tree type
= TREE_TYPE (*expr_p
);
15556 tree t
, tag
, aptag
;
15557 location_t loc
= EXPR_LOCATION (*expr_p
);
15559 /* Verify that valist is of the proper type. */
15560 have_va_type
= TREE_TYPE (valist
);
15561 if (have_va_type
== error_mark_node
)
15563 have_va_type
= targetm
.canonical_va_list_type (have_va_type
);
15564 if (have_va_type
== NULL_TREE
15565 && POINTER_TYPE_P (TREE_TYPE (valist
)))
15566 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15568 = targetm
.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist
)));
15569 gcc_assert (have_va_type
!= NULL_TREE
);
15571 /* Generate a diagnostic for requesting data of a type that cannot
15572 be passed through `...' due to type promotion at the call site. */
15573 if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
15576 static bool gave_help
;
15578 /* Use the expansion point to handle cases such as passing bool (defined
15579 in a system header) through `...'. */
15581 = expansion_point_location_if_in_system_header (loc
);
15583 /* Unfortunately, this is merely undefined, rather than a constraint
15584 violation, so we cannot make this an error. If this call is never
15585 executed, the program is still strictly conforming. */
15586 auto_diagnostic_group d
;
15587 warned
= warning_at (xloc
, 0,
15588 "%qT is promoted to %qT when passed through %<...%>",
15589 type
, promoted_type
);
/* GAVE_HELP is static so the explanatory note is issued only once per
   compilation.  */
15590 if (!gave_help
&& warned
)
15593 inform (xloc
, "(so you should pass %qT not %qT to %<va_arg%>)",
15594 promoted_type
, type
);
15597 /* We can, however, treat "undefined" any way we please.
15598 Call abort to encourage the user to fix the program. */
15600 inform (xloc
, "if this code is reached, the program will abort");
15601 /* Before the abort, allow the evaluation of the va_list
15602 expression to exit or longjmp. */
15603 gimplify_and_add (valist
, pre_p
);
15604 t
= build_call_expr_loc (loc
,
15605 builtin_decl_implicit (BUILT_IN_TRAP
), 0);
15606 gimplify_and_add (t
, pre_p
);
15608 /* This is dead code, but go ahead and finish so that the
15609 mode of the result comes out right. */
15610 *expr_p
= dummy_object (type
);
15611 return GS_ALL_DONE
;
/* Normal path: lower to the IFN_VA_ARG internal function.  TAG is a
   null pointer-to-TYPE carrying the requested type; APTAG is a null
   constant of the va_list's type.  */
15614 tag
= build_int_cst (build_pointer_type (type
), 0);
15615 aptag
= build_int_cst (TREE_TYPE (valist
), 0);
15617 *expr_p
= build_call_expr_internal_loc (loc
, IFN_VA_ARG
, type
, 3,
15618 valist
, tag
, aptag
);
15620 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15621 needs to be expanded. */
15622 cfun
->curr_properties
&= ~PROP_gimple_lva
;
15627 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15629 DST/SRC are the destination and source respectively. You can pass
15630 ungimplified trees in DST or SRC, in which case they will be
15631 converted to a gimple operand if necessary.
15633 This function returns the newly created GIMPLE_ASSIGN tuple. */
/* NOTE(review): the return-type line (presumably 'gimple *') and braces
   are missing from this extraction -- confirm against upstream.  */
15636 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
/* Build DST = SRC as a GENERIC MODIFY_EXPR, then gimplify it onto the
   end of *SEQ_P; the last statement added is the assignment itself.  */
15638 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
15639 gimplify_and_add (t
, seq_p
);
15641 return gimple_seq_last_stmt (*seq_p
);
/* Hash a temporary-table entry by its key expression.  NOTE(review):
   the return-type line and the declaration of T (presumably P->val)
   are missing from this extraction -- confirm against upstream.  */
15645 gimplify_hasher::hash (const elt_t
*p
)
15648 return iterative_hash_expr (t
, 0);
15652 gimplify_hasher::equal (const elt_t
*p1
, const elt_t
*p2
)
15656 enum tree_code code
= TREE_CODE (t1
);
15658 if (TREE_CODE (t2
) != code
15659 || TREE_TYPE (t1
) != TREE_TYPE (t2
))
15662 if (!operand_equal_p (t1
, t2
, 0))
15665 /* Only allow them to compare equal if they also hash equal; otherwise
15666 results are nondeterminate, and we fail bootstrap comparison. */
15667 gcc_checking_assert (hash (p1
) == hash (p2
));