/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2021 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

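/* As a rough illustration (simplified; the names below match the
   artificial decls this file creates), a region such as

       void foo (void)
       {
	 int x = 0;
       #pragma omp parallel shared(x)
	 x = 1;
       }

   is outlined into a child function

       struct .omp_data_s { int x; };

       void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
       {
	 .omp_data_i->x = 1;
       }

   while the directive itself is replaced by data marshalling plus a
   call into libgomp:

       struct .omp_data_s .omp_data_o;
       .omp_data_o.x = x;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);
       x = .omp_data_o.x;  */
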
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by task
     firstprivate fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}

/* Return true if STMT corresponds to an OpenMP target region.  */
static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

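/* For instance (C++): OpenMP allows a non-static data member in a
   data-sharing clause inside a member function, roughly

       struct S
       {
	 int m;
	 void f ()
	 {
	   #pragma omp parallel private (m)
	   ...
	 }
       };

   for which the front end creates an artificial VAR_DECL whose
   DECL_VALUE_EXPR is this->m; for such a decl the function above
   returns the implicit "this" PARM_DECL.  */
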
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

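/* Note that the splay_tree_key in FIELD_MAP/SFIELD_MAP is not always
   the decl itself: install_var_field below may instead key a field off
   &DECL_UID (VAR) or &DECL_NAME (VAR), so that several distinct fields
   can coexist for one decl (e.g. for taskloop lastprivate handling or
   Fortran array descriptors).  */
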
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, the task hasn't
	 necessarily terminated when GOMP_task returns.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

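/* For example: a non-addressable local scalar named in shared(...) on
   a parallel gets copy-in/copy-out (the function returns false), while
   aggregates, addressable variables, and anything shared into a task
   context are passed by pointer (true).  */
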
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and was only made so because a task needs
     to take its address.  The privatized copies of such a var don't
     need to have their address taken.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  The
   bits of MASK control the details:
   1 - enter the field into CTX->FIELD_MAP / CTX->RECORD_TYPE,
   2 - enter it into CTX->SFIELD_MAP / CTX->SRECORD_TYPE,
   4 - VAR is an array; declare the field as pointer to pointer to it,
   8 - key the maps off &DECL_UID (VAR) rather than VAR itself,
   16 - key the maps off &DECL_NAME (VAR) and use the array data type
	(for Fortran array descriptors),
   32 - combined with bit 1, keep a REFERENCE_TYPE VAR's type as-is
	instead of stripping the reference.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

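/* For example, a field created for a shared VLA has a variably
   modified type that refers to the saved array length decl; the copy
   in the child function must refer to the remapped length decl
   instead, which is what the rebuilding above achieves.  */
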
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1.  */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
      {
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
				OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				: integer_zero_node);
      }
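
  /* For instance, given "firstprivate (x) allocate (al : x)" the loop
     above records X -> AL; "allocate (x)" without an allocator modifier
     records integer_zero_node, while an explicit omp_default_mem_alloc
     (numeric value 1) is treated like the default and not recorded.  */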

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region; however, sender refs for it must be created
		 for its address to be passed to the runtime.  */
1477 tree field
1478 = build_decl (OMP_CLAUSE_LOCATION (c),
1479 FIELD_DECL, NULL_TREE, ptr_type_node);
1480 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1481 insert_field_into_struct (ctx->record_type, field);
1482 /* To not clash with a map of the pointer variable itself,
1483 attach/detach maps have their field looked up by the *clause*
1484 tree expression, not the decl. */
1485 gcc_assert (!splay_tree_lookup (ctx->field_map,
1486 (splay_tree_key) c));
1487 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1488 (splay_tree_value) field);
1489 break;
1490 }
1491 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1492 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1493 || (OMP_CLAUSE_MAP_KIND (c)
1494 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1495 {
1496 if (TREE_CODE (decl) == COMPONENT_REF
1497 || (TREE_CODE (decl) == INDIRECT_REF
1498 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1499 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1500 == REFERENCE_TYPE)))
1501 break;
1502 if (DECL_SIZE (decl)
1503 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1504 {
1505 tree decl2 = DECL_VALUE_EXPR (decl);
1506 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1507 decl2 = TREE_OPERAND (decl2, 0);
1508 gcc_assert (DECL_P (decl2));
1509 install_var_local (decl2, ctx);
1510 }
1511 install_var_local (decl, ctx);
1512 break;
1513 }
1514 if (DECL_P (decl))
1515 {
1516 if (DECL_SIZE (decl)
1517 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1518 {
1519 tree decl2 = DECL_VALUE_EXPR (decl);
1520 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1521 decl2 = TREE_OPERAND (decl2, 0);
1522 gcc_assert (DECL_P (decl2));
1523 install_var_field (decl2, true, 3, ctx);
1524 install_var_local (decl2, ctx);
1525 install_var_local (decl, ctx);
1526 }
1527 else
1528 {
1529 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1530 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1531 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1532 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1533 install_var_field (decl, true, 7, ctx);
1534 else
1535 install_var_field (decl, true, 3, ctx);
1536 if (is_gimple_omp_offloaded (ctx->stmt)
1537 && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1538 install_var_local (decl, ctx);
1539 }
1540 }
1541 else
1542 {
1543 tree base = get_base_address (decl);
1544 tree nc = OMP_CLAUSE_CHAIN (c);
1545 if (DECL_P (base)
1546 && nc != NULL_TREE
1547 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1548 && OMP_CLAUSE_DECL (nc) == base
1549 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1550 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1551 {
1552 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1553 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1554 }
1555 else
1556 {
1557 if (ctx->outer)
1558 {
1559 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1560 decl = OMP_CLAUSE_DECL (c);
1561 }
1562 gcc_assert (!splay_tree_lookup (ctx->field_map,
1563 (splay_tree_key) decl));
1564 tree field
1565 = build_decl (OMP_CLAUSE_LOCATION (c),
1566 FIELD_DECL, NULL_TREE, ptr_type_node);
1567 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1568 insert_field_into_struct (ctx->record_type, field);
1569 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1570 (splay_tree_value) field);
1571 }
1572 }
1573 break;
1574
1575 case OMP_CLAUSE_ORDER:
1576 ctx->order_concurrent = true;
1577 break;
1578
1579 case OMP_CLAUSE_BIND:
1580 ctx->loop_p = true;
1581 break;
1582
1583 case OMP_CLAUSE_NOWAIT:
1584 case OMP_CLAUSE_ORDERED:
1585 case OMP_CLAUSE_COLLAPSE:
1586 case OMP_CLAUSE_UNTIED:
1587 case OMP_CLAUSE_MERGEABLE:
1588 case OMP_CLAUSE_PROC_BIND:
1589 case OMP_CLAUSE_SAFELEN:
1590 case OMP_CLAUSE_SIMDLEN:
1591 case OMP_CLAUSE_THREADS:
1592 case OMP_CLAUSE_SIMD:
1593 case OMP_CLAUSE_NOGROUP:
1594 case OMP_CLAUSE_DEFAULTMAP:
1595 case OMP_CLAUSE_ASYNC:
1596 case OMP_CLAUSE_WAIT:
1597 case OMP_CLAUSE_GANG:
1598 case OMP_CLAUSE_WORKER:
1599 case OMP_CLAUSE_VECTOR:
1600 case OMP_CLAUSE_INDEPENDENT:
1601 case OMP_CLAUSE_AUTO:
1602 case OMP_CLAUSE_SEQ:
1603 case OMP_CLAUSE_TILE:
1604 case OMP_CLAUSE__SIMT_:
1605 case OMP_CLAUSE_DEFAULT:
1606 case OMP_CLAUSE_NONTEMPORAL:
1607 case OMP_CLAUSE_IF_PRESENT:
1608 case OMP_CLAUSE_FINALIZE:
1609 case OMP_CLAUSE_TASK_REDUCTION:
1610 case OMP_CLAUSE_ALLOCATE:
1611 break;
1612
1613 case OMP_CLAUSE_ALIGNED:
1614 decl = OMP_CLAUSE_DECL (c);
1615 if (is_global_var (decl)
1616 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1617 install_var_local (decl, ctx);
1618 break;
1619
1620 case OMP_CLAUSE__CONDTEMP_:
1621 decl = OMP_CLAUSE_DECL (c);
1622 if (is_parallel_ctx (ctx))
1623 {
1624 install_var_field (decl, false, 3, ctx);
1625 install_var_local (decl, ctx);
1626 }
1627 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1628 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1629 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1630 install_var_local (decl, ctx);
1631 break;
1632
1633 case OMP_CLAUSE__CACHE_:
1634 default:
1635 gcc_unreachable ();
1636 }
1637 }
1638
1639 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1640 {
1641 switch (OMP_CLAUSE_CODE (c))
1642 {
1643 case OMP_CLAUSE_LASTPRIVATE:
1644 /* Let the corresponding firstprivate clause create
1645 the variable. */
1646 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1647 scan_array_reductions = true;
1648 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1649 break;
1650 /* FALLTHRU */
1651
1652 case OMP_CLAUSE_FIRSTPRIVATE:
1653 case OMP_CLAUSE_PRIVATE:
1654 case OMP_CLAUSE_LINEAR:
1655 case OMP_CLAUSE_IS_DEVICE_PTR:
1656 decl = OMP_CLAUSE_DECL (c);
1657 if (is_variable_sized (decl))
1658 {
1659 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1660 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1661 && is_gimple_omp_offloaded (ctx->stmt))
1662 {
1663 tree decl2 = DECL_VALUE_EXPR (decl);
1664 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1665 decl2 = TREE_OPERAND (decl2, 0);
1666 gcc_assert (DECL_P (decl2));
1667 install_var_local (decl2, ctx);
1668 fixup_remapped_decl (decl2, ctx, false);
1669 }
1670 install_var_local (decl, ctx);
1671 }
1672 fixup_remapped_decl (decl, ctx,
1673 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1674 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1675 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1676 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1677 scan_array_reductions = true;
1678 break;
1679
1680 case OMP_CLAUSE_REDUCTION:
1681 case OMP_CLAUSE_IN_REDUCTION:
1682 decl = OMP_CLAUSE_DECL (c);
1683 if (TREE_CODE (decl) != MEM_REF)
1684 {
1685 if (is_variable_sized (decl))
1686 install_var_local (decl, ctx);
1687 fixup_remapped_decl (decl, ctx, false);
1688 }
1689 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1690 scan_array_reductions = true;
1691 break;
1692
1693 case OMP_CLAUSE_TASK_REDUCTION:
1694 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1695 scan_array_reductions = true;
1696 break;
1697
1698 case OMP_CLAUSE_SHARED:
1699 /* Ignore shared directives in teams construct inside of
1700 target construct. */
1701 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1702 && !is_host_teams_ctx (ctx))
1703 break;
1704 decl = OMP_CLAUSE_DECL (c);
1705 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1706 break;
1707 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1708 {
1709 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1710 ctx->outer)))
1711 break;
1712 bool by_ref = use_pointer_for_field (decl, ctx);
1713 install_var_field (decl, by_ref, 11, ctx);
1714 break;
1715 }
1716 fixup_remapped_decl (decl, ctx, false);
1717 break;
1718
1719 case OMP_CLAUSE_MAP:
1720 if (!is_gimple_omp_offloaded (ctx->stmt))
1721 break;
1722 decl = OMP_CLAUSE_DECL (c);
1723 if (DECL_P (decl)
1724 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1725 && (OMP_CLAUSE_MAP_KIND (c)
1726 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1727 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1728 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1729 && varpool_node::get_create (decl)->offloadable)
1730 break;
1731 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1732 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1733 && is_omp_target (ctx->stmt)
1734 && !is_gimple_omp_offloaded (ctx->stmt))
1735 break;
1736 if (DECL_P (decl))
1737 {
1738 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1739 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1740 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1741 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1742 {
1743 tree new_decl = lookup_decl (decl, ctx);
1744 TREE_TYPE (new_decl)
1745 = remap_type (TREE_TYPE (decl), &ctx->cb);
1746 }
1747 else if (DECL_SIZE (decl)
1748 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1749 {
1750 tree decl2 = DECL_VALUE_EXPR (decl);
1751 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1752 decl2 = TREE_OPERAND (decl2, 0);
1753 gcc_assert (DECL_P (decl2));
1754 fixup_remapped_decl (decl2, ctx, false);
1755 fixup_remapped_decl (decl, ctx, true);
1756 }
1757 else
1758 fixup_remapped_decl (decl, ctx, false);
1759 }
1760 break;
1761
1762 case OMP_CLAUSE_COPYPRIVATE:
1763 case OMP_CLAUSE_COPYIN:
1764 case OMP_CLAUSE_DEFAULT:
1765 case OMP_CLAUSE_IF:
1766 case OMP_CLAUSE_NUM_THREADS:
1767 case OMP_CLAUSE_NUM_TEAMS:
1768 case OMP_CLAUSE_THREAD_LIMIT:
1769 case OMP_CLAUSE_DEVICE:
1770 case OMP_CLAUSE_SCHEDULE:
1771 case OMP_CLAUSE_DIST_SCHEDULE:
1772 case OMP_CLAUSE_NOWAIT:
1773 case OMP_CLAUSE_ORDERED:
1774 case OMP_CLAUSE_COLLAPSE:
1775 case OMP_CLAUSE_UNTIED:
1776 case OMP_CLAUSE_FINAL:
1777 case OMP_CLAUSE_MERGEABLE:
1778 case OMP_CLAUSE_PROC_BIND:
1779 case OMP_CLAUSE_SAFELEN:
1780 case OMP_CLAUSE_SIMDLEN:
1781 case OMP_CLAUSE_ALIGNED:
1782 case OMP_CLAUSE_DEPEND:
1783 case OMP_CLAUSE_DETACH:
1784 case OMP_CLAUSE_ALLOCATE:
1785 case OMP_CLAUSE__LOOPTEMP_:
1786 case OMP_CLAUSE__REDUCTEMP_:
1787 case OMP_CLAUSE_TO:
1788 case OMP_CLAUSE_FROM:
1789 case OMP_CLAUSE_PRIORITY:
1790 case OMP_CLAUSE_GRAINSIZE:
1791 case OMP_CLAUSE_NUM_TASKS:
1792 case OMP_CLAUSE_THREADS:
1793 case OMP_CLAUSE_SIMD:
1794 case OMP_CLAUSE_NOGROUP:
1795 case OMP_CLAUSE_DEFAULTMAP:
1796 case OMP_CLAUSE_ORDER:
1797 case OMP_CLAUSE_BIND:
1798 case OMP_CLAUSE_USE_DEVICE_PTR:
1799 case OMP_CLAUSE_USE_DEVICE_ADDR:
1800 case OMP_CLAUSE_NONTEMPORAL:
1801 case OMP_CLAUSE_ASYNC:
1802 case OMP_CLAUSE_WAIT:
1803 case OMP_CLAUSE_NUM_GANGS:
1804 case OMP_CLAUSE_NUM_WORKERS:
1805 case OMP_CLAUSE_VECTOR_LENGTH:
1806 case OMP_CLAUSE_GANG:
1807 case OMP_CLAUSE_WORKER:
1808 case OMP_CLAUSE_VECTOR:
1809 case OMP_CLAUSE_INDEPENDENT:
1810 case OMP_CLAUSE_AUTO:
1811 case OMP_CLAUSE_SEQ:
1812 case OMP_CLAUSE_TILE:
1813 case OMP_CLAUSE__SIMT_:
1814 case OMP_CLAUSE_IF_PRESENT:
1815 case OMP_CLAUSE_FINALIZE:
1816 case OMP_CLAUSE__CONDTEMP_:
1817 break;
1818
1819 case OMP_CLAUSE__CACHE_:
1820 default:
1821 gcc_unreachable ();
1822 }
1823 }
1824
1825 gcc_checking_assert (!scan_array_reductions
1826 || !is_gimple_omp_oacc (ctx->stmt));
1827 if (scan_array_reductions)
1828 {
1829 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1830 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1831 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1832 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1833 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1834 {
1835 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1836 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1837 }
1838 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1839 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1840 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1841 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1842 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1843 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1844 }
1845 }
1846
1847 /* Create a new name for the omp child function. Returns an identifier. */
1848
1849 static tree
1850 create_omp_child_function_name (bool task_copy)
1851 {
1852 return clone_function_name_numbered (current_function_decl,
1853 task_copy ? "_omp_cpyfn" : "_omp_fn");
1854 }
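/* For example, in a function `foo' this yields names like
`foo._omp_fn.0', or `foo._omp_cpyfn.1' for task copy functions; the
numeric suffix comes from clone_function_name_numbered and depends on
how many clones were made earlier, so these exact numbers are only
illustrative. */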
1855
1856 /* Return true if CTX may belong to offloaded code: either if the current
1857 function is offloaded, or if any enclosing context corresponds to a target region. */
1858
1859 static bool
1860 omp_maybe_offloaded_ctx (omp_context *ctx)
1861 {
1862 if (cgraph_node::get (current_function_decl)->offloadable)
1863 return true;
1864 for (; ctx; ctx = ctx->outer)
1865 if (is_gimple_omp_offloaded (ctx->stmt))
1866 return true;
1867 return false;
1868 }
1869
1870 /* Build a decl for the omp child function. It will not have a body
1871 yet, just the bare decl. */
1872
1873 static void
1874 create_omp_child_function (omp_context *ctx, bool task_copy)
1875 {
1876 tree decl, type, name, t;
1877
1878 name = create_omp_child_function_name (task_copy);
1879 if (task_copy)
1880 type = build_function_type_list (void_type_node, ptr_type_node,
1881 ptr_type_node, NULL_TREE);
1882 else
1883 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1884
1885 decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1886
1887 gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1888 || !task_copy);
1889 if (!task_copy)
1890 ctx->cb.dst_fn = decl;
1891 else
1892 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1893
1894 TREE_STATIC (decl) = 1;
1895 TREE_USED (decl) = 1;
1896 DECL_ARTIFICIAL (decl) = 1;
1897 DECL_IGNORED_P (decl) = 0;
1898 TREE_PUBLIC (decl) = 0;
1899 DECL_UNINLINABLE (decl) = 1;
1900 DECL_EXTERNAL (decl) = 0;
1901 DECL_CONTEXT (decl) = NULL_TREE;
1902 DECL_INITIAL (decl) = make_node (BLOCK);
1903 BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1904 DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1905 /* Remove omp declare simd attribute from the new attributes. */
1906 if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1907 {
1908 while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1909 a = a2;
1910 a = TREE_CHAIN (a);
1911 for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1912 if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1913 *p = TREE_CHAIN (*p);
1914 else
1915 {
1916 tree chain = TREE_CHAIN (*p);
1917 *p = copy_node (*p);
1918 p = &TREE_CHAIN (*p);
1919 *p = chain;
1920 }
1921 }
1922 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1923 = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1924 DECL_FUNCTION_SPECIFIC_TARGET (decl)
1925 = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1926 DECL_FUNCTION_VERSIONED (decl)
1927 = DECL_FUNCTION_VERSIONED (current_function_decl);
1928
1929 if (omp_maybe_offloaded_ctx (ctx))
1930 {
1931 cgraph_node::get_create (decl)->offloadable = 1;
1932 if (ENABLE_OFFLOADING)
1933 g->have_offload = true;
1934 }
1935
1936 if (cgraph_node::get_create (decl)->offloadable
1937 && !lookup_attribute ("omp declare target",
1938 DECL_ATTRIBUTES (current_function_decl)))
1939 {
1940 const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1941 ? "omp target entrypoint"
1942 : "omp declare target");
1943 DECL_ATTRIBUTES (decl)
1944 = tree_cons (get_identifier (target_attr),
1945 NULL_TREE, DECL_ATTRIBUTES (decl));
1946 }
1947
1948 t = build_decl (DECL_SOURCE_LOCATION (decl),
1949 RESULT_DECL, NULL_TREE, void_type_node);
1950 DECL_ARTIFICIAL (t) = 1;
1951 DECL_IGNORED_P (t) = 1;
1952 DECL_CONTEXT (t) = decl;
1953 DECL_RESULT (decl) = t;
1954
1955 tree data_name = get_identifier (".omp_data_i");
1956 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1957 ptr_type_node);
1958 DECL_ARTIFICIAL (t) = 1;
1959 DECL_NAMELESS (t) = 1;
1960 DECL_ARG_TYPE (t) = ptr_type_node;
1961 DECL_CONTEXT (t) = current_function_decl;
1962 TREE_USED (t) = 1;
1963 TREE_READONLY (t) = 1;
1964 DECL_ARGUMENTS (decl) = t;
1965 if (!task_copy)
1966 ctx->receiver_decl = t;
1967 else
1968 {
1969 t = build_decl (DECL_SOURCE_LOCATION (decl),
1970 PARM_DECL, get_identifier (".omp_data_o"),
1971 ptr_type_node);
1972 DECL_ARTIFICIAL (t) = 1;
1973 DECL_NAMELESS (t) = 1;
1974 DECL_ARG_TYPE (t) = ptr_type_node;
1975 DECL_CONTEXT (t) = current_function_decl;
1976 TREE_USED (t) = 1;
1977 TREE_ADDRESSABLE (t) = 1;
1978 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1979 DECL_ARGUMENTS (decl) = t;
1980 }
1981
1982 /* Allocate memory for the function structure. The call to
1983 allocate_struct_function clobbers CFUN, so we need to restore
1984 it afterward. */
1985 push_struct_function (decl);
1986 cfun->function_end_locus = gimple_location (ctx->stmt);
1987 init_tree_ssa (cfun);
1988 pop_cfun ();
1989 }
1990
1991 /* Callback for walk_gimple_seq. Check if a combined parallel
1992 contains a gimple_omp_for_combined_into_p OMP_FOR. */
1993
1994 tree
1995 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1996 bool *handled_ops_p,
1997 struct walk_stmt_info *wi)
1998 {
1999 gimple *stmt = gsi_stmt (*gsi_p);
2000
2001 *handled_ops_p = true;
2002 switch (gimple_code (stmt))
2003 {
2004 WALK_SUBSTMTS;
2005
2006 case GIMPLE_OMP_FOR:
2007 if (gimple_omp_for_combined_into_p (stmt)
2008 && gimple_omp_for_kind (stmt)
2009 == *(const enum gf_mask *) (wi->info))
2010 {
2011 wi->info = stmt;
2012 return integer_zero_node;
2013 }
2014 break;
2015 default:
2016 break;
2017 }
2018 return NULL;
2019 }
2020
2021 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2022
2023 static void
2024 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2025 omp_context *outer_ctx)
2026 {
2027 struct walk_stmt_info wi;
2028
2029 memset (&wi, 0, sizeof (wi));
2030 wi.val_only = true;
2031 wi.info = (void *) &msk;
2032 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2033 if (wi.info != (void *) &msk)
2034 {
2035 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2036 struct omp_for_data fd;
2037 omp_extract_for_data (for_stmt, &fd, NULL);
2038 /* We need two temporaries with fd.loop.v type (istart/iend)
2039 and then (fd.collapse - 1) temporaries with the same
2040 type for count2 ... countN-1 vars if not constant. */
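/* For instance (a sketch derived from the logic below, not from the
original commentary): with collapse(3) and a non-constant outermost
bound, count starts at 2 for istart/iend and grows by
fd.collapse - 1 == 2 for the inner iteration counts, so four
_looptemp_ clauses are added, plus one extra up front if a
lastprivate clause requires the total iteration count. */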
2041 size_t count = 2, i;
2042 tree type = fd.iter_type;
2043 if (fd.collapse > 1
2044 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2045 {
2046 count += fd.collapse - 1;
2047 /* If there are lastprivate clauses on the inner
2048 GIMPLE_OMP_FOR, add one more temporary for the total number
2049 of iterations (the product of count1 ... countN-1). */
2050 if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2051 OMP_CLAUSE_LASTPRIVATE)
2052 || (msk == GF_OMP_FOR_KIND_FOR
2053 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2054 OMP_CLAUSE_LASTPRIVATE)))
2055 {
2056 tree temp = create_tmp_var (type);
2057 tree c = build_omp_clause (UNKNOWN_LOCATION,
2058 OMP_CLAUSE__LOOPTEMP_);
2059 insert_decl_map (&outer_ctx->cb, temp, temp);
2060 OMP_CLAUSE_DECL (c) = temp;
2061 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2062 gimple_omp_taskreg_set_clauses (stmt, c);
2063 }
2064 if (fd.non_rect
2065 && fd.last_nonrect == fd.first_nonrect + 1)
2066 if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2067 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2068 {
2069 v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2070 tree type2 = TREE_TYPE (v);
2071 count++;
2072 for (i = 0; i < 3; i++)
2073 {
2074 tree temp = create_tmp_var (type2);
2075 tree c = build_omp_clause (UNKNOWN_LOCATION,
2076 OMP_CLAUSE__LOOPTEMP_);
2077 insert_decl_map (&outer_ctx->cb, temp, temp);
2078 OMP_CLAUSE_DECL (c) = temp;
2079 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2080 gimple_omp_taskreg_set_clauses (stmt, c);
2081 }
2082 }
2083 }
2084 for (i = 0; i < count; i++)
2085 {
2086 tree temp = create_tmp_var (type);
2087 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2088 insert_decl_map (&outer_ctx->cb, temp, temp);
2089 OMP_CLAUSE_DECL (c) = temp;
2090 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2091 gimple_omp_taskreg_set_clauses (stmt, c);
2092 }
2093 }
2094 if (msk == GF_OMP_FOR_KIND_TASKLOOP
2095 && omp_find_clause (gimple_omp_task_clauses (stmt),
2096 OMP_CLAUSE_REDUCTION))
2097 {
2098 tree type = build_pointer_type (pointer_sized_int_node);
2099 tree temp = create_tmp_var (type);
2100 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2101 insert_decl_map (&outer_ctx->cb, temp, temp);
2102 OMP_CLAUSE_DECL (c) = temp;
2103 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2104 gimple_omp_task_set_clauses (stmt, c);
2105 }
2106 }
2107
2108 /* Scan an OpenMP parallel directive. */
2109
2110 static void
2111 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2112 {
2113 omp_context *ctx;
2114 tree name;
2115 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2116
2117 /* Ignore parallel directives with empty bodies, unless there
2118 are copyin clauses. */
2119 if (optimize > 0
2120 && empty_body_p (gimple_omp_body (stmt))
2121 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2122 OMP_CLAUSE_COPYIN) == NULL)
2123 {
2124 gsi_replace (gsi, gimple_build_nop (), false);
2125 return;
2126 }
2127
2128 if (gimple_omp_parallel_combined_p (stmt))
2129 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
2130 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2131 OMP_CLAUSE_REDUCTION);
2132 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2133 if (OMP_CLAUSE_REDUCTION_TASK (c))
2134 {
2135 tree type = build_pointer_type (pointer_sized_int_node);
2136 tree temp = create_tmp_var (type);
2137 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2138 if (outer_ctx)
2139 insert_decl_map (&outer_ctx->cb, temp, temp);
2140 OMP_CLAUSE_DECL (c) = temp;
2141 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2142 gimple_omp_parallel_set_clauses (stmt, c);
2143 break;
2144 }
2145 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2146 break;
2147
2148 ctx = new_omp_context (stmt, outer_ctx);
2149 taskreg_contexts.safe_push (ctx);
2150 if (taskreg_nesting_level > 1)
2151 ctx->is_nested = true;
2152 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2153 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2154 name = create_tmp_var_name (".omp_data_s");
2155 name = build_decl (gimple_location (stmt),
2156 TYPE_DECL, name, ctx->record_type);
2157 DECL_ARTIFICIAL (name) = 1;
2158 DECL_NAMELESS (name) = 1;
2159 TYPE_NAME (ctx->record_type) = name;
2160 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2161 create_omp_child_function (ctx, false);
2162 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2163
2164 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2165 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2166
2167 if (TYPE_FIELDS (ctx->record_type) == NULL)
2168 ctx->record_type = ctx->receiver_decl = NULL;
2169 }
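/* As an illustrative sketch (variable names hypothetical): for

#pragma omp parallel shared (x) firstprivate (y)

where x and y are locals that must be passed to the outlined body,
this builds an `.omp_data_s' record with a field for each, creates
the `*._omp_fn.*' child function receiving a pointer to that record,
and scans the body in the new context; if the record ends up with no
fields, it is discarded again just above. */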
2170
2171 /* Scan an OpenMP task directive. */
2172
2173 static void
2174 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2175 {
2176 omp_context *ctx;
2177 tree name, t;
2178 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2179
2180 /* Ignore task directives with empty bodies, unless they have a depend
2181 clause. */
2182 if (optimize > 0
2183 && gimple_omp_body (stmt)
2184 && empty_body_p (gimple_omp_body (stmt))
2185 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2186 {
2187 gsi_replace (gsi, gimple_build_nop (), false);
2188 return;
2189 }
2190
2191 if (gimple_omp_task_taskloop_p (stmt))
2192 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2193
2194 ctx = new_omp_context (stmt, outer_ctx);
2195
2196 if (gimple_omp_task_taskwait_p (stmt))
2197 {
2198 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2199 return;
2200 }
2201
2202 taskreg_contexts.safe_push (ctx);
2203 if (taskreg_nesting_level > 1)
2204 ctx->is_nested = true;
2205 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2206 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2207 name = create_tmp_var_name (".omp_data_s");
2208 name = build_decl (gimple_location (stmt),
2209 TYPE_DECL, name, ctx->record_type);
2210 DECL_ARTIFICIAL (name) = 1;
2211 DECL_NAMELESS (name) = 1;
2212 TYPE_NAME (ctx->record_type) = name;
2213 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2214 create_omp_child_function (ctx, false);
2215 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2216
2217 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2218
2219 if (ctx->srecord_type)
2220 {
2221 name = create_tmp_var_name (".omp_data_a");
2222 name = build_decl (gimple_location (stmt),
2223 TYPE_DECL, name, ctx->srecord_type);
2224 DECL_ARTIFICIAL (name) = 1;
2225 DECL_NAMELESS (name) = 1;
2226 TYPE_NAME (ctx->srecord_type) = name;
2227 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2228 create_omp_child_function (ctx, true);
2229 }
2230
2231 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2232
2233 if (TYPE_FIELDS (ctx->record_type) == NULL)
2234 {
2235 ctx->record_type = ctx->receiver_decl = NULL;
2236 t = build_int_cst (long_integer_type_node, 0);
2237 gimple_omp_task_set_arg_size (stmt, t);
2238 t = build_int_cst (long_integer_type_node, 1);
2239 gimple_omp_task_set_arg_align (stmt, t);
2240 }
2241 }
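/* E.g. a task whose body captures no variables reaches the
empty-record case just above and tells the runtime to allocate no
argument block at all: arg_size 0 with arg_align 1. */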
2242
2243 /* Helper function for finish_taskreg_scan, called through walk_tree.
2244 If maybe_lookup_decl_in_outer_ctx returns a different tree for some
2245 VAR_DECL, replace it in the expression. */
2246
2247 static tree
2248 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2249 {
2250 if (VAR_P (*tp))
2251 {
2252 omp_context *ctx = (omp_context *) data;
2253 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2254 if (t != *tp)
2255 {
2256 if (DECL_HAS_VALUE_EXPR_P (t))
2257 t = unshare_expr (DECL_VALUE_EXPR (t));
2258 *tp = t;
2259 }
2260 *walk_subtrees = 0;
2261 }
2262 else if (IS_TYPE_OR_DECL_P (*tp))
2263 *walk_subtrees = 0;
2264 return NULL_TREE;
2265 }
2266
2267 /* If any decls have been made addressable during scan_omp,
2268 adjust their fields if needed, and layout record types
2269 of parallel/task constructs. */
2270
2271 static void
2272 finish_taskreg_scan (omp_context *ctx)
2273 {
2274 if (ctx->record_type == NULL_TREE)
2275 return;
2276
2277 /* If any task_shared_vars were needed, check for all
2278 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2279 statements whether use_pointer_for_field has changed
2280 because of that. If it did, update the field types now. */
2281 if (task_shared_vars)
2282 {
2283 tree c;
2284
2285 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2286 c; c = OMP_CLAUSE_CHAIN (c))
2287 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2288 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2289 {
2290 tree decl = OMP_CLAUSE_DECL (c);
2291
2292 /* Global variables don't need to be copied;
2293 the receiver side will use them directly. */
2294 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2295 continue;
2296 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2297 || !use_pointer_for_field (decl, ctx))
2298 continue;
2299 tree field = lookup_field (decl, ctx);
2300 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2301 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2302 continue;
2303 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2304 TREE_THIS_VOLATILE (field) = 0;
2305 DECL_USER_ALIGN (field) = 0;
2306 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2307 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2308 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2309 if (ctx->srecord_type)
2310 {
2311 tree sfield = lookup_sfield (decl, ctx);
2312 TREE_TYPE (sfield) = TREE_TYPE (field);
2313 TREE_THIS_VOLATILE (sfield) = 0;
2314 DECL_USER_ALIGN (sfield) = 0;
2315 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2316 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2317 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2318 }
2319 }
2320 }
2321
2322 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2323 {
2324 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2325 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2326 if (c)
2327 {
2328 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2329 expects to find it at the start of data. */
2330 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2331 tree *p = &TYPE_FIELDS (ctx->record_type);
2332 while (*p)
2333 if (*p == f)
2334 {
2335 *p = DECL_CHAIN (*p);
2336 break;
2337 }
2338 else
2339 p = &DECL_CHAIN (*p);
2340 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2341 TYPE_FIELDS (ctx->record_type) = f;
2342 }
2343 layout_type (ctx->record_type);
2344 fixup_child_record_type (ctx);
2345 }
2346 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2347 {
2348 layout_type (ctx->record_type);
2349 fixup_child_record_type (ctx);
2350 }
2351 else
2352 {
2353 location_t loc = gimple_location (ctx->stmt);
2354 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2355 tree detach_clause
2356 = omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2357 OMP_CLAUSE_DETACH);
2358 /* Move VLA fields to the end. */
2359 p = &TYPE_FIELDS (ctx->record_type);
2360 while (*p)
2361 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2362 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2363 {
2364 *q = *p;
2365 *p = TREE_CHAIN (*p);
2366 TREE_CHAIN (*q) = NULL_TREE;
2367 q = &TREE_CHAIN (*q);
2368 }
2369 else
2370 p = &DECL_CHAIN (*p);
2371 *p = vla_fields;
2372 if (gimple_omp_task_taskloop_p (ctx->stmt))
2373 {
2374 /* Move the fields corresponding to the first and second _looptemp_
2375 clauses first. These are filled by GOMP_taskloop
2376 and thus need to be in specific positions. */
2377 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2378 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2379 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2380 OMP_CLAUSE__LOOPTEMP_);
2381 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2382 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2383 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2384 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2385 p = &TYPE_FIELDS (ctx->record_type);
2386 while (*p)
2387 if (*p == f1 || *p == f2 || *p == f3)
2388 *p = DECL_CHAIN (*p);
2389 else
2390 p = &DECL_CHAIN (*p);
2391 DECL_CHAIN (f1) = f2;
2392 if (c3)
2393 {
2394 DECL_CHAIN (f2) = f3;
2395 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2396 }
2397 else
2398 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2399 TYPE_FIELDS (ctx->record_type) = f1;
2400 if (ctx->srecord_type)
2401 {
2402 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2403 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2404 if (c3)
2405 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2406 p = &TYPE_FIELDS (ctx->srecord_type);
2407 while (*p)
2408 if (*p == f1 || *p == f2 || *p == f3)
2409 *p = DECL_CHAIN (*p);
2410 else
2411 p = &DECL_CHAIN (*p);
2412 DECL_CHAIN (f1) = f2;
2413 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2414 if (c3)
2415 {
2416 DECL_CHAIN (f2) = f3;
2417 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2418 }
2419 else
2420 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2421 TYPE_FIELDS (ctx->srecord_type) = f1;
2422 }
2423 }
2424 if (detach_clause)
2425 {
2426 tree c, field;
2427
2428 /* Look for a firstprivate clause with the detach event handle. */
2429 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2430 c; c = OMP_CLAUSE_CHAIN (c))
2431 {
2432 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2433 continue;
2434 if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2435 == OMP_CLAUSE_DECL (detach_clause))
2436 break;
2437 }
2438
2439 gcc_assert (c);
2440 field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2441
2442 /* Move field corresponding to the detach clause first.
2443 This is filled by GOMP_task and needs to be in a
2444 specific position. */
2445 p = &TYPE_FIELDS (ctx->record_type);
2446 while (*p)
2447 if (*p == field)
2448 *p = DECL_CHAIN (*p);
2449 else
2450 p = &DECL_CHAIN (*p);
2451 DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2452 TYPE_FIELDS (ctx->record_type) = field;
2453 if (ctx->srecord_type)
2454 {
2455 field = lookup_sfield (OMP_CLAUSE_DECL (detach_clause), ctx);
2456 p = &TYPE_FIELDS (ctx->srecord_type);
2457 while (*p)
2458 if (*p == field)
2459 *p = DECL_CHAIN (*p);
2460 else
2461 p = &DECL_CHAIN (*p);
2462 DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2463 TYPE_FIELDS (ctx->srecord_type) = field;
2464 }
2465 }
2466 layout_type (ctx->record_type);
2467 fixup_child_record_type (ctx);
2468 if (ctx->srecord_type)
2469 layout_type (ctx->srecord_type);
2470 tree t = fold_convert_loc (loc, long_integer_type_node,
2471 TYPE_SIZE_UNIT (ctx->record_type));
2472 if (TREE_CODE (t) != INTEGER_CST)
2473 {
2474 t = unshare_expr (t);
2475 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2476 }
2477 gimple_omp_task_set_arg_size (ctx->stmt, t);
2478 t = build_int_cst (long_integer_type_node,
2479 TYPE_ALIGN_UNIT (ctx->record_type));
2480 gimple_omp_task_set_arg_align (ctx->stmt, t);
2481 }
2482 }
2483
2484 /* Find the enclosing offload context. */
2485
2486 static omp_context *
2487 enclosing_target_ctx (omp_context *ctx)
2488 {
2489 for (; ctx; ctx = ctx->outer)
2490 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2491 break;
2492
2493 return ctx;
2494 }
2495
2496 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2497 construct.
2498 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2499
2500 static bool
2501 ctx_in_oacc_kernels_region (omp_context *ctx)
2502 {
2503 for (; ctx != NULL; ctx = ctx->outer)
2504 {
2505 gimple *stmt = ctx->stmt;
2506 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2507 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2508 return true;
2509 }
2510
2511 return false;
2512 }
2513
2514 /* Check the parallelism clauses inside an OpenACC 'kernels' region.
2515 (This doesn't include OpenACC 'kernels' decomposed parts.)
2516 Until kernels handling moves to use the same loop indirection
2517 scheme as parallel, we need to do this checking early. */
2518
2519 static unsigned
2520 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2521 {
2522 bool checking = true;
2523 unsigned outer_mask = 0;
2524 unsigned this_mask = 0;
2525 bool has_seq = false, has_auto = false;
2526
2527 if (ctx->outer)
2528 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2529 if (!stmt)
2530 {
2531 checking = false;
2532 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2533 return outer_mask;
2534 stmt = as_a <gomp_for *> (ctx->stmt);
2535 }
2536
2537 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2538 {
2539 switch (OMP_CLAUSE_CODE (c))
2540 {
2541 case OMP_CLAUSE_GANG:
2542 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2543 break;
2544 case OMP_CLAUSE_WORKER:
2545 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2546 break;
2547 case OMP_CLAUSE_VECTOR:
2548 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2549 break;
2550 case OMP_CLAUSE_SEQ:
2551 has_seq = true;
2552 break;
2553 case OMP_CLAUSE_AUTO:
2554 has_auto = true;
2555 break;
2556 default:
2557 break;
2558 }
2559 }
2560
2561 if (checking)
2562 {
2563 if (has_seq && (this_mask || has_auto))
2564 error_at (gimple_location (stmt), "%<seq%> overrides other"
2565 " OpenACC loop specifiers");
2566 else if (has_auto && this_mask)
2567 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2568 " OpenACC loop specifiers");
2569
2570 if (this_mask & outer_mask)
2571 error_at (gimple_location (stmt), "inner loop uses same"
2572 " OpenACC parallelism as containing loop");
2573 }
2574
2575 return outer_mask | this_mask;
2576 }
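/* For instance (loops hypothetical), inside a kernels region

#pragma acc loop gang
for (...)
#pragma acc loop gang
for (...) ...

the inner loop's gang bit is already set in outer_mask, so the
"inner loop uses same OpenACC parallelism" error above fires. */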
2577
2578 /* Scan a GIMPLE_OMP_FOR. */
2579
2580 static omp_context *
2581 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2582 {
2583 omp_context *ctx;
2584 size_t i;
2585 tree clauses = gimple_omp_for_clauses (stmt);
2586
2587 ctx = new_omp_context (stmt, outer_ctx);
2588
2589 if (is_gimple_omp_oacc (stmt))
2590 {
2591 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2592
2593 if (!(tgt && is_oacc_kernels (tgt)))
2594 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2595 {
2596 tree c_op0;
2597 switch (OMP_CLAUSE_CODE (c))
2598 {
2599 case OMP_CLAUSE_GANG:
2600 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2601 break;
2602
2603 case OMP_CLAUSE_WORKER:
2604 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2605 break;
2606
2607 case OMP_CLAUSE_VECTOR:
2608 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2609 break;
2610
2611 default:
2612 continue;
2613 }
2614
2615 if (c_op0)
2616 {
2617 /* By construction, this is impossible for OpenACC 'kernels'
2618 decomposed parts. */
2619 gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2620
2621 error_at (OMP_CLAUSE_LOCATION (c),
2622 "argument not permitted on %qs clause",
2623 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2624 if (tgt)
2625 inform (gimple_location (tgt->stmt),
2626 "enclosing parent compute construct");
2627 else if (oacc_get_fn_attrib (current_function_decl))
2628 inform (DECL_SOURCE_LOCATION (current_function_decl),
2629 "enclosing routine");
2630 else
2631 gcc_unreachable ();
2632 }
2633 }
2634
2635 if (tgt && is_oacc_kernels (tgt))
2636 check_oacc_kernel_gwv (stmt, ctx);
2637
2638 /* Collect all variables named in reductions on this loop. Ensure
2639 that, if this loop has a reduction on some variable v, and there is
2640 a reduction on v somewhere in an outer context, then there is a
2641 reduction on v on all intervening loops as well. */
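/* For example (a sketch, not from the original commentary):

#pragma acc loop reduction (+:v)
for (...)
#pragma acc loop
for (...)
#pragma acc loop reduction (+:v)
for (...) ...

should produce the "nested loop in reduction needs reduction clause
for v" warning below on the middle loop. */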
2642 tree local_reduction_clauses = NULL;
2643 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2644 {
2645 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2646 local_reduction_clauses
2647 = tree_cons (NULL, c, local_reduction_clauses);
2648 }
2649 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2650 ctx->outer_reduction_clauses
2651 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2652 ctx->outer->outer_reduction_clauses);
2653 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2654 tree local_iter = local_reduction_clauses;
2655 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2656 {
2657 tree local_clause = TREE_VALUE (local_iter);
2658 tree local_var = OMP_CLAUSE_DECL (local_clause);
2659 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2660 bool have_outer_reduction = false;
2661 tree ctx_iter = outer_reduction_clauses;
2662 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2663 {
2664 tree outer_clause = TREE_VALUE (ctx_iter);
2665 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2666 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
2667 if (outer_var == local_var && outer_op != local_op)
2668 {
2669 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2670 "conflicting reduction operations for %qE",
2671 local_var);
2672 inform (OMP_CLAUSE_LOCATION (outer_clause),
2673 "location of the previous reduction for %qE",
2674 outer_var);
2675 }
2676 if (outer_var == local_var)
2677 {
2678 have_outer_reduction = true;
2679 break;
2680 }
2681 }
2682 if (have_outer_reduction)
2683 {
2684 /* There is a reduction on outer_var both on this loop and on
2685 some enclosing loop. Walk up the context tree until such a
2686 loop with a reduction on outer_var is found, and complain
2687 about all intervening loops that do not have such a
2688 reduction. */
2689 struct omp_context *curr_loop = ctx->outer;
2690 bool found = false;
2691 while (curr_loop != NULL)
2692 {
2693 tree curr_iter = curr_loop->local_reduction_clauses;
2694 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2695 {
2696 tree curr_clause = TREE_VALUE (curr_iter);
2697 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2698 if (curr_var == local_var)
2699 {
2700 found = true;
2701 break;
2702 }
2703 }
2704 if (!found)
2705 warning_at (gimple_location (curr_loop->stmt), 0,
2706 "nested loop in reduction needs "
2707 "reduction clause for %qE",
2708 local_var);
2709 else
2710 break;
2711 curr_loop = curr_loop->outer;
2712 }
2713 }
2714 }
2715 ctx->local_reduction_clauses = local_reduction_clauses;
2716 ctx->outer_reduction_clauses
2717 = chainon (unshare_expr (ctx->local_reduction_clauses),
2718 ctx->outer_reduction_clauses);
2719
2720 if (tgt && is_oacc_kernels (tgt))
2721 {
2722 /* Strip out reductions, as they are not handled yet. */
2723 tree *prev_ptr = &clauses;
2724
2725 while (tree probe = *prev_ptr)
2726 {
2727 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2728
2729 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2730 *prev_ptr = *next_ptr;
2731 else
2732 prev_ptr = next_ptr;
2733 }
2734
2735 gimple_omp_for_set_clauses (stmt, clauses);
2736 }
2737 }
2738
2739 scan_sharing_clauses (clauses, ctx);
2740
2741 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2742 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2743 {
2744 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2745 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2746 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2747 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2748 }
2749 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2750 return ctx;
2751 }
2752
2753 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2754
2755 static void
2756 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2757 omp_context *outer_ctx)
2758 {
2759 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2760 gsi_replace (gsi, bind, false);
2761 gimple_seq seq = NULL;
2762 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2763 tree cond = create_tmp_var_raw (integer_type_node);
2764 DECL_CONTEXT (cond) = current_function_decl;
2765 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2766 gimple_bind_set_vars (bind, cond);
2767 gimple_call_set_lhs (g, cond);
2768 gimple_seq_add_stmt (&seq, g);
2769 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2770 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2771 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2772 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2773 gimple_seq_add_stmt (&seq, g);
2774 g = gimple_build_label (lab1);
2775 gimple_seq_add_stmt (&seq, g);
2776 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2777 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2778 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2779 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2780 gimple_omp_for_set_clauses (new_stmt, clause);
2781 gimple_seq_add_stmt (&seq, new_stmt);
2782 g = gimple_build_goto (lab3);
2783 gimple_seq_add_stmt (&seq, g);
2784 g = gimple_build_label (lab2);
2785 gimple_seq_add_stmt (&seq, g);
2786 gimple_seq_add_stmt (&seq, stmt);
2787 g = gimple_build_label (lab3);
2788 gimple_seq_add_stmt (&seq, g);
2789 gimple_bind_set_body (bind, seq);
2790 update_stmt (bind);
2791 scan_omp_for (new_stmt, outer_ctx);
2792 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2793 }
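/* Schematically, the bind built above contains (labels illustrative):

cond = GOMP_USE_SIMT ();
if (cond != 0) goto lab1; else goto lab2;
lab1: simd loop copy with an added _simt_ clause; goto lab3;
lab2: the original simd loop;
lab3: ;

so the SIMT and SIMD variants can later be lowered independently. */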
2794
2795 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2796 struct walk_stmt_info *);
2797 static omp_context *maybe_lookup_ctx (gimple *);
2798
2799 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2800 for the scan phase loop. */
2801
2802 static void
2803 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2804 omp_context *outer_ctx)
2805 {
2806 /* The only change between inclusive and exclusive scan will be
2807 within the first simd loop, so just use inclusive in the
2808 worksharing loop. */
2809 outer_ctx->scan_inclusive = true;
2810 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2811 OMP_CLAUSE_DECL (c) = integer_zero_node;
2812
2813 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2814 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2815 gsi_replace (gsi, input_stmt, false);
2816 gimple_seq input_body = NULL;
2817 gimple_seq_add_stmt (&input_body, stmt);
2818 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2819
2820 gimple_stmt_iterator input1_gsi = gsi_none ();
2821 struct walk_stmt_info wi;
2822 memset (&wi, 0, sizeof (wi));
2823 wi.val_only = true;
2824 wi.info = (void *) &input1_gsi;
2825 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2826 gcc_assert (!gsi_end_p (input1_gsi));
2827
2828 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2829 gsi_next (&input1_gsi);
2830 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2831 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
2832 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2833 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2834 std::swap (input_stmt1, scan_stmt1);
2835
2836 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2837 gimple_omp_set_body (input_stmt1, NULL);
2838
2839 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2840 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2841
2842 gimple_omp_set_body (input_stmt1, input_body1);
2843 gimple_omp_set_body (scan_stmt1, NULL);
2844
2845 gimple_stmt_iterator input2_gsi = gsi_none ();
2846 memset (&wi, 0, sizeof (wi));
2847 wi.val_only = true;
2848 wi.info = (void *) &input2_gsi;
2849 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2850 NULL, &wi);
2851 gcc_assert (!gsi_end_p (input2_gsi));
2852
2853 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2854 gsi_next (&input2_gsi);
2855 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2856 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2857 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2858 std::swap (input_stmt2, scan_stmt2);
2859
2860 gimple_omp_set_body (input_stmt2, NULL);
2861
2862 gimple_omp_set_body (input_stmt, input_body);
2863 gimple_omp_set_body (scan_stmt, scan_body);
2864
2865 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2866 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2867
2868 ctx = new_omp_context (scan_stmt, outer_ctx);
2869 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2870
2871 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2872 }
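/* The shape handled here is an inscan reduction such as (sketch):

#pragma omp simd reduction (inscan, +:r)
for (i = 0; i < n; i++)
{
r += a[i];
#pragma omp scan inclusive (r)
b[i] = r;
}

which is split into an input-phase copy and a scan-phase copy of the
loop as above. */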
2873
2874 /* Scan an OpenMP sections directive. */
2875
2876 static void
2877 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2878 {
2879 omp_context *ctx;
2880
2881 ctx = new_omp_context (stmt, outer_ctx);
2882 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2883 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2884 }
2885
2886 /* Scan an OpenMP single directive. */
2887
2888 static void
2889 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2890 {
2891 omp_context *ctx;
2892 tree name;
2893
2894 ctx = new_omp_context (stmt, outer_ctx);
2895 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2896 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2897 name = create_tmp_var_name (".omp_copy_s");
2898 name = build_decl (gimple_location (stmt),
2899 TYPE_DECL, name, ctx->record_type);
2900 TYPE_NAME (ctx->record_type) = name;
2901
2902 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2903 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2904
2905 if (TYPE_FIELDS (ctx->record_type) == NULL)
2906 ctx->record_type = NULL;
2907 else
2908 layout_type (ctx->record_type);
2909 }
2910
2911 /* Scan a GIMPLE_OMP_TARGET. */
2912
2913 static void
2914 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2915 {
2916 omp_context *ctx;
2917 tree name;
2918 bool offloaded = is_gimple_omp_offloaded (stmt);
2919 tree clauses = gimple_omp_target_clauses (stmt);
2920
2921 ctx = new_omp_context (stmt, outer_ctx);
2922 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2923 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2924 name = create_tmp_var_name (".omp_data_t");
2925 name = build_decl (gimple_location (stmt),
2926 TYPE_DECL, name, ctx->record_type);
2927 DECL_ARTIFICIAL (name) = 1;
2928 DECL_NAMELESS (name) = 1;
2929 TYPE_NAME (ctx->record_type) = name;
2930 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2931
2932 if (offloaded)
2933 {
2934 create_omp_child_function (ctx, false);
2935 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2936 }
2937
2938 scan_sharing_clauses (clauses, ctx);
2939 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2940
2941 if (TYPE_FIELDS (ctx->record_type) == NULL)
2942 ctx->record_type = ctx->receiver_decl = NULL;
2943 else
2944 {
2945 TYPE_FIELDS (ctx->record_type)
2946 = nreverse (TYPE_FIELDS (ctx->record_type));
2947 if (flag_checking)
2948 {
2949 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2950 for (tree field = TYPE_FIELDS (ctx->record_type);
2951 field;
2952 field = DECL_CHAIN (field))
2953 gcc_assert (DECL_ALIGN (field) == align);
2954 }
2955 layout_type (ctx->record_type);
2956 if (offloaded)
2957 fixup_child_record_type (ctx);
2958 }
2959 }
2960
2961 /* Scan an OpenMP teams directive. */
2962
2963 static void
2964 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2965 {
2966 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2967
2968 if (!gimple_omp_teams_host (stmt))
2969 {
2970 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2971 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2972 return;
2973 }
2974 taskreg_contexts.safe_push (ctx);
2975 gcc_assert (taskreg_nesting_level == 1);
2976 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2977 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2978 tree name = create_tmp_var_name (".omp_data_s");
2979 name = build_decl (gimple_location (stmt),
2980 TYPE_DECL, name, ctx->record_type);
2981 DECL_ARTIFICIAL (name) = 1;
2982 DECL_NAMELESS (name) = 1;
2983 TYPE_NAME (ctx->record_type) = name;
2984 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2985 create_omp_child_function (ctx, false);
2986 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2987
2988 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2989 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2990
2991 if (TYPE_FIELDS (ctx->record_type) == NULL)
2992 ctx->record_type = ctx->receiver_decl = NULL;
2993 }
2994
2995 /* Check nesting restrictions. */
2996 static bool
2997 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2998 {
2999 tree c;
3000
3001 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3002 inside an OpenACC CTX. */
3003 if (!(is_gimple_omp (stmt)
3004 && is_gimple_omp_oacc (stmt))
3005 /* Except for atomic codes that we share with OpenMP. */
3006 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3007 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3008 {
3009 if (oacc_get_fn_attrib (cfun->decl) != NULL)
3010 {
3011 error_at (gimple_location (stmt),
3012 "non-OpenACC construct inside of OpenACC routine");
3013 return false;
3014 }
3015 else
3016 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3017 if (is_gimple_omp (octx->stmt)
3018 && is_gimple_omp_oacc (octx->stmt))
3019 {
3020 error_at (gimple_location (stmt),
3021 "non-OpenACC construct inside of OpenACC region");
3022 return false;
3023 }
3024 }
3025
3026 if (ctx != NULL)
3027 {
3028 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3029 && ctx->outer
3030 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3031 ctx = ctx->outer;
3032 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3033 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3034 && !ctx->loop_p)
3035 {
3036 c = NULL_TREE;
3037 if (ctx->order_concurrent
3038 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3039 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3040 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3041 {
3042 error_at (gimple_location (stmt),
3043 "OpenMP constructs other than %<parallel%>, %<loop%>"
3044 " or %<simd%> may not be nested inside a region with"
3045 " the %<order(concurrent)%> clause");
3046 return false;
3047 }
3048 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3049 {
3050 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3051 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3052 {
3053 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3054 && (ctx->outer == NULL
3055 || !gimple_omp_for_combined_into_p (ctx->stmt)
3056 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3057 || (gimple_omp_for_kind (ctx->outer->stmt)
3058 != GF_OMP_FOR_KIND_FOR)
3059 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3060 {
3061 error_at (gimple_location (stmt),
3062 "%<ordered simd threads%> must be closely "
3063 "nested inside of %<%s simd%> region",
3064 lang_GNU_Fortran () ? "do" : "for");
3065 return false;
3066 }
3067 return true;
3068 }
3069 }
3070 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3071 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3072 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3073 return true;
3074 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3075 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3076 return true;
3077 error_at (gimple_location (stmt),
3078 "OpenMP constructs other than "
3079 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3080 "not be nested inside %<simd%> region");
3081 return false;
3082 }
3083 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3084 {
3085 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3086 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3087 && omp_find_clause (gimple_omp_for_clauses (stmt),
3088 OMP_CLAUSE_BIND) == NULL_TREE))
3089 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3090 {
3091 error_at (gimple_location (stmt),
3092 "only %<distribute%>, %<parallel%> or %<loop%> "
3093 "regions are allowed to be strictly nested inside "
3094 "%<teams%> region");
3095 return false;
3096 }
3097 }
3098 else if (ctx->order_concurrent
3099 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3100 && (gimple_code (stmt) != GIMPLE_OMP_FOR
3101 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3102 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3103 {
3104 if (ctx->loop_p)
3105 error_at (gimple_location (stmt),
3106 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3107 "%<simd%> may not be nested inside a %<loop%> region");
3108 else
3109 error_at (gimple_location (stmt),
3110 "OpenMP constructs other than %<parallel%>, %<loop%> or "
3111 "%<simd%> may not be nested inside a region with "
3112 "the %<order(concurrent)%> clause");
3113 return false;
3114 }
3115 }
3116 switch (gimple_code (stmt))
3117 {
3118 case GIMPLE_OMP_FOR:
3119 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3120 return true;
3121 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3122 {
3123 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3124 {
3125 error_at (gimple_location (stmt),
3126 "%<distribute%> region must be strictly nested "
3127 "inside %<teams%> construct");
3128 return false;
3129 }
3130 return true;
3131 }
3132 /* We split taskloop into a task with a nested taskloop in it. */
3133 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3134 return true;
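/* (That is, `#pragma omp taskloop' was already gimplified into an
explicit task wrapping the loop, so encountering the taskloop kind
nested in a task context here is expected.) */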
3135 /* For now, hope this will change and loop bind(parallel) will not
3136 be allowed in lots of contexts. */
3137 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3138 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3139 return true;
3140 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3141 {
3142 bool ok = false;
3143
3144 if (ctx)
3145 switch (gimple_code (ctx->stmt))
3146 {
3147 case GIMPLE_OMP_FOR:
3148 ok = (gimple_omp_for_kind (ctx->stmt)
3149 == GF_OMP_FOR_KIND_OACC_LOOP);
3150 break;
3151
3152 case GIMPLE_OMP_TARGET:
3153 switch (gimple_omp_target_kind (ctx->stmt))
3154 {
3155 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3156 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3157 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3158 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3159 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3160 ok = true;
3161 break;
3162
3163 default:
3164 break;
3165 }
3166
3167 default:
3168 break;
3169 }
3170 else if (oacc_get_fn_attrib (current_function_decl))
3171 ok = true;
3172 if (!ok)
3173 {
3174 error_at (gimple_location (stmt),
3175 "OpenACC loop directive must be associated with"
3176 " an OpenACC compute region");
3177 return false;
3178 }
3179 }
3180 /* FALLTHRU */
3181 case GIMPLE_CALL:
3182 if (is_gimple_call (stmt)
3183 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3184 == BUILT_IN_GOMP_CANCEL
3185 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3186 == BUILT_IN_GOMP_CANCELLATION_POINT))
3187 {
3188 const char *bad = NULL;
3189 const char *kind = NULL;
3190 const char *construct
3191 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3192 == BUILT_IN_GOMP_CANCEL)
3193 ? "cancel"
3194 : "cancellation point";
3195 if (ctx == NULL)
3196 {
3197 error_at (gimple_location (stmt), "orphaned %qs construct",
3198 construct);
3199 return false;
3200 }
3201 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3202 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3203 : 0)
3204 {
3205 case 1:
3206 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3207 bad = "parallel";
3208 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3209 == BUILT_IN_GOMP_CANCEL
3210 && !integer_zerop (gimple_call_arg (stmt, 1)))
3211 ctx->cancellable = true;
3212 kind = "parallel";
3213 break;
3214 case 2:
3215 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3216 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3217 bad = "for";
3218 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3219 == BUILT_IN_GOMP_CANCEL
3220 && !integer_zerop (gimple_call_arg (stmt, 1)))
3221 {
3222 ctx->cancellable = true;
3223 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3224 OMP_CLAUSE_NOWAIT))
3225 warning_at (gimple_location (stmt), 0,
3226 "%<cancel for%> inside "
3227 "%<nowait%> for construct");
3228 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3229 OMP_CLAUSE_ORDERED))
3230 warning_at (gimple_location (stmt), 0,
3231 "%<cancel for%> inside "
3232 "%<ordered%> for construct");
3233 }
3234 kind = "for";
3235 break;
3236 case 4:
3237 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3238 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3239 bad = "sections";
3240 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3241 == BUILT_IN_GOMP_CANCEL
3242 && !integer_zerop (gimple_call_arg (stmt, 1)))
3243 {
3244 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3245 {
3246 ctx->cancellable = true;
3247 if (omp_find_clause (gimple_omp_sections_clauses
3248 (ctx->stmt),
3249 OMP_CLAUSE_NOWAIT))
3250 warning_at (gimple_location (stmt), 0,
3251 "%<cancel sections%> inside "
3252 "%<nowait%> sections construct");
3253 }
3254 else
3255 {
3256 gcc_assert (ctx->outer
3257 && gimple_code (ctx->outer->stmt)
3258 == GIMPLE_OMP_SECTIONS);
3259 ctx->outer->cancellable = true;
3260 if (omp_find_clause (gimple_omp_sections_clauses
3261 (ctx->outer->stmt),
3262 OMP_CLAUSE_NOWAIT))
3263 warning_at (gimple_location (stmt), 0,
3264 "%<cancel sections%> inside "
3265 "%<nowait%> sections construct");
3266 }
3267 }
3268 kind = "sections";
3269 break;
3270 case 8:
3271 if (!is_task_ctx (ctx)
3272 && (!is_taskloop_ctx (ctx)
3273 || ctx->outer == NULL
3274 || !is_task_ctx (ctx->outer)))
3275 bad = "task";
3276 else
3277 {
3278 for (omp_context *octx = ctx->outer;
3279 octx; octx = octx->outer)
3280 {
3281 switch (gimple_code (octx->stmt))
3282 {
3283 case GIMPLE_OMP_TASKGROUP:
3284 break;
3285 case GIMPLE_OMP_TARGET:
3286 if (gimple_omp_target_kind (octx->stmt)
3287 != GF_OMP_TARGET_KIND_REGION)
3288 continue;
3289 /* FALLTHRU */
3290 case GIMPLE_OMP_PARALLEL:
3291 case GIMPLE_OMP_TEAMS:
3292 error_at (gimple_location (stmt),
3293 "%<%s taskgroup%> construct not closely "
3294 "nested inside of %<taskgroup%> region",
3295 construct);
3296 return false;
3297 case GIMPLE_OMP_TASK:
3298 if (gimple_omp_task_taskloop_p (octx->stmt)
3299 && octx->outer
3300 && is_taskloop_ctx (octx->outer))
3301 {
3302 tree clauses
3303 = gimple_omp_for_clauses (octx->outer->stmt);
3304 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3305 break;
3306 }
3307 continue;
3308 default:
3309 continue;
3310 }
3311 break;
3312 }
3313 ctx->cancellable = true;
3314 }
3315 kind = "taskgroup";
3316 break;
3317 default:
3318 error_at (gimple_location (stmt), "invalid arguments");
3319 return false;
3320 }
3321 if (bad)
3322 {
3323 error_at (gimple_location (stmt),
3324 "%<%s %s%> construct not closely nested inside of %qs",
3325 construct, kind, bad);
3326 return false;
3327 }
3328 }
3329 /* FALLTHRU */
3330 case GIMPLE_OMP_SECTIONS:
3331 case GIMPLE_OMP_SINGLE:
3332 for (; ctx != NULL; ctx = ctx->outer)
3333 switch (gimple_code (ctx->stmt))
3334 {
3335 case GIMPLE_OMP_FOR:
3336 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3337 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3338 break;
3339 /* FALLTHRU */
3340 case GIMPLE_OMP_SECTIONS:
3341 case GIMPLE_OMP_SINGLE:
3342 case GIMPLE_OMP_ORDERED:
3343 case GIMPLE_OMP_MASTER:
3344 case GIMPLE_OMP_TASK:
3345 case GIMPLE_OMP_CRITICAL:
3346 if (is_gimple_call (stmt))
3347 {
3348 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3349 != BUILT_IN_GOMP_BARRIER)
3350 return true;
3351 error_at (gimple_location (stmt),
3352 "barrier region may not be closely nested inside "
3353 "of work-sharing, %<loop%>, %<critical%>, "
3354 "%<ordered%>, %<master%>, explicit %<task%> or "
3355 "%<taskloop%> region");
3356 return false;
3357 }
3358 error_at (gimple_location (stmt),
3359 "work-sharing region may not be closely nested inside "
3360 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3361 "%<master%>, explicit %<task%> or %<taskloop%> region");
3362 return false;
3363 case GIMPLE_OMP_PARALLEL:
3364 case GIMPLE_OMP_TEAMS:
3365 return true;
3366 case GIMPLE_OMP_TARGET:
3367 if (gimple_omp_target_kind (ctx->stmt)
3368 == GF_OMP_TARGET_KIND_REGION)
3369 return true;
3370 break;
3371 default:
3372 break;
3373 }
3374 break;
3375 case GIMPLE_OMP_MASTER:
3376 for (; ctx != NULL; ctx = ctx->outer)
3377 switch (gimple_code (ctx->stmt))
3378 {
3379 case GIMPLE_OMP_FOR:
3380 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3381 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3382 break;
3383 /* FALLTHRU */
3384 case GIMPLE_OMP_SECTIONS:
3385 case GIMPLE_OMP_SINGLE:
3386 case GIMPLE_OMP_TASK:
3387 error_at (gimple_location (stmt),
3388 "%<master%> region may not be closely nested inside "
3389 "of work-sharing, %<loop%>, explicit %<task%> or "
3390 "%<taskloop%> region");
3391 return false;
3392 case GIMPLE_OMP_PARALLEL:
3393 case GIMPLE_OMP_TEAMS:
3394 return true;
3395 case GIMPLE_OMP_TARGET:
3396 if (gimple_omp_target_kind (ctx->stmt)
3397 == GF_OMP_TARGET_KIND_REGION)
3398 return true;
3399 break;
3400 default:
3401 break;
3402 }
3403 break;
3404 case GIMPLE_OMP_TASK:
3405 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3406 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3407 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3408 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3409 {
3410 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3411 error_at (OMP_CLAUSE_LOCATION (c),
3412 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3413 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3414 return false;
3415 }
3416 break;
3417 case GIMPLE_OMP_ORDERED:
3418 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3419 c; c = OMP_CLAUSE_CHAIN (c))
3420 {
3421 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3422 {
3423 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3424 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3425 continue;
3426 }
3427 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3428 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3429 || kind == OMP_CLAUSE_DEPEND_SINK)
3430 {
3431 tree oclause;
3432 /* Look for containing ordered(N) loop. */
3433 if (ctx == NULL
3434 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3435 || (oclause
3436 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3437 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3438 {
3439 error_at (OMP_CLAUSE_LOCATION (c),
3440 "%<ordered%> construct with %<depend%> clause "
3441 "must be closely nested inside an %<ordered%> "
3442 "loop");
3443 return false;
3444 }
3445 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3446 {
3447 error_at (OMP_CLAUSE_LOCATION (c),
3448 "%<ordered%> construct with %<depend%> clause "
3449 "must be closely nested inside a loop with "
3450 "%<ordered%> clause with a parameter");
3451 return false;
3452 }
3453 }
3454 else
3455 {
3456 error_at (OMP_CLAUSE_LOCATION (c),
3457 "invalid depend kind in omp %<ordered%> %<depend%>");
3458 return false;
3459 }
3460 }
3461 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3462 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3463 {
3464 /* An ordered simd must be closely nested inside of a simd region,
3465 and a simd region must not encounter constructs other than
3466 ordered simd; therefore an ordered simd is either orphaned,
3467 or ctx->stmt must be a simd. The latter case has already been
3468 handled earlier. */
3469 if (ctx != NULL)
3470 {
3471 error_at (gimple_location (stmt),
3472 "%<ordered%> %<simd%> must be closely nested inside "
3473 "%<simd%> region");
3474 return false;
3475 }
3476 }
3477 for (; ctx != NULL; ctx = ctx->outer)
3478 switch (gimple_code (ctx->stmt))
3479 {
3480 case GIMPLE_OMP_CRITICAL:
3481 case GIMPLE_OMP_TASK:
3482 case GIMPLE_OMP_ORDERED:
3483 ordered_in_taskloop:
3484 error_at (gimple_location (stmt),
3485 "%<ordered%> region may not be closely nested inside "
3486 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3487 "%<taskloop%> region");
3488 return false;
3489 case GIMPLE_OMP_FOR:
3490 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3491 goto ordered_in_taskloop;
3492 tree o;
3493 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3494 OMP_CLAUSE_ORDERED);
3495 if (o == NULL)
3496 {
3497 error_at (gimple_location (stmt),
3498 "%<ordered%> region must be closely nested inside "
3499 "a loop region with an %<ordered%> clause");
3500 return false;
3501 }
3502 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3503 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3504 {
3505 error_at (gimple_location (stmt),
3506 "%<ordered%> region without %<depend%> clause may "
3507 "not be closely nested inside a loop region with "
3508 "an %<ordered%> clause with a parameter");
3509 return false;
3510 }
3511 return true;
3512 case GIMPLE_OMP_TARGET:
3513 if (gimple_omp_target_kind (ctx->stmt)
3514 != GF_OMP_TARGET_KIND_REGION)
3515 break;
3516 /* FALLTHRU */
3517 case GIMPLE_OMP_PARALLEL:
3518 case GIMPLE_OMP_TEAMS:
3519 error_at (gimple_location (stmt),
3520 "%<ordered%> region must be closely nested inside "
3521 "a loop region with an %<ordered%> clause");
3522 return false;
3523 default:
3524 break;
3525 }
3526 break;
3527 case GIMPLE_OMP_CRITICAL:
3528 {
3529 tree this_stmt_name
3530 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3531 for (; ctx != NULL; ctx = ctx->outer)
3532 if (gomp_critical *other_crit
3533 = dyn_cast <gomp_critical *> (ctx->stmt))
3534 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3535 {
3536 error_at (gimple_location (stmt),
3537 "%<critical%> region may not be nested inside "
3538 "a %<critical%> region with the same name");
3539 return false;
3540 }
3541 }
3542 break;
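	 /* Illustrative example (not in the original source): the name
	    check above rejects self-deadlocking nestings such as

	      #pragma omp critical (lck)
	      {
		#pragma omp critical (lck)	/* error: same name.  */
		  ;
	      }

	    while critical regions with different names may nest.  */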
3543 case GIMPLE_OMP_TEAMS:
3544 if (ctx == NULL)
3545 break;
3546 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3547 || (gimple_omp_target_kind (ctx->stmt)
3548 != GF_OMP_TARGET_KIND_REGION))
3549 {
3550 /* Teams construct can appear either strictly nested inside of
3551 target construct with no intervening stmts, or can be encountered
3552 only by initial task (so must not appear inside any OpenMP
3553 construct). */
3554 error_at (gimple_location (stmt),
3555 "%<teams%> construct must be closely nested inside of "
3556 "%<target%> construct or not nested in any OpenMP "
3557 "construct");
3558 return false;
3559 }
3560 break;
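	 /* For illustration (an editorial note, not part of the original
	    source): valid placements are

	      #pragma omp target
	      #pragma omp teams		/* strictly nested in target.  */

	    or a teams directive encountered only by the initial task,
	    e.g. at the top level of main; a teams nested in, say, a
	    parallel region is diagnosed here.  */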
3561 case GIMPLE_OMP_TARGET:
3562 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3563 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3564 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3565 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3566 {
3567 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3568 error_at (OMP_CLAUSE_LOCATION (c),
3569 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3570 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3571 return false;
3572 }
3573 if (is_gimple_omp_offloaded (stmt)
3574 && oacc_get_fn_attrib (cfun->decl) != NULL)
3575 {
3576 error_at (gimple_location (stmt),
3577 "OpenACC region inside of OpenACC routine, nested "
3578 "parallelism not supported yet");
3579 return false;
3580 }
3581 for (; ctx != NULL; ctx = ctx->outer)
3582 {
3583 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3584 {
3585 if (is_gimple_omp (stmt)
3586 && is_gimple_omp_oacc (stmt)
3587 && is_gimple_omp (ctx->stmt))
3588 {
3589 error_at (gimple_location (stmt),
3590 "OpenACC construct inside of non-OpenACC region");
3591 return false;
3592 }
3593 continue;
3594 }
3595
3596 const char *stmt_name, *ctx_stmt_name;
3597 switch (gimple_omp_target_kind (stmt))
3598 {
3599 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3600 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3601 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3602 case GF_OMP_TARGET_KIND_ENTER_DATA:
3603 stmt_name = "target enter data"; break;
3604 case GF_OMP_TARGET_KIND_EXIT_DATA:
3605 stmt_name = "target exit data"; break;
3606 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3607 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3608 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3609 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3610 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3611 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3612 stmt_name = "enter/exit data"; break;
3613 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3614 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3615 break;
3616 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3617 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3618 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3619 /* OpenACC 'kernels' decomposed parts. */
3620 stmt_name = "kernels"; break;
3621 default: gcc_unreachable ();
3622 }
3623 switch (gimple_omp_target_kind (ctx->stmt))
3624 {
3625 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3626 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3627 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3628 ctx_stmt_name = "parallel"; break;
3629 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3630 ctx_stmt_name = "kernels"; break;
3631 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3632 ctx_stmt_name = "serial"; break;
3633 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3634 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3635 ctx_stmt_name = "host_data"; break;
3636 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3637 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3638 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3639 /* OpenACC 'kernels' decomposed parts. */
3640 ctx_stmt_name = "kernels"; break;
3641 default: gcc_unreachable ();
3642 }
3643
3644 /* OpenACC/OpenMP mismatch? */
3645 if (is_gimple_omp_oacc (stmt)
3646 != is_gimple_omp_oacc (ctx->stmt))
3647 {
3648 error_at (gimple_location (stmt),
3649 "%s %qs construct inside of %s %qs region",
3650 (is_gimple_omp_oacc (stmt)
3651 ? "OpenACC" : "OpenMP"), stmt_name,
3652 (is_gimple_omp_oacc (ctx->stmt)
3653 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3654 return false;
3655 }
3656 if (is_gimple_omp_offloaded (ctx->stmt))
3657 {
3658 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3659 if (is_gimple_omp_oacc (ctx->stmt))
3660 {
3661 error_at (gimple_location (stmt),
3662 "%qs construct inside of %qs region",
3663 stmt_name, ctx_stmt_name);
3664 return false;
3665 }
3666 else
3667 {
3668 warning_at (gimple_location (stmt), 0,
3669 "%qs construct inside of %qs region",
3670 stmt_name, ctx_stmt_name);
3671 }
3672 }
3673 }
3674 break;
3675 default:
3676 break;
3677 }
3678 return true;
3679 }
3680
3681
3682 /* Helper function for scan_omp.
3683
3684 Callback for walk_tree, or for the operand walk in walk_gimple_stmt,
3685 used to scan for OMP directives in TP. */
3686
3687 static tree
3688 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3689 {
3690 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3691 omp_context *ctx = (omp_context *) wi->info;
3692 tree t = *tp;
3693
3694 switch (TREE_CODE (t))
3695 {
3696 case VAR_DECL:
3697 case PARM_DECL:
3698 case LABEL_DECL:
3699 case RESULT_DECL:
3700 if (ctx)
3701 {
3702 tree repl = remap_decl (t, &ctx->cb);
3703 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3704 *tp = repl;
3705 }
3706 break;
3707
3708 default:
3709 if (ctx && TYPE_P (t))
3710 *tp = remap_type (t, &ctx->cb);
3711 else if (!DECL_P (t))
3712 {
3713 *walk_subtrees = 1;
3714 if (ctx)
3715 {
3716 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3717 if (tem != TREE_TYPE (t))
3718 {
3719 if (TREE_CODE (t) == INTEGER_CST)
3720 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3721 else
3722 TREE_TYPE (t) = tem;
3723 }
3724 }
3725 }
3726 break;
3727 }
3728
3729 return NULL_TREE;
3730 }
3731
3732 /* Return true if FNDECL is a setjmp or a longjmp. */
3733
3734 static bool
3735 setjmp_or_longjmp_p (const_tree fndecl)
3736 {
3737 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3738 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3739 return true;
3740
3741 tree declname = DECL_NAME (fndecl);
3742 if (!declname
3743 || (DECL_CONTEXT (fndecl) != NULL_TREE
3744 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3745 || !TREE_PUBLIC (fndecl))
3746 return false;
3747
3748 const char *name = IDENTIFIER_POINTER (declname);
3749 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3750 }
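/* An illustrative note (not in the original source): besides the builtins,
   the name-based check above also matches a plain TU-scope declaration, so

     extern int setjmp (jmp_buf);

   called inside a simd construct is recognized here and later diagnosed by
   scan_omp_1_stmt as "setjmp/longjmp inside simd construct".  */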
3751
3752 /* Return true if FNDECL is an omp_* runtime API call. */
3753
3754 static bool
3755 omp_runtime_api_call (const_tree fndecl)
3756 {
3757 tree declname = DECL_NAME (fndecl);
3758 if (!declname
3759 || (DECL_CONTEXT (fndecl) != NULL_TREE
3760 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3761 || !TREE_PUBLIC (fndecl))
3762 return false;
3763
3764 const char *name = IDENTIFIER_POINTER (declname);
3765 if (strncmp (name, "omp_", 4) != 0)
3766 return false;
3767
3768 static const char *omp_runtime_apis[] =
3769 {
3770 /* This array has 3 sections. First omp_* calls that don't
3771 have any suffixes. */
3772 "target_alloc",
3773 "target_associate_ptr",
3774 "target_disassociate_ptr",
3775 "target_free",
3776 "target_is_present",
3777 "target_memcpy",
3778 "target_memcpy_rect",
3779 NULL,
3780 /* Now omp_* calls that are available as omp_* and omp_*_. */
3781 "capture_affinity",
3782 "destroy_lock",
3783 "destroy_nest_lock",
3784 "display_affinity",
3785 "get_active_level",
3786 "get_affinity_format",
3787 "get_cancellation",
3788 "get_default_device",
3789 "get_dynamic",
3790 "get_initial_device",
3791 "get_level",
3792 "get_max_active_levels",
3793 "get_max_task_priority",
3794 "get_max_threads",
3795 "get_nested",
3796 "get_num_devices",
3797 "get_num_places",
3798 "get_num_procs",
3799 "get_num_teams",
3800 "get_num_threads",
3801 "get_partition_num_places",
3802 "get_place_num",
3803 "get_proc_bind",
3804 "get_team_num",
3805 "get_thread_limit",
3806 "get_thread_num",
3807 "get_wtick",
3808 "get_wtime",
3809 "in_final",
3810 "in_parallel",
3811 "init_lock",
3812 "init_nest_lock",
3813 "is_initial_device",
3814 "pause_resource",
3815 "pause_resource_all",
3816 "set_affinity_format",
3817 "set_lock",
3818 "set_nest_lock",
3819 "test_lock",
3820 "test_nest_lock",
3821 "unset_lock",
3822 "unset_nest_lock",
3823 NULL,
3824 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3825 "get_ancestor_thread_num",
3826 "get_partition_place_nums",
3827 "get_place_num_procs",
3828 "get_place_proc_ids",
3829 "get_schedule",
3830 "get_team_size",
3831 "set_default_device",
3832 "set_dynamic",
3833 "set_max_active_levels",
3834 "set_nested",
3835 "set_num_threads",
3836 "set_schedule"
3837 };
3838
3839 int mode = 0;
3840 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3841 {
3842 if (omp_runtime_apis[i] == NULL)
3843 {
3844 mode++;
3845 continue;
3846 }
3847 size_t len = strlen (omp_runtime_apis[i]);
3848 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3849 && (name[4 + len] == '\0'
3850 || (mode > 0
3851 && name[4 + len] == '_'
3852 && (name[4 + len + 1] == '\0'
3853 || (mode > 1
3854 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3855 return true;
3856 }
3857 return false;
3858 }
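/* An illustrative note (not from the original source): the suffix matching
   above also accepts the Fortran entry points of the listed calls; e.g. for
   "set_num_threads" (third section) all of

     omp_set_num_threads	(C/C++)
     omp_set_num_threads_	(Fortran)
     omp_set_num_threads_8_	(Fortran, 8-byte integer arguments)

   are recognized, while the names in the first section match only
   exactly.  */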
3859
3860 /* Helper function for scan_omp.
3861
3862 Callback for walk_gimple_stmt used to scan for OMP directives in
3863 the current statement in GSI. */
3864
3865 static tree
3866 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3867 struct walk_stmt_info *wi)
3868 {
3869 gimple *stmt = gsi_stmt (*gsi);
3870 omp_context *ctx = (omp_context *) wi->info;
3871
3872 if (gimple_has_location (stmt))
3873 input_location = gimple_location (stmt);
3874
3875 /* Check the nesting restrictions. */
3876 bool remove = false;
3877 if (is_gimple_omp (stmt))
3878 remove = !check_omp_nesting_restrictions (stmt, ctx);
3879 else if (is_gimple_call (stmt))
3880 {
3881 tree fndecl = gimple_call_fndecl (stmt);
3882 if (fndecl)
3883 {
3884 if (ctx
3885 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3886 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3887 && setjmp_or_longjmp_p (fndecl)
3888 && !ctx->loop_p)
3889 {
3890 remove = true;
3891 error_at (gimple_location (stmt),
3892 "setjmp/longjmp inside %<simd%> construct");
3893 }
3894 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3895 switch (DECL_FUNCTION_CODE (fndecl))
3896 {
3897 case BUILT_IN_GOMP_BARRIER:
3898 case BUILT_IN_GOMP_CANCEL:
3899 case BUILT_IN_GOMP_CANCELLATION_POINT:
3900 case BUILT_IN_GOMP_TASKYIELD:
3901 case BUILT_IN_GOMP_TASKWAIT:
3902 case BUILT_IN_GOMP_TASKGROUP_START:
3903 case BUILT_IN_GOMP_TASKGROUP_END:
3904 remove = !check_omp_nesting_restrictions (stmt, ctx);
3905 break;
3906 default:
3907 break;
3908 }
3909 else if (ctx)
3910 {
3911 omp_context *octx = ctx;
3912 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
3913 octx = ctx->outer;
3914 if (octx->order_concurrent && omp_runtime_api_call (fndecl))
3915 {
3916 remove = true;
3917 error_at (gimple_location (stmt),
3918 "OpenMP runtime API call %qD in a region with "
3919 "%<order(concurrent)%> clause", fndecl);
3920 }
3921 }
3922 }
3923 }
3924 if (remove)
3925 {
3926 stmt = gimple_build_nop ();
3927 gsi_replace (gsi, stmt, false);
3928 }
3929
3930 *handled_ops_p = true;
3931
3932 switch (gimple_code (stmt))
3933 {
3934 case GIMPLE_OMP_PARALLEL:
3935 taskreg_nesting_level++;
3936 scan_omp_parallel (gsi, ctx);
3937 taskreg_nesting_level--;
3938 break;
3939
3940 case GIMPLE_OMP_TASK:
3941 taskreg_nesting_level++;
3942 scan_omp_task (gsi, ctx);
3943 taskreg_nesting_level--;
3944 break;
3945
3946 case GIMPLE_OMP_FOR:
3947 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3948 == GF_OMP_FOR_KIND_SIMD)
3949 && gimple_omp_for_combined_into_p (stmt)
3950 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
3951 {
3952 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
3953 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
3954 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
3955 {
3956 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
3957 break;
3958 }
3959 }
3960 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3961 == GF_OMP_FOR_KIND_SIMD)
3962 && omp_maybe_offloaded_ctx (ctx)
3963 && omp_max_simt_vf ()
3964 && gimple_omp_for_collapse (stmt) == 1)
3965 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3966 else
3967 scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3968 break;
3969
3970 case GIMPLE_OMP_SECTIONS:
3971 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3972 break;
3973
3974 case GIMPLE_OMP_SINGLE:
3975 scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3976 break;
3977
3978 case GIMPLE_OMP_SCAN:
3979 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
3980 {
3981 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
3982 ctx->scan_inclusive = true;
3983 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
3984 ctx->scan_exclusive = true;
3985 }
3986 /* FALLTHRU */
3987 case GIMPLE_OMP_SECTION:
3988 case GIMPLE_OMP_MASTER:
3989 case GIMPLE_OMP_ORDERED:
3990 case GIMPLE_OMP_CRITICAL:
3991 ctx = new_omp_context (stmt, ctx);
3992 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3993 break;
3994
3995 case GIMPLE_OMP_TASKGROUP:
3996 ctx = new_omp_context (stmt, ctx);
3997 scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3998 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3999 break;
4000
4001 case GIMPLE_OMP_TARGET:
4002 if (is_gimple_omp_offloaded (stmt))
4003 {
4004 taskreg_nesting_level++;
4005 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4006 taskreg_nesting_level--;
4007 }
4008 else
4009 scan_omp_target (as_a <gomp_target *> (stmt), ctx);
4010 break;
4011
4012 case GIMPLE_OMP_TEAMS:
4013 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
4014 {
4015 taskreg_nesting_level++;
4016 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4017 taskreg_nesting_level--;
4018 }
4019 else
4020 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
4021 break;
4022
4023 case GIMPLE_BIND:
4024 {
4025 tree var;
4026
4027 *handled_ops_p = false;
4028 if (ctx)
4029 for (var = gimple_bind_vars (as_a <gbind *> (stmt));
4030 var ;
4031 var = DECL_CHAIN (var))
4032 insert_decl_map (&ctx->cb, var, var);
4033 }
4034 break;
4035 default:
4036 *handled_ops_p = false;
4037 break;
4038 }
4039
4040 return NULL_TREE;
4041 }
4042
4043
4044 /* Scan all the statements starting at the current statement. CTX
4045 contains context information about the OMP directives and
4046 clauses found during the scan. */
4047
4048 static void
4049 scan_omp (gimple_seq *body_p, omp_context *ctx)
4050 {
4051 location_t saved_location;
4052 struct walk_stmt_info wi;
4053
4054 memset (&wi, 0, sizeof (wi));
4055 wi.info = ctx;
4056 wi.want_locations = true;
4057
4058 saved_location = input_location;
4059 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4060 input_location = saved_location;
4061 }
4062 \f
4063 /* Re-gimplification and code generation routines. */
4064
4065 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4066 of BIND if in a method. */
4067
4068 static void
4069 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4070 {
4071 if (DECL_ARGUMENTS (current_function_decl)
4072 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4073 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4074 == POINTER_TYPE))
4075 {
4076 tree vars = gimple_bind_vars (bind);
4077 for (tree *pvar = &vars; *pvar; )
4078 if (omp_member_access_dummy_var (*pvar))
4079 *pvar = DECL_CHAIN (*pvar);
4080 else
4081 pvar = &DECL_CHAIN (*pvar);
4082 gimple_bind_set_vars (bind, vars);
4083 }
4084 }
4085
4086 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4087 block and its subblocks. */
4088
4089 static void
4090 remove_member_access_dummy_vars (tree block)
4091 {
4092 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4093 if (omp_member_access_dummy_var (*pvar))
4094 *pvar = DECL_CHAIN (*pvar);
4095 else
4096 pvar = &DECL_CHAIN (*pvar);
4097
4098 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4099 remove_member_access_dummy_vars (block);
4100 }
4101
4102 /* If a context was created for STMT when it was scanned, return it. */
4103
4104 static omp_context *
4105 maybe_lookup_ctx (gimple *stmt)
4106 {
4107 splay_tree_node n;
4108 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4109 return n ? (omp_context *) n->value : NULL;
4110 }
4111
4112
4113 /* Find the mapping for DECL in CTX or the immediately enclosing
4114 context that has a mapping for DECL.
4115
4116 If CTX is a nested parallel directive, we may have to use the decl
4117 mappings created in CTX's parent context. Suppose that we have the
4118 following parallel nesting (variable UIDs shown for clarity):
4119
4120 iD.1562 = 0;
4121 #omp parallel shared(iD.1562) -> outer parallel
4122 iD.1562 = iD.1562 + 1;
4123
4124 #omp parallel shared (iD.1562) -> inner parallel
4125 iD.1562 = iD.1562 - 1;
4126
4127 Each parallel structure will create a distinct .omp_data_s structure
4128 for copying iD.1562 in/out of the directive:
4129
4130 outer parallel .omp_data_s.1.i -> iD.1562
4131 inner parallel .omp_data_s.2.i -> iD.1562
4132
4133 A shared variable mapping will produce a copy-out operation before
4134 the parallel directive and a copy-in operation after it. So, in
4135 this case we would have:
4136
4137 iD.1562 = 0;
4138 .omp_data_o.1.i = iD.1562;
4139 #omp parallel shared(iD.1562) -> outer parallel
4140 .omp_data_i.1 = &.omp_data_o.1
4141 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4142
4143 .omp_data_o.2.i = iD.1562; -> **
4144 #omp parallel shared(iD.1562) -> inner parallel
4145 .omp_data_i.2 = &.omp_data_o.2
4146 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4147
4148
4149 ** This is a problem. The symbol iD.1562 cannot be referenced
4150 inside the body of the outer parallel region. But since we are
4151 emitting this copy operation while expanding the inner parallel
4152 directive, we need to access the CTX structure of the outer
4153 parallel directive to get the correct mapping:
4154
4155 .omp_data_o.2.i = .omp_data_i.1->i
4156
4157 Since there may be other workshare or parallel directives enclosing
4158 the parallel directive, it may be necessary to walk up the context
4159 parent chain. This is not a problem in general because nested
4160 parallelism happens only rarely. */
4161
4162 static tree
4163 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4164 {
4165 tree t;
4166 omp_context *up;
4167
4168 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4169 t = maybe_lookup_decl (decl, up);
4170
4171 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4172
4173 return t ? t : decl;
4174 }
4175
4176
4177 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4178 in outer contexts. */
4179
4180 static tree
4181 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4182 {
4183 tree t = NULL;
4184 omp_context *up;
4185
4186 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4187 t = maybe_lookup_decl (decl, up);
4188
4189 return t ? t : decl;
4190 }
4191
4192
4193 /* Construct the initialization value for reduction operation OP. */
4194
4195 tree
4196 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4197 {
4198 switch (op)
4199 {
4200 case PLUS_EXPR:
4201 case MINUS_EXPR:
4202 case BIT_IOR_EXPR:
4203 case BIT_XOR_EXPR:
4204 case TRUTH_OR_EXPR:
4205 case TRUTH_ORIF_EXPR:
4206 case TRUTH_XOR_EXPR:
4207 case NE_EXPR:
4208 return build_zero_cst (type);
4209
4210 case MULT_EXPR:
4211 case TRUTH_AND_EXPR:
4212 case TRUTH_ANDIF_EXPR:
4213 case EQ_EXPR:
4214 return fold_convert_loc (loc, type, integer_one_node);
4215
4216 case BIT_AND_EXPR:
4217 return fold_convert_loc (loc, type, integer_minus_one_node);
4218
4219 case MAX_EXPR:
4220 if (SCALAR_FLOAT_TYPE_P (type))
4221 {
4222 REAL_VALUE_TYPE max, min;
4223 if (HONOR_INFINITIES (type))
4224 {
4225 real_inf (&max);
4226 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4227 }
4228 else
4229 real_maxval (&min, 1, TYPE_MODE (type));
4230 return build_real (type, min);
4231 }
4232 else if (POINTER_TYPE_P (type))
4233 {
4234 wide_int min
4235 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4236 return wide_int_to_tree (type, min);
4237 }
4238 else
4239 {
4240 gcc_assert (INTEGRAL_TYPE_P (type));
4241 return TYPE_MIN_VALUE (type);
4242 }
4243
4244 case MIN_EXPR:
4245 if (SCALAR_FLOAT_TYPE_P (type))
4246 {
4247 REAL_VALUE_TYPE max;
4248 if (HONOR_INFINITIES (type))
4249 real_inf (&max);
4250 else
4251 real_maxval (&max, 0, TYPE_MODE (type));
4252 return build_real (type, max);
4253 }
4254 else if (POINTER_TYPE_P (type))
4255 {
4256 wide_int max
4257 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4258 return wide_int_to_tree (type, max);
4259 }
4260 else
4261 {
4262 gcc_assert (INTEGRAL_TYPE_P (type));
4263 return TYPE_MAX_VALUE (type);
4264 }
4265
4266 default:
4267 gcc_unreachable ();
4268 }
4269 }
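/* For reference (an editorial summary, not in the original source), the
   identity values produced above for an integral TYPE are

     +  -  |  ^  !=	->  0
     *  ==		->  1
     &			-> ~0 (all bits set)
     max		->  TYPE_MIN_VALUE
     min		->  TYPE_MAX_VALUE

   so that combining any thread's partial result with the identity leaves
   the partial result unchanged.  */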
4270
4271 /* Construct the initialization value for reduction CLAUSE. */
4272
4273 tree
4274 omp_reduction_init (tree clause, tree type)
4275 {
4276 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4277 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4278 }
4279
4280 /* Return alignment to be assumed for var in CLAUSE, which should be
4281 OMP_CLAUSE_ALIGNED. */
4282
4283 static tree
4284 omp_clause_aligned_alignment (tree clause)
4285 {
4286 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
4287 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
4288
4289 /* Otherwise return implementation defined alignment. */
4290 unsigned int al = 1;
4291 opt_scalar_mode mode_iter;
4292 auto_vector_modes modes;
4293 targetm.vectorize.autovectorize_vector_modes (&modes, true);
4294 static enum mode_class classes[]
4295 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
4296 for (int i = 0; i < 4; i += 2)
4297 /* The for loop above dictates that we only walk through scalar classes. */
4298 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
4299 {
4300 scalar_mode mode = mode_iter.require ();
4301 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
4302 if (GET_MODE_CLASS (vmode) != classes[i + 1])
4303 continue;
4304 machine_mode alt_vmode;
4305 for (unsigned int j = 0; j < modes.length (); ++j)
4306 if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
4307 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
4308 vmode = alt_vmode;
4309
4310 tree type = lang_hooks.types.type_for_mode (mode, 1);
4311 if (type == NULL_TREE || TYPE_MODE (type) != mode)
4312 continue;
4313 type = build_vector_type_for_mode (type, vmode);
4314 if (TYPE_MODE (type) != vmode)
4315 continue;
4316 if (TYPE_ALIGN_UNIT (type) > al)
4317 al = TYPE_ALIGN_UNIT (type);
4318 }
4319 return build_int_cst (integer_type_node, al);
4320 }
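/* Illustrative usage (not from the original source): for

     #pragma omp simd aligned (p : 32)

   the explicit alignment 32 is returned directly, whereas for a bare
   "aligned (p)" the loop above returns the largest unit alignment among
   the target's preferred vector modes, e.g. 32 on x86_64 with AVX2
   enabled.  */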
4321
4322
4323 /* This structure is part of the interface between lower_rec_simd_input_clauses
4324 and lower_rec_input_clauses. */
4325
4326 class omplow_simd_context {
4327 public:
4328 omplow_simd_context () { memset (this, 0, sizeof (*this)); }
4329 tree idx;
4330 tree lane;
4331 tree lastlane;
4332 vec<tree, va_heap> simt_eargs;
4333 gimple_seq simt_dlist;
4334 poly_uint64_pod max_vf;
4335 bool is_simt;
4336 };
4337
4338 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
4339 privatization. */
4340
4341 static bool
4342 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
4343 omplow_simd_context *sctx, tree &ivar,
4344 tree &lvar, tree *rvar = NULL,
4345 tree *rvar2 = NULL)
4346 {
4347 if (known_eq (sctx->max_vf, 0U))
4348 {
4349 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
4350 if (maybe_gt (sctx->max_vf, 1U))
4351 {
4352 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4353 OMP_CLAUSE_SAFELEN);
4354 if (c)
4355 {
4356 poly_uint64 safe_len;
4357 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4358 || maybe_lt (safe_len, 1U))
4359 sctx->max_vf = 1;
4360 else
4361 sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
4362 }
4363 }
4364 if (maybe_gt (sctx->max_vf, 1U))
4365 {
4366 sctx->idx = create_tmp_var (unsigned_type_node);
4367 sctx->lane = create_tmp_var (unsigned_type_node);
4368 }
4369 }
4370 if (known_eq (sctx->max_vf, 1U))
4371 return false;
4372
4373 if (sctx->is_simt)
4374 {
4375 if (is_gimple_reg (new_var))
4376 {
4377 ivar = lvar = new_var;
4378 return true;
4379 }
4380 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
4381 ivar = lvar = create_tmp_var (type);
4382 TREE_ADDRESSABLE (ivar) = 1;
4383 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
4384 NULL, DECL_ATTRIBUTES (ivar));
4385 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
4386 tree clobber = build_clobber (type);
4387 gimple *g = gimple_build_assign (ivar, clobber);
4388 gimple_seq_add_stmt (&sctx->simt_dlist, g);
4389 }
4390 else
4391 {
4392 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
4393 tree avar = create_tmp_var_raw (atype);
4394 if (TREE_ADDRESSABLE (new_var))
4395 TREE_ADDRESSABLE (avar) = 1;
4396 DECL_ATTRIBUTES (avar)
4397 = tree_cons (get_identifier ("omp simd array"), NULL,
4398 DECL_ATTRIBUTES (avar));
4399 gimple_add_tmp_var (avar);
4400 tree iavar = avar;
4401 if (rvar && !ctx->for_simd_scan_phase)
4402 {
4403 /* For inscan reductions, create another array temporary,
4404 which will hold the reduced value. */
4405 iavar = create_tmp_var_raw (atype);
4406 if (TREE_ADDRESSABLE (new_var))
4407 TREE_ADDRESSABLE (iavar) = 1;
4408 DECL_ATTRIBUTES (iavar)
4409 = tree_cons (get_identifier ("omp simd array"), NULL,
4410 tree_cons (get_identifier ("omp simd inscan"), NULL,
4411 DECL_ATTRIBUTES (iavar)));
4412 gimple_add_tmp_var (iavar);
4413 ctx->cb.decl_map->put (avar, iavar);
4414 if (sctx->lastlane == NULL_TREE)
4415 sctx->lastlane = create_tmp_var (unsigned_type_node);
4416 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
4417 sctx->lastlane, NULL_TREE, NULL_TREE);
4418 TREE_THIS_NOTRAP (*rvar) = 1;
4419
4420 if (ctx->scan_exclusive)
4421 {
4422 /* And for exclusive scan yet another one, which will
4423 hold the value during the scan phase. */
4424 tree savar = create_tmp_var_raw (atype);
4425 if (TREE_ADDRESSABLE (new_var))
4426 TREE_ADDRESSABLE (savar) = 1;
4427 DECL_ATTRIBUTES (savar)
4428 = tree_cons (get_identifier ("omp simd array"), NULL,
4429 tree_cons (get_identifier ("omp simd inscan "
4430 "exclusive"), NULL,
4431 DECL_ATTRIBUTES (savar)));
4432 gimple_add_tmp_var (savar);
4433 ctx->cb.decl_map->put (iavar, savar);
4434 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
4435 sctx->idx, NULL_TREE, NULL_TREE);
4436 TREE_THIS_NOTRAP (*rvar2) = 1;
4437 }
4438 }
4439 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
4440 NULL_TREE, NULL_TREE);
4441 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
4442 NULL_TREE, NULL_TREE);
4443 TREE_THIS_NOTRAP (ivar) = 1;
4444 TREE_THIS_NOTRAP (lvar) = 1;
4445 }
4446 if (DECL_P (new_var))
4447 {
4448 SET_DECL_VALUE_EXPR (new_var, lvar);
4449 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4450 }
4451 return true;
4452 }
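/* An editorial sketch (not in the original source) of the non-SIMT
   transformation above: for

     #pragma omp simd private (x)

   the scalar x conceptually becomes a per-lane "omp simd array"

     T x_arr[max_vf];

   where IVAR is x_arr[sctx->idx] (the per-iteration reference) and LVAR
   is x_arr[sctx->lane], installed as the DECL_VALUE_EXPR of x, so that
   the vectorizer can later assign one array element per SIMD lane.  */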
4453
4454 /* Helper function of lower_rec_input_clauses. For a reference used
4455 in a simd reduction, create an underlying variable for it to reference. */
4456
4457 static void
4458 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4459 {
4460 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4461 if (TREE_CONSTANT (z))
4462 {
4463 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4464 get_name (new_vard));
4465 gimple_add_tmp_var (z);
4466 TREE_ADDRESSABLE (z) = 1;
4467 z = build_fold_addr_expr_loc (loc, z);
4468 gimplify_assign (new_vard, z, ilist);
4469 }
4470 }
4471
4472 /* Helper function for lower_rec_input_clauses. Emit code into the
4473 ILIST sequence that computes (type) (tskred_temp[idx]). */
4474
4475 static tree
4476 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4477 unsigned idx)
4478 {
4479 unsigned HOST_WIDE_INT sz
4480 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4481 tree r = build2 (MEM_REF, pointer_sized_int_node,
4482 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4483 idx * sz));
4484 tree v = create_tmp_var (pointer_sized_int_node);
4485 gimple *g = gimple_build_assign (v, r);
4486 gimple_seq_add_stmt (ilist, g);
4487 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4488 {
4489 v = create_tmp_var (type);
4490 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4491 gimple_seq_add_stmt (ilist, g);
4492 }
4493 return v;
4494 }
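/* Roughly (an illustrative sketch, not from the original source), for
   IDX == 1 and a 64-bit target this emits

     v_1 = MEM[(__intptr_t *) tskred_temp + 8];
     v_2 = (TYPE) v_1;

   and returns v_2, or just v_1 when the conversion to TYPE is useless.  */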
4495
4496 /* Lower early initialization of privatized variable NEW_VAR
4497 if it needs an allocator (i.e. has an allocate clause). */
4498
4499 static bool
4500 lower_private_allocate (tree var, tree new_var, tree &allocator,
4501 tree &allocate_ptr, gimple_seq *ilist,
4502 omp_context *ctx, bool is_ref, tree size)
4503 {
4504 if (allocator)
4505 return false;
4506 gcc_assert (allocate_ptr == NULL_TREE);
4507 if (ctx->allocate_map
4508 && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
4509 if (tree *allocatorp = ctx->allocate_map->get (var))
4510 allocator = *allocatorp;
4511 if (allocator == NULL_TREE)
4512 return false;
4513 if (!is_ref && omp_is_reference (var))
4514 {
4515 allocator = NULL_TREE;
4516 return false;
4517 }
4518
4519 if (TREE_CODE (allocator) != INTEGER_CST)
4520 allocator = build_outer_var_ref (allocator, ctx);
4521 allocator = fold_convert (pointer_sized_int_node, allocator);
4522 if (TREE_CODE (allocator) != INTEGER_CST)
4523 {
4524 tree var = create_tmp_var (TREE_TYPE (allocator));
4525 gimplify_assign (var, allocator, ilist);
4526 allocator = var;
4527 }
4528
4529 tree ptr_type, align, sz = size;
4530 if (TYPE_P (new_var))
4531 {
4532 ptr_type = build_pointer_type (new_var);
4533 align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
4534 }
4535 else if (is_ref)
4536 {
4537 ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
4538 align = build_int_cst (size_type_node,
4539 TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
4540 }
4541 else
4542 {
4543 ptr_type = build_pointer_type (TREE_TYPE (new_var));
4544 align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
4545 if (sz == NULL_TREE)
4546 sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
4547 }
4548 if (TREE_CODE (sz) != INTEGER_CST)
4549 {
4550 tree szvar = create_tmp_var (size_type_node);
4551 gimplify_assign (szvar, sz, ilist);
4552 sz = szvar;
4553 }
4554 allocate_ptr = create_tmp_var (ptr_type);
4555 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
4556 gimple *g = gimple_build_call (a, 3, align, sz, allocator);
4557 gimple_call_set_lhs (g, allocate_ptr);
4558 gimple_seq_add_stmt (ilist, g);
4559 if (!is_ref)
4560 {
4561 tree x = build_simple_mem_ref (allocate_ptr);
4562 TREE_THIS_NOTRAP (x) = 1;
4563 SET_DECL_VALUE_EXPR (new_var, x);
4564 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4565 }
4566 return true;
4567 }
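/* Illustrative effect (not part of the original source): given

     #pragma omp parallel private (x) allocate (allocator : x)

   the code above emits, roughly,

     allocate_ptr = GOMP_alloc (__alignof__ (x), sizeof (x), allocator);

   and installs *allocate_ptr as the DECL_VALUE_EXPR of the privatized x;
   the callers emit the matching GOMP_free when the region ends.  */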
4568
4569 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4570 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4571 private variables. Initialization statements go in ILIST, while calls
4572 to destructors go in DLIST. */
4573
4574 static void
4575 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4576 omp_context *ctx, struct omp_for_data *fd)
4577 {
4578 tree c, copyin_seq, x, ptr;
4579 bool copyin_by_ref = false;
4580 bool lastprivate_firstprivate = false;
4581 bool reduction_omp_orig_ref = false;
4582 int pass;
4583 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4584 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4585 omplow_simd_context sctx = omplow_simd_context ();
4586 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4587 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4588 gimple_seq llist[4] = { };
4589 tree nonconst_simd_if = NULL_TREE;
4590
4591 copyin_seq = NULL;
4592 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4593
4594 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4595 with data sharing clauses referencing variable sized vars. That
4596 is unnecessarily hard to support and very unlikely to result in
4597 vectorized code anyway. */
4598 if (is_simd)
4599 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4600 switch (OMP_CLAUSE_CODE (c))
4601 {
4602 case OMP_CLAUSE_LINEAR:
4603 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4604 sctx.max_vf = 1;
4605 /* FALLTHRU */
4606 case OMP_CLAUSE_PRIVATE:
4607 case OMP_CLAUSE_FIRSTPRIVATE:
4608 case OMP_CLAUSE_LASTPRIVATE:
4609 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4610 sctx.max_vf = 1;
4611 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4612 {
4613 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4614 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4615 sctx.max_vf = 1;
4616 }
4617 break;
4618 case OMP_CLAUSE_REDUCTION:
4619 case OMP_CLAUSE_IN_REDUCTION:
4620 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4621 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4622 sctx.max_vf = 1;
4623 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4624 {
4625 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4626 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4627 sctx.max_vf = 1;
4628 }
4629 break;
4630 case OMP_CLAUSE_IF:
4631 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4632 sctx.max_vf = 1;
4633 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4634 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4635 break;
4636 case OMP_CLAUSE_SIMDLEN:
4637 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4638 sctx.max_vf = 1;
4639 break;
4640 case OMP_CLAUSE__CONDTEMP_:
4641 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4642 if (sctx.is_simt)
4643 sctx.max_vf = 1;
4644 break;
4645 default:
4646 continue;
4647 }
4648
4649 /* Add a placeholder for simduid. */
4650 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4651 sctx.simt_eargs.safe_push (NULL_TREE);
4652
4653 unsigned task_reduction_cnt = 0;
4654 unsigned task_reduction_cntorig = 0;
4655 unsigned task_reduction_cnt_full = 0;
4656 unsigned task_reduction_cntorig_full = 0;
4657 unsigned task_reduction_other_cnt = 0;
4658 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4659 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4660 /* Do all the fixed sized types in the first pass, and the variable sized
4661 types in the second pass. This makes sure that the scalar arguments to
4662 the variable sized types are processed before we use them in the
4663 variable sized operations. For task reductions we use 4 passes, in the
4664 first two we ignore them, in the third one gather arguments for
4665 GOMP_task_reduction_remap call and in the last pass actually handle
4666 the task reductions. */
4667 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4668 ? 4 : 2); ++pass)
4669 {
4670 if (pass == 2 && task_reduction_cnt)
4671 {
4672 tskred_atype
4673 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4674 + task_reduction_cntorig);
4675 tskred_avar = create_tmp_var_raw (tskred_atype);
4676 gimple_add_tmp_var (tskred_avar);
4677 TREE_ADDRESSABLE (tskred_avar) = 1;
4678 task_reduction_cnt_full = task_reduction_cnt;
4679 task_reduction_cntorig_full = task_reduction_cntorig;
4680 }
4681 else if (pass == 3 && task_reduction_cnt)
4682 {
4683 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4684 gimple *g
4685 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4686 size_int (task_reduction_cntorig),
4687 build_fold_addr_expr (tskred_avar));
4688 gimple_seq_add_stmt (ilist, g);
4689 }
4690 if (pass == 3 && task_reduction_other_cnt)
4691 {
4692 /* For reduction clauses, build
4693 tskred_base = (void *) tskred_temp[2]
4694 + omp_get_thread_num () * tskred_temp[1]
4695 or if tskred_temp[1] is known to be constant, that constant
4696 directly. This is the start of the private reduction copy block
4697 for the current thread. */
4698 tree v = create_tmp_var (integer_type_node);
4699 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4700 gimple *g = gimple_build_call (x, 0);
4701 gimple_call_set_lhs (g, v);
4702 gimple_seq_add_stmt (ilist, g);
4703 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4704 tskred_temp = OMP_CLAUSE_DECL (c);
4705 if (is_taskreg_ctx (ctx))
4706 tskred_temp = lookup_decl (tskred_temp, ctx);
4707 tree v2 = create_tmp_var (sizetype);
4708 g = gimple_build_assign (v2, NOP_EXPR, v);
4709 gimple_seq_add_stmt (ilist, g);
4710 if (ctx->task_reductions[0])
4711 v = fold_convert (sizetype, ctx->task_reductions[0]);
4712 else
4713 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4714 tree v3 = create_tmp_var (sizetype);
4715 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4716 gimple_seq_add_stmt (ilist, g);
4717 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4718 tskred_base = create_tmp_var (ptr_type_node);
4719 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4720 gimple_seq_add_stmt (ilist, g);
4721 }
4722 task_reduction_cnt = 0;
4723 task_reduction_cntorig = 0;
4724 task_reduction_other_cnt = 0;
4725 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4726 {
4727 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4728 tree var, new_var;
4729 bool by_ref;
4730 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4731 bool task_reduction_p = false;
4732 bool task_reduction_needs_orig_p = false;
4733 tree cond = NULL_TREE;
4734 tree allocator, allocate_ptr;
4735
4736 switch (c_kind)
4737 {
4738 case OMP_CLAUSE_PRIVATE:
4739 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4740 continue;
4741 break;
4742 case OMP_CLAUSE_SHARED:
4743 /* Ignore shared directives in teams construct inside
4744 of target construct. */
4745 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4746 && !is_host_teams_ctx (ctx))
4747 continue;
4748 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4749 {
4750 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4751 || is_global_var (OMP_CLAUSE_DECL (c)));
4752 continue;
4753 }
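	  /* FALLTHRU */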
4754 case OMP_CLAUSE_FIRSTPRIVATE:
4755 case OMP_CLAUSE_COPYIN:
4756 break;
4757 case OMP_CLAUSE_LINEAR:
4758 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4759 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4760 lastprivate_firstprivate = true;
4761 break;
4762 case OMP_CLAUSE_REDUCTION:
4763 case OMP_CLAUSE_IN_REDUCTION:
4764 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4765 {
4766 task_reduction_p = true;
4767 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4768 {
4769 task_reduction_other_cnt++;
4770 if (pass == 2)
4771 continue;
4772 }
4773 else
4774 task_reduction_cnt++;
4775 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4776 {
4777 var = OMP_CLAUSE_DECL (c);
4778 /* If var is a global variable that isn't privatized
4779 in outer contexts, we don't need to look up the
4780 original address, it is always the address of the
4781 global variable itself. */
4782 if (!DECL_P (var)
4783 || omp_is_reference (var)
4784 || !is_global_var
4785 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4786 {
4787 task_reduction_needs_orig_p = true;
4788 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4789 task_reduction_cntorig++;
4790 }
4791 }
4792 }
4793 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4794 reduction_omp_orig_ref = true;
4795 break;
4796 case OMP_CLAUSE__REDUCTEMP_:
4797 if (!is_taskreg_ctx (ctx))
4798 continue;
4799 /* FALLTHRU */
4800 case OMP_CLAUSE__LOOPTEMP_:
4801 /* Handle _looptemp_/_reductemp_ clauses only on
4802 parallel/task. */
4803 if (fd)
4804 continue;
4805 break;
4806 case OMP_CLAUSE_LASTPRIVATE:
4807 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4808 {
4809 lastprivate_firstprivate = true;
4810 if (pass != 0 || is_taskloop_ctx (ctx))
4811 continue;
4812 }
4813 /* Even without corresponding firstprivate, if
4814 decl is Fortran allocatable, it needs outer var
4815 reference. */
4816 else if (pass == 0
4817 && lang_hooks.decls.omp_private_outer_ref
4818 (OMP_CLAUSE_DECL (c)))
4819 lastprivate_firstprivate = true;
4820 break;
4821 case OMP_CLAUSE_ALIGNED:
4822 if (pass != 1)
4823 continue;
4824 var = OMP_CLAUSE_DECL (c);
4825 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4826 && !is_global_var (var))
4827 {
4828 new_var = maybe_lookup_decl (var, ctx);
4829 if (new_var == NULL_TREE)
4830 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4831 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4832 tree alarg = omp_clause_aligned_alignment (c);
4833 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4834 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4835 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4836 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4837 gimplify_and_add (x, ilist);
4838 }
4839 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4840 && is_global_var (var))
4841 {
4842 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4843 new_var = lookup_decl (var, ctx);
4844 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4845 t = build_fold_addr_expr_loc (clause_loc, t);
4846 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4847 tree alarg = omp_clause_aligned_alignment (c);
4848 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4849 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4850 t = fold_convert_loc (clause_loc, ptype, t);
4851 x = create_tmp_var (ptype);
4852 t = build2 (MODIFY_EXPR, ptype, x, t);
4853 gimplify_and_add (t, ilist);
4854 t = build_simple_mem_ref_loc (clause_loc, x);
4855 SET_DECL_VALUE_EXPR (new_var, t);
4856 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4857 }
4858 continue;
4859 case OMP_CLAUSE__CONDTEMP_:
4860 if (is_parallel_ctx (ctx)
4861 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4862 break;
4863 continue;
4864 default:
4865 continue;
4866 }
4867
4868 if (task_reduction_p != (pass >= 2))
4869 continue;
4870
4871 allocator = NULL_TREE;
4872 allocate_ptr = NULL_TREE;
4873 new_var = var = OMP_CLAUSE_DECL (c);
4874 if ((c_kind == OMP_CLAUSE_REDUCTION
4875 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4876 && TREE_CODE (var) == MEM_REF)
4877 {
4878 var = TREE_OPERAND (var, 0);
4879 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4880 var = TREE_OPERAND (var, 0);
4881 if (TREE_CODE (var) == INDIRECT_REF
4882 || TREE_CODE (var) == ADDR_EXPR)
4883 var = TREE_OPERAND (var, 0);
4884 if (is_variable_sized (var))
4885 {
4886 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4887 var = DECL_VALUE_EXPR (var);
4888 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4889 var = TREE_OPERAND (var, 0);
4890 gcc_assert (DECL_P (var));
4891 }
4892 new_var = var;
4893 }
4894 if (c_kind != OMP_CLAUSE_COPYIN)
4895 new_var = lookup_decl (var, ctx);
4896
4897 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4898 {
4899 if (pass != 0)
4900 continue;
4901 }
4902 /* C/C++ array section reductions. */
4903 else if ((c_kind == OMP_CLAUSE_REDUCTION
4904 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4905 && var != OMP_CLAUSE_DECL (c))
4906 {
4907 if (pass == 0)
4908 continue;
4909
4910 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4911 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4912
4913 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4914 {
4915 tree b = TREE_OPERAND (orig_var, 1);
4916 b = maybe_lookup_decl (b, ctx);
4917 if (b == NULL)
4918 {
4919 b = TREE_OPERAND (orig_var, 1);
4920 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4921 }
4922 if (integer_zerop (bias))
4923 bias = b;
4924 else
4925 {
4926 bias = fold_convert_loc (clause_loc,
4927 TREE_TYPE (b), bias);
4928 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4929 TREE_TYPE (b), b, bias);
4930 }
4931 orig_var = TREE_OPERAND (orig_var, 0);
4932 }
4933 if (pass == 2)
4934 {
4935 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4936 if (is_global_var (out)
4937 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4938 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4939 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4940 != POINTER_TYPE)))
4941 x = var;
4942 else
4943 {
4944 bool by_ref = use_pointer_for_field (var, NULL);
4945 x = build_receiver_ref (var, by_ref, ctx);
4946 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4947 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4948 == POINTER_TYPE))
4949 x = build_fold_addr_expr (x);
4950 }
4951 if (TREE_CODE (orig_var) == INDIRECT_REF)
4952 x = build_simple_mem_ref (x);
4953 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4954 {
4955 if (var == TREE_OPERAND (orig_var, 0))
4956 x = build_fold_addr_expr (x);
4957 }
4958 bias = fold_convert (sizetype, bias);
4959 x = fold_convert (ptr_type_node, x);
4960 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4961 TREE_TYPE (x), x, bias);
4962 unsigned cnt = task_reduction_cnt - 1;
4963 if (!task_reduction_needs_orig_p)
4964 cnt += (task_reduction_cntorig_full
4965 - task_reduction_cntorig);
4966 else
4967 cnt = task_reduction_cntorig - 1;
4968 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4969 size_int (cnt), NULL_TREE, NULL_TREE);
4970 gimplify_assign (r, x, ilist);
4971 continue;
4972 }
4973
4974 if (TREE_CODE (orig_var) == INDIRECT_REF
4975 || TREE_CODE (orig_var) == ADDR_EXPR)
4976 orig_var = TREE_OPERAND (orig_var, 0);
4977 tree d = OMP_CLAUSE_DECL (c);
4978 tree type = TREE_TYPE (d);
4979 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4980 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4981 tree sz = v;
4982 const char *name = get_name (orig_var);
4983 if (pass != 3 && !TREE_CONSTANT (v))
4984 {
4985 tree t = maybe_lookup_decl (v, ctx);
4986 if (t)
4987 v = t;
4988 else
4989 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4990 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4991 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4992 TREE_TYPE (v), v,
4993 build_int_cst (TREE_TYPE (v), 1));
4994 sz = fold_build2_loc (clause_loc, MULT_EXPR,
4995 TREE_TYPE (v), t,
4996 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4997 }
4998 if (pass == 3)
4999 {
5000 tree xv = create_tmp_var (ptr_type_node);
5001 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5002 {
5003 unsigned cnt = task_reduction_cnt - 1;
5004 if (!task_reduction_needs_orig_p)
5005 cnt += (task_reduction_cntorig_full
5006 - task_reduction_cntorig);
5007 else
5008 cnt = task_reduction_cntorig - 1;
5009 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5010 size_int (cnt), NULL_TREE, NULL_TREE);
5011
5012 gimple *g = gimple_build_assign (xv, x);
5013 gimple_seq_add_stmt (ilist, g);
5014 }
5015 else
5016 {
5017 unsigned int idx = *ctx->task_reduction_map->get (c);
5018 tree off;
5019 if (ctx->task_reductions[1 + idx])
5020 off = fold_convert (sizetype,
5021 ctx->task_reductions[1 + idx]);
5022 else
5023 off = task_reduction_read (ilist, tskred_temp, sizetype,
5024 7 + 3 * idx + 1);
5025 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5026 tskred_base, off);
5027 gimple_seq_add_stmt (ilist, g);
5028 }
5029 x = fold_convert (build_pointer_type (boolean_type_node),
5030 xv);
5031 if (TREE_CONSTANT (v))
5032 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5033 TYPE_SIZE_UNIT (type));
5034 else
5035 {
5036 tree t = maybe_lookup_decl (v, ctx);
5037 if (t)
5038 v = t;
5039 else
5040 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5041 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5042 fb_rvalue);
5043 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5044 TREE_TYPE (v), v,
5045 build_int_cst (TREE_TYPE (v), 1));
5046 t = fold_build2_loc (clause_loc, MULT_EXPR,
5047 TREE_TYPE (v), t,
5048 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5049 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5050 }
5051 cond = create_tmp_var (TREE_TYPE (x));
5052 gimplify_assign (cond, x, ilist);
5053 x = xv;
5054 }
5055 else if (lower_private_allocate (var, type, allocator,
5056 allocate_ptr, ilist, ctx,
5057 true,
5058 TREE_CONSTANT (v)
5059 ? TYPE_SIZE_UNIT (type)
5060 : sz))
5061 x = allocate_ptr;
5062 else if (TREE_CONSTANT (v))
5063 {
5064 x = create_tmp_var_raw (type, name);
5065 gimple_add_tmp_var (x);
5066 TREE_ADDRESSABLE (x) = 1;
5067 x = build_fold_addr_expr_loc (clause_loc, x);
5068 }
5069 else
5070 {
5071 tree atmp
5072 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5073 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5074 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5075 }
5076
5077 tree ptype = build_pointer_type (TREE_TYPE (type));
5078 x = fold_convert_loc (clause_loc, ptype, x);
5079 tree y = create_tmp_var (ptype, name);
5080 gimplify_assign (y, x, ilist);
5081 x = y;
5082 tree yb = y;
5083
5084 if (!integer_zerop (bias))
5085 {
5086 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5087 bias);
5088 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5089 x);
5090 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5091 pointer_sized_int_node, yb, bias);
5092 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5093 yb = create_tmp_var (ptype, name);
5094 gimplify_assign (yb, x, ilist);
5095 x = yb;
5096 }
5097
5098 d = TREE_OPERAND (d, 0);
5099 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5100 d = TREE_OPERAND (d, 0);
5101 if (TREE_CODE (d) == ADDR_EXPR)
5102 {
5103 if (orig_var != var)
5104 {
5105 gcc_assert (is_variable_sized (orig_var));
5106 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5107 x);
5108 gimplify_assign (new_var, x, ilist);
5109 tree new_orig_var = lookup_decl (orig_var, ctx);
5110 tree t = build_fold_indirect_ref (new_var);
5111 DECL_IGNORED_P (new_var) = 0;
5112 TREE_THIS_NOTRAP (t) = 1;
5113 SET_DECL_VALUE_EXPR (new_orig_var, t);
5114 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5115 }
5116 else
5117 {
5118 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5119 build_int_cst (ptype, 0));
5120 SET_DECL_VALUE_EXPR (new_var, x);
5121 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5122 }
5123 }
5124 else
5125 {
5126 gcc_assert (orig_var == var);
5127 if (TREE_CODE (d) == INDIRECT_REF)
5128 {
5129 x = create_tmp_var (ptype, name);
5130 TREE_ADDRESSABLE (x) = 1;
5131 gimplify_assign (x, yb, ilist);
5132 x = build_fold_addr_expr_loc (clause_loc, x);
5133 }
5134 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5135 gimplify_assign (new_var, x, ilist);
5136 }
5137 /* GOMP_taskgroup_reduction_register memsets the whole
5138 array to zero. If the initializer is zero, we don't
5139 need to initialize it again, just mark it as ever
5140 used unconditionally, i.e. cond = true. */
5141 if (cond
5142 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5143 && initializer_zerop (omp_reduction_init (c,
5144 TREE_TYPE (type))))
5145 {
5146 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5147 boolean_true_node);
5148 gimple_seq_add_stmt (ilist, g);
5149 continue;
5150 }
5151 tree end = create_artificial_label (UNKNOWN_LOCATION);
5152 if (cond)
5153 {
5154 gimple *g;
5155 if (!is_parallel_ctx (ctx))
5156 {
5157 tree condv = create_tmp_var (boolean_type_node);
5158 g = gimple_build_assign (condv,
5159 build_simple_mem_ref (cond));
5160 gimple_seq_add_stmt (ilist, g);
5161 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5162 g = gimple_build_cond (NE_EXPR, condv,
5163 boolean_false_node, end, lab1);
5164 gimple_seq_add_stmt (ilist, g);
5165 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5166 }
5167 g = gimple_build_assign (build_simple_mem_ref (cond),
5168 boolean_true_node);
5169 gimple_seq_add_stmt (ilist, g);
5170 }
5171
5172 tree y1 = create_tmp_var (ptype);
5173 gimplify_assign (y1, y, ilist);
5174 tree i2 = NULL_TREE, y2 = NULL_TREE;
5175 tree body2 = NULL_TREE, end2 = NULL_TREE;
5176 tree y3 = NULL_TREE, y4 = NULL_TREE;
5177 if (task_reduction_needs_orig_p)
5178 {
5179 y3 = create_tmp_var (ptype);
5180 tree ref;
5181 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5182 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5183 size_int (task_reduction_cnt_full
5184 + task_reduction_cntorig - 1),
5185 NULL_TREE, NULL_TREE);
5186 else
5187 {
5188 unsigned int idx = *ctx->task_reduction_map->get (c);
5189 ref = task_reduction_read (ilist, tskred_temp, ptype,
5190 7 + 3 * idx);
5191 }
5192 gimplify_assign (y3, ref, ilist);
5193 }
5194 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5195 {
5196 if (pass != 3)
5197 {
5198 y2 = create_tmp_var (ptype);
5199 gimplify_assign (y2, y, ilist);
5200 }
5201 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5202 {
5203 tree ref = build_outer_var_ref (var, ctx);
5204 /* For references, build_outer_var_ref already performs this. */
5205 if (TREE_CODE (d) == INDIRECT_REF)
5206 gcc_assert (omp_is_reference (var));
5207 else if (TREE_CODE (d) == ADDR_EXPR)
5208 ref = build_fold_addr_expr (ref);
5209 else if (omp_is_reference (var))
5210 ref = build_fold_addr_expr (ref);
5211 ref = fold_convert_loc (clause_loc, ptype, ref);
5212 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5213 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5214 {
5215 y3 = create_tmp_var (ptype);
5216 gimplify_assign (y3, unshare_expr (ref), ilist);
5217 }
5218 if (is_simd)
5219 {
5220 y4 = create_tmp_var (ptype);
5221 gimplify_assign (y4, ref, dlist);
5222 }
5223 }
5224 }
5225 tree i = create_tmp_var (TREE_TYPE (v));
5226 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5227 tree body = create_artificial_label (UNKNOWN_LOCATION);
5228 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5229 if (y2)
5230 {
5231 i2 = create_tmp_var (TREE_TYPE (v));
5232 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5233 body2 = create_artificial_label (UNKNOWN_LOCATION);
5234 end2 = create_artificial_label (UNKNOWN_LOCATION);
5235 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5236 }
5237 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5238 {
5239 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5240 tree decl_placeholder
5241 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5242 SET_DECL_VALUE_EXPR (decl_placeholder,
5243 build_simple_mem_ref (y1));
5244 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5245 SET_DECL_VALUE_EXPR (placeholder,
5246 y3 ? build_simple_mem_ref (y3)
5247 : error_mark_node);
5248 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5249 x = lang_hooks.decls.omp_clause_default_ctor
5250 (c, build_simple_mem_ref (y1),
5251 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5252 if (x)
5253 gimplify_and_add (x, ilist);
5254 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5255 {
5256 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5257 lower_omp (&tseq, ctx);
5258 gimple_seq_add_seq (ilist, tseq);
5259 }
5260 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5261 if (is_simd)
5262 {
5263 SET_DECL_VALUE_EXPR (decl_placeholder,
5264 build_simple_mem_ref (y2));
5265 SET_DECL_VALUE_EXPR (placeholder,
5266 build_simple_mem_ref (y4));
5267 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5268 lower_omp (&tseq, ctx);
5269 gimple_seq_add_seq (dlist, tseq);
5270 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5271 }
5272 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5273 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5274 if (y2)
5275 {
5276 x = lang_hooks.decls.omp_clause_dtor
5277 (c, build_simple_mem_ref (y2));
5278 if (x)
5279 gimplify_and_add (x, dlist);
5280 }
5281 }
5282 else
5283 {
5284 x = omp_reduction_init (c, TREE_TYPE (type));
5285 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5286
5287 /* reduction(-:var) sums up the partial results, so it
5288 acts identically to reduction(+:var). */
5289 if (code == MINUS_EXPR)
5290 code = PLUS_EXPR;
5291
5292 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5293 if (is_simd)
5294 {
5295 x = build2 (code, TREE_TYPE (type),
5296 build_simple_mem_ref (y4),
5297 build_simple_mem_ref (y2));
5298 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5299 }
5300 }
5301 gimple *g
5302 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5303 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5304 gimple_seq_add_stmt (ilist, g);
5305 if (y3)
5306 {
5307 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5308 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5309 gimple_seq_add_stmt (ilist, g);
5310 }
5311 g = gimple_build_assign (i, PLUS_EXPR, i,
5312 build_int_cst (TREE_TYPE (i), 1));
5313 gimple_seq_add_stmt (ilist, g);
5314 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5315 gimple_seq_add_stmt (ilist, g);
5316 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5317 if (y2)
5318 {
5319 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5320 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5321 gimple_seq_add_stmt (dlist, g);
5322 if (y4)
5323 {
5324 g = gimple_build_assign
5325 (y4, POINTER_PLUS_EXPR, y4,
5326 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5327 gimple_seq_add_stmt (dlist, g);
5328 }
5329 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5330 build_int_cst (TREE_TYPE (i2), 1));
5331 gimple_seq_add_stmt (dlist, g);
5332 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5333 gimple_seq_add_stmt (dlist, g);
5334 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5335 }
5336 if (allocator)
5337 {
5338 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5339 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5340 gimple_seq_add_stmt (dlist, g);
5341 }
5342 continue;
5343 }
5344 else if (pass == 2)
5345 {
5346 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5347 x = var;
5348 else
5349 {
5350 bool by_ref = use_pointer_for_field (var, ctx);
5351 x = build_receiver_ref (var, by_ref, ctx);
5352 }
5353 if (!omp_is_reference (var))
5354 x = build_fold_addr_expr (x);
5355 x = fold_convert (ptr_type_node, x);
5356 unsigned cnt = task_reduction_cnt - 1;
5357 if (!task_reduction_needs_orig_p)
5358 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5359 else
5360 cnt = task_reduction_cntorig - 1;
5361 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5362 size_int (cnt), NULL_TREE, NULL_TREE);
5363 gimplify_assign (r, x, ilist);
5364 continue;
5365 }
5366 else if (pass == 3)
5367 {
5368 tree type = TREE_TYPE (new_var);
5369 if (!omp_is_reference (var))
5370 type = build_pointer_type (type);
5371 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5372 {
5373 unsigned cnt = task_reduction_cnt - 1;
5374 if (!task_reduction_needs_orig_p)
5375 cnt += (task_reduction_cntorig_full
5376 - task_reduction_cntorig);
5377 else
5378 cnt = task_reduction_cntorig - 1;
5379 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5380 size_int (cnt), NULL_TREE, NULL_TREE);
5381 }
5382 else
5383 {
5384 unsigned int idx = *ctx->task_reduction_map->get (c);
5385 tree off;
5386 if (ctx->task_reductions[1 + idx])
5387 off = fold_convert (sizetype,
5388 ctx->task_reductions[1 + idx]);
5389 else
5390 off = task_reduction_read (ilist, tskred_temp, sizetype,
5391 7 + 3 * idx + 1);
5392 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5393 tskred_base, off);
5394 }
5395 x = fold_convert (type, x);
5396 tree t;
5397 if (omp_is_reference (var))
5398 {
5399 gimplify_assign (new_var, x, ilist);
5400 t = new_var;
5401 new_var = build_simple_mem_ref (new_var);
5402 }
5403 else
5404 {
5405 t = create_tmp_var (type);
5406 gimplify_assign (t, x, ilist);
5407 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5408 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5409 }
5410 t = fold_convert (build_pointer_type (boolean_type_node), t);
5411 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5412 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5413 cond = create_tmp_var (TREE_TYPE (t));
5414 gimplify_assign (cond, t, ilist);
5415 }
5416 else if (is_variable_sized (var))
5417 {
5418 /* For variable sized types, we need to allocate the
5419 actual storage here. Call alloca and store the
5420 result in the pointer decl that we created elsewhere. */
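	      /* Roughly (a sketch): for a privatized VLA such as
		 "char buf[n]", NEW_VAR is accessed through a pointer whose
		 DECL_VALUE_EXPR is *PTR, and below we emit the equivalent of
		   ptr = __builtin_alloca_with_align (size, align);
		 unless an allocate clause supplies the storage instead.  */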
5421 if (pass == 0)
5422 continue;
5423
5424 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5425 {
5426 tree tmp;
5427
5428 ptr = DECL_VALUE_EXPR (new_var);
5429 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5430 ptr = TREE_OPERAND (ptr, 0);
5431 gcc_assert (DECL_P (ptr));
5432 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5433
5434 if (lower_private_allocate (var, new_var, allocator,
5435 allocate_ptr, ilist, ctx,
5436 false, x))
5437 tmp = allocate_ptr;
5438 else
5439 {
5440			  /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var)); */
5441 tree atmp
5442 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5443 gcall *stmt
5444 = gimple_build_call (atmp, 2, x,
5445 size_int (DECL_ALIGN (var)));
5446 cfun->calls_alloca = 1;
5447 tmp = create_tmp_var_raw (ptr_type_node);
5448 gimple_add_tmp_var (tmp);
5449 gimple_call_set_lhs (stmt, tmp);
5450
5451 gimple_seq_add_stmt (ilist, stmt);
5452 }
5453
5454 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5455 gimplify_assign (ptr, x, ilist);
5456 }
5457 }
5458 else if (omp_is_reference (var)
5459 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5460 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5461 {
5462 /* For references that are being privatized for Fortran,
5463 allocate new backing storage for the new pointer
5464	     variable.  This allows us to avoid changing all the
5465	     code that expects a pointer into code that expects
5466	     a direct variable.  */
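	      /* A sketch of the effect: NEW_VAR keeps its pointer type and
		 is pointed at fresh backing storage, either
		   new_var = &<stack temporary>;
		 when the size is a compile-time constant, or the equivalent
		 of
		   new_var = __builtin_alloca_with_align (size, align);
		 otherwise; the code below then dereferences NEW_VAR.  */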
5467 if (pass == 0)
5468 continue;
5469
5470 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5471 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5472 {
5473 x = build_receiver_ref (var, false, ctx);
5474 if (ctx->allocate_map)
5475 if (tree *allocatep = ctx->allocate_map->get (var))
5476 {
5477 allocator = *allocatep;
5478 if (TREE_CODE (allocator) != INTEGER_CST)
5479 allocator = build_outer_var_ref (allocator, ctx);
5480 allocator = fold_convert (pointer_sized_int_node,
5481 allocator);
5482 allocate_ptr = unshare_expr (x);
5483 }
5484 if (allocator == NULL_TREE)
5485 x = build_fold_addr_expr_loc (clause_loc, x);
5486 }
5487 else if (lower_private_allocate (var, new_var, allocator,
5488 allocate_ptr,
5489 ilist, ctx, true, x))
5490 x = allocate_ptr;
5491 else if (TREE_CONSTANT (x))
5492 {
5493 /* For reduction in SIMD loop, defer adding the
5494 initialization of the reference, because if we decide
5495			 to use SIMD array for it, the initialization could cause
5496 expansion ICE. Ditto for other privatization clauses. */
5497 if (is_simd)
5498 x = NULL_TREE;
5499 else
5500 {
5501 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5502 get_name (var));
5503 gimple_add_tmp_var (x);
5504 TREE_ADDRESSABLE (x) = 1;
5505 x = build_fold_addr_expr_loc (clause_loc, x);
5506 }
5507 }
5508 else
5509 {
5510 tree atmp
5511 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5512 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5513 tree al = size_int (TYPE_ALIGN (rtype));
5514 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5515 }
5516
5517 if (x)
5518 {
5519 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5520 gimplify_assign (new_var, x, ilist);
5521 }
5522
5523 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5524 }
5525 else if ((c_kind == OMP_CLAUSE_REDUCTION
5526 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5527 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5528 {
5529 if (pass == 0)
5530 continue;
5531 }
5532 else if (pass != 0)
5533 continue;
5534
5535 switch (OMP_CLAUSE_CODE (c))
5536 {
5537 case OMP_CLAUSE_SHARED:
5538	      /* Ignore shared directives in a teams construct inside
5539		 a target construct.  */
5540 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5541 && !is_host_teams_ctx (ctx))
5542 continue;
5543 /* Shared global vars are just accessed directly. */
5544 if (is_global_var (new_var))
5545 break;
5546 /* For taskloop firstprivate/lastprivate, represented
5547	       as firstprivate and shared clauses on the task, new_var
5548 is the firstprivate var. */
5549 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5550 break;
5551 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5552 needs to be delayed until after fixup_child_record_type so
5553 that we get the correct type during the dereference. */
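	  /* For instance (a sketch): with "#pragma omp parallel shared(x)"
	     where X must be passed by reference, uses of X in the outlined
	     body are rewritten through the value expr to roughly
	       *.omp_data_i->x.  */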
5554 by_ref = use_pointer_for_field (var, ctx);
5555 x = build_receiver_ref (var, by_ref, ctx);
5556 SET_DECL_VALUE_EXPR (new_var, x);
5557 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5558
5559 /* ??? If VAR is not passed by reference, and the variable
5560 hasn't been initialized yet, then we'll get a warning for
5561 the store into the omp_data_s structure. Ideally, we'd be
5562 able to notice this and not store anything at all, but
5563 we're generating code too early. Suppress the warning. */
5564 if (!by_ref)
5565 TREE_NO_WARNING (var) = 1;
5566 break;
5567
5568 case OMP_CLAUSE__CONDTEMP_:
5569 if (is_parallel_ctx (ctx))
5570 {
5571 x = build_receiver_ref (var, false, ctx);
5572 SET_DECL_VALUE_EXPR (new_var, x);
5573 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5574 }
5575 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5576 {
5577 x = build_zero_cst (TREE_TYPE (var));
5578 goto do_private;
5579 }
5580 break;
5581
5582 case OMP_CLAUSE_LASTPRIVATE:
5583 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5584 break;
5585 /* FALLTHRU */
5586
5587 case OMP_CLAUSE_PRIVATE:
5588 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5589 x = build_outer_var_ref (var, ctx);
5590 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5591 {
5592 if (is_task_ctx (ctx))
5593 x = build_receiver_ref (var, false, ctx);
5594 else
5595 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5596 }
5597 else
5598 x = NULL;
5599 do_private:
5600 tree nx;
5601 bool copy_ctor;
5602 copy_ctor = false;
5603 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5604 ilist, ctx, false, NULL_TREE);
5605 nx = unshare_expr (new_var);
5606 if (is_simd
5607 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5608 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5609 copy_ctor = true;
5610 if (copy_ctor)
5611 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5612 else
5613 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5614 if (is_simd)
5615 {
5616 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5617 if ((TREE_ADDRESSABLE (new_var) || nx || y
5618 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5619 && (gimple_omp_for_collapse (ctx->stmt) != 1
5620 || (gimple_omp_for_index (ctx->stmt, 0)
5621 != new_var)))
5622 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5623 || omp_is_reference (var))
5624 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5625 ivar, lvar))
5626 {
5627 if (omp_is_reference (var))
5628 {
5629 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5630 tree new_vard = TREE_OPERAND (new_var, 0);
5631 gcc_assert (DECL_P (new_vard));
5632 SET_DECL_VALUE_EXPR (new_vard,
5633 build_fold_addr_expr (lvar));
5634 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5635 }
5636
5637 if (nx)
5638 {
5639 tree iv = unshare_expr (ivar);
5640 if (copy_ctor)
5641 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5642 x);
5643 else
5644 x = lang_hooks.decls.omp_clause_default_ctor (c,
5645 iv,
5646 x);
5647 }
5648 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5649 {
5650 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5651 unshare_expr (ivar), x);
5652 nx = x;
5653 }
5654 if (nx && x)
5655 gimplify_and_add (x, &llist[0]);
5656 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5657 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5658 {
5659 tree v = new_var;
5660 if (!DECL_P (v))
5661 {
5662 gcc_assert (TREE_CODE (v) == MEM_REF);
5663 v = TREE_OPERAND (v, 0);
5664 gcc_assert (DECL_P (v));
5665 }
5666 v = *ctx->lastprivate_conditional_map->get (v);
5667 tree t = create_tmp_var (TREE_TYPE (v));
5668 tree z = build_zero_cst (TREE_TYPE (v));
5669 tree orig_v
5670 = build_outer_var_ref (var, ctx,
5671 OMP_CLAUSE_LASTPRIVATE);
5672 gimple_seq_add_stmt (dlist,
5673 gimple_build_assign (t, z));
5674 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5675 tree civar = DECL_VALUE_EXPR (v);
5676 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5677 civar = unshare_expr (civar);
5678 TREE_OPERAND (civar, 1) = sctx.idx;
5679 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5680 unshare_expr (civar));
5681 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5682 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5683 orig_v, unshare_expr (ivar)));
5684 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5685 civar);
5686 x = build3 (COND_EXPR, void_type_node, cond, x,
5687 void_node);
5688 gimple_seq tseq = NULL;
5689 gimplify_and_add (x, &tseq);
5690 if (ctx->outer)
5691 lower_omp (&tseq, ctx->outer);
5692 gimple_seq_add_seq (&llist[1], tseq);
5693 }
5694 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5695 && ctx->for_simd_scan_phase)
5696 {
5697 x = unshare_expr (ivar);
5698 tree orig_v
5699 = build_outer_var_ref (var, ctx,
5700 OMP_CLAUSE_LASTPRIVATE);
5701 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5702 orig_v);
5703 gimplify_and_add (x, &llist[0]);
5704 }
5705 if (y)
5706 {
5707 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5708 if (y)
5709 gimplify_and_add (y, &llist[1]);
5710 }
5711 break;
5712 }
5713 if (omp_is_reference (var))
5714 {
5715 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5716 tree new_vard = TREE_OPERAND (new_var, 0);
5717 gcc_assert (DECL_P (new_vard));
5718 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5719 x = TYPE_SIZE_UNIT (type);
5720 if (TREE_CONSTANT (x))
5721 {
5722 x = create_tmp_var_raw (type, get_name (var));
5723 gimple_add_tmp_var (x);
5724 TREE_ADDRESSABLE (x) = 1;
5725 x = build_fold_addr_expr_loc (clause_loc, x);
5726 x = fold_convert_loc (clause_loc,
5727 TREE_TYPE (new_vard), x);
5728 gimplify_assign (new_vard, x, ilist);
5729 }
5730 }
5731 }
5732 if (nx)
5733 gimplify_and_add (nx, ilist);
5734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5735 && is_simd
5736 && ctx->for_simd_scan_phase)
5737 {
5738 tree orig_v = build_outer_var_ref (var, ctx,
5739 OMP_CLAUSE_LASTPRIVATE);
5740 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5741 orig_v);
5742 gimplify_and_add (x, ilist);
5743 }
5744 /* FALLTHRU */
5745
5746 do_dtor:
5747 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5748 if (x)
5749 gimplify_and_add (x, dlist);
5750 if (allocator)
5751 {
5752 if (!is_gimple_val (allocator))
5753 {
5754 tree avar = create_tmp_var (TREE_TYPE (allocator));
5755 gimplify_assign (avar, allocator, dlist);
5756 allocator = avar;
5757 }
5758 if (!is_gimple_val (allocate_ptr))
5759 {
5760 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
5761 gimplify_assign (apvar, allocate_ptr, dlist);
5762 allocate_ptr = apvar;
5763 }
5764 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5765 gimple *g
5766 = gimple_build_call (f, 2, allocate_ptr, allocator);
5767 gimple_seq_add_stmt (dlist, g);
5768 }
5769 break;
5770
5771 case OMP_CLAUSE_LINEAR:
5772 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5773 goto do_firstprivate;
5774 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5775 x = NULL;
5776 else
5777 x = build_outer_var_ref (var, ctx);
5778 goto do_private;
5779
5780 case OMP_CLAUSE_FIRSTPRIVATE:
5781 if (is_task_ctx (ctx))
5782 {
5783 if ((omp_is_reference (var)
5784 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5785 || is_variable_sized (var))
5786 goto do_dtor;
5787 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5788 ctx))
5789 || use_pointer_for_field (var, NULL))
5790 {
5791 x = build_receiver_ref (var, false, ctx);
5792 if (ctx->allocate_map)
5793 if (tree *allocatep = ctx->allocate_map->get (var))
5794 {
5795 allocator = *allocatep;
5796 if (TREE_CODE (allocator) != INTEGER_CST)
5797 allocator = build_outer_var_ref (allocator, ctx);
5798 allocator = fold_convert (pointer_sized_int_node,
5799 allocator);
5800 allocate_ptr = unshare_expr (x);
5801 x = build_simple_mem_ref (x);
5802 TREE_THIS_NOTRAP (x) = 1;
5803 }
5804 SET_DECL_VALUE_EXPR (new_var, x);
5805 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5806 goto do_dtor;
5807 }
5808 }
5809 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5810 && omp_is_reference (var))
5811 {
5812 x = build_outer_var_ref (var, ctx);
5813 gcc_assert (TREE_CODE (x) == MEM_REF
5814 && integer_zerop (TREE_OPERAND (x, 1)));
5815 x = TREE_OPERAND (x, 0);
5816 x = lang_hooks.decls.omp_clause_copy_ctor
5817 (c, unshare_expr (new_var), x);
5818 gimplify_and_add (x, ilist);
5819 goto do_dtor;
5820 }
5821 do_firstprivate:
5822 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5823 ilist, ctx, false, NULL_TREE);
5824 x = build_outer_var_ref (var, ctx);
5825 if (is_simd)
5826 {
5827 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5828 && gimple_omp_for_combined_into_p (ctx->stmt))
5829 {
5830 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5831 tree stept = TREE_TYPE (t);
5832 tree ct = omp_find_clause (clauses,
5833 OMP_CLAUSE__LOOPTEMP_);
5834 gcc_assert (ct);
5835 tree l = OMP_CLAUSE_DECL (ct);
5836 tree n1 = fd->loop.n1;
5837 tree step = fd->loop.step;
5838 tree itype = TREE_TYPE (l);
5839 if (POINTER_TYPE_P (itype))
5840 itype = signed_type_for (itype);
5841 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5842 if (TYPE_UNSIGNED (itype)
5843 && fd->loop.cond_code == GT_EXPR)
5844 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5845 fold_build1 (NEGATE_EXPR, itype, l),
5846 fold_build1 (NEGATE_EXPR,
5847 itype, step));
5848 else
5849 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5850 t = fold_build2 (MULT_EXPR, stept,
5851 fold_convert (stept, l), t);
5852
5853 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5854 {
5855 if (omp_is_reference (var))
5856 {
5857 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5858 tree new_vard = TREE_OPERAND (new_var, 0);
5859 gcc_assert (DECL_P (new_vard));
5860 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5861 nx = TYPE_SIZE_UNIT (type);
5862 if (TREE_CONSTANT (nx))
5863 {
5864 nx = create_tmp_var_raw (type,
5865 get_name (var));
5866 gimple_add_tmp_var (nx);
5867 TREE_ADDRESSABLE (nx) = 1;
5868 nx = build_fold_addr_expr_loc (clause_loc,
5869 nx);
5870 nx = fold_convert_loc (clause_loc,
5871 TREE_TYPE (new_vard),
5872 nx);
5873 gimplify_assign (new_vard, nx, ilist);
5874 }
5875 }
5876
5877 x = lang_hooks.decls.omp_clause_linear_ctor
5878 (c, new_var, x, t);
5879 gimplify_and_add (x, ilist);
5880 goto do_dtor;
5881 }
5882
5883 if (POINTER_TYPE_P (TREE_TYPE (x)))
5884 x = fold_build2 (POINTER_PLUS_EXPR,
5885 TREE_TYPE (x), x, t);
5886 else
5887 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5888 }
5889
5890 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5891 || TREE_ADDRESSABLE (new_var)
5892 || omp_is_reference (var))
5893 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5894 ivar, lvar))
5895 {
5896 if (omp_is_reference (var))
5897 {
5898 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5899 tree new_vard = TREE_OPERAND (new_var, 0);
5900 gcc_assert (DECL_P (new_vard));
5901 SET_DECL_VALUE_EXPR (new_vard,
5902 build_fold_addr_expr (lvar));
5903 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5904 }
5905 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5906 {
5907 tree iv = create_tmp_var (TREE_TYPE (new_var));
5908 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5909 gimplify_and_add (x, ilist);
5910 gimple_stmt_iterator gsi
5911 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5912 gassign *g
5913 = gimple_build_assign (unshare_expr (lvar), iv);
5914 gsi_insert_before_without_update (&gsi, g,
5915 GSI_SAME_STMT);
5916 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5917 enum tree_code code = PLUS_EXPR;
5918 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5919 code = POINTER_PLUS_EXPR;
5920 g = gimple_build_assign (iv, code, iv, t);
5921 gsi_insert_before_without_update (&gsi, g,
5922 GSI_SAME_STMT);
5923 break;
5924 }
5925 x = lang_hooks.decls.omp_clause_copy_ctor
5926 (c, unshare_expr (ivar), x);
5927 gimplify_and_add (x, &llist[0]);
5928 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5929 if (x)
5930 gimplify_and_add (x, &llist[1]);
5931 break;
5932 }
5933 if (omp_is_reference (var))
5934 {
5935 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5936 tree new_vard = TREE_OPERAND (new_var, 0);
5937 gcc_assert (DECL_P (new_vard));
5938 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5939 nx = TYPE_SIZE_UNIT (type);
5940 if (TREE_CONSTANT (nx))
5941 {
5942 nx = create_tmp_var_raw (type, get_name (var));
5943 gimple_add_tmp_var (nx);
5944 TREE_ADDRESSABLE (nx) = 1;
5945 nx = build_fold_addr_expr_loc (clause_loc, nx);
5946 nx = fold_convert_loc (clause_loc,
5947 TREE_TYPE (new_vard), nx);
5948 gimplify_assign (new_vard, nx, ilist);
5949 }
5950 }
5951 }
5952 x = lang_hooks.decls.omp_clause_copy_ctor
5953 (c, unshare_expr (new_var), x);
5954 gimplify_and_add (x, ilist);
5955 goto do_dtor;
5956
5957 case OMP_CLAUSE__LOOPTEMP_:
5958 case OMP_CLAUSE__REDUCTEMP_:
5959 gcc_assert (is_taskreg_ctx (ctx));
5960 x = build_outer_var_ref (var, ctx);
5961 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5962 gimplify_and_add (x, ilist);
5963 break;
5964
5965 case OMP_CLAUSE_COPYIN:
5966 by_ref = use_pointer_for_field (var, NULL);
5967 x = build_receiver_ref (var, by_ref, ctx);
5968 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5969 append_to_statement_list (x, &copyin_seq);
5970 copyin_by_ref |= by_ref;
5971 break;
5972
5973 case OMP_CLAUSE_REDUCTION:
5974 case OMP_CLAUSE_IN_REDUCTION:
5975 /* OpenACC reductions are initialized using the
5976 GOACC_REDUCTION internal function. */
5977 if (is_gimple_omp_oacc (ctx->stmt))
5978 break;
5979 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5980 {
5981 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5982 gimple *tseq;
5983 tree ptype = TREE_TYPE (placeholder);
5984 if (cond)
5985 {
5986 x = error_mark_node;
5987 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5988 && !task_reduction_needs_orig_p)
5989 x = var;
5990 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5991 {
5992 tree pptype = build_pointer_type (ptype);
5993 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5994 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5995 size_int (task_reduction_cnt_full
5996 + task_reduction_cntorig - 1),
5997 NULL_TREE, NULL_TREE);
5998 else
5999 {
6000 unsigned int idx
6001 = *ctx->task_reduction_map->get (c);
6002 x = task_reduction_read (ilist, tskred_temp,
6003 pptype, 7 + 3 * idx);
6004 }
6005 x = fold_convert (pptype, x);
6006 x = build_simple_mem_ref (x);
6007 }
6008 }
6009 else
6010 {
6011 lower_private_allocate (var, new_var, allocator,
6012 allocate_ptr, ilist, ctx, false,
6013 NULL_TREE);
6014 x = build_outer_var_ref (var, ctx);
6015
6016 if (omp_is_reference (var)
6017 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6018 x = build_fold_addr_expr_loc (clause_loc, x);
6019 }
6020 SET_DECL_VALUE_EXPR (placeholder, x);
6021 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6022 tree new_vard = new_var;
6023 if (omp_is_reference (var))
6024 {
6025 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6026 new_vard = TREE_OPERAND (new_var, 0);
6027 gcc_assert (DECL_P (new_vard));
6028 }
6029 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6030 if (is_simd
6031 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6032 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6033 rvarp = &rvar;
6034 if (is_simd
6035 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6036 ivar, lvar, rvarp,
6037 &rvar2))
6038 {
6039 if (new_vard == new_var)
6040 {
6041 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6042 SET_DECL_VALUE_EXPR (new_var, ivar);
6043 }
6044 else
6045 {
6046 SET_DECL_VALUE_EXPR (new_vard,
6047 build_fold_addr_expr (ivar));
6048 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6049 }
6050 x = lang_hooks.decls.omp_clause_default_ctor
6051 (c, unshare_expr (ivar),
6052 build_outer_var_ref (var, ctx));
6053 if (rvarp && ctx->for_simd_scan_phase)
6054 {
6055 if (x)
6056 gimplify_and_add (x, &llist[0]);
6057 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6058 if (x)
6059 gimplify_and_add (x, &llist[1]);
6060 break;
6061 }
6062 else if (rvarp)
6063 {
6064 if (x)
6065 {
6066 gimplify_and_add (x, &llist[0]);
6067
6068 tree ivar2 = unshare_expr (lvar);
6069 TREE_OPERAND (ivar2, 1) = sctx.idx;
6070 x = lang_hooks.decls.omp_clause_default_ctor
6071 (c, ivar2, build_outer_var_ref (var, ctx));
6072 gimplify_and_add (x, &llist[0]);
6073
6074 if (rvar2)
6075 {
6076 x = lang_hooks.decls.omp_clause_default_ctor
6077 (c, unshare_expr (rvar2),
6078 build_outer_var_ref (var, ctx));
6079 gimplify_and_add (x, &llist[0]);
6080 }
6081
6082			      /* For types that need construction, add another
6083				 private var which will be default constructed
6084				 and optionally initialized with
6085				 OMP_CLAUSE_REDUCTION_GIMPLE_INIT; in the
6086				 loop we then want to assign this value instead
6087				 of constructing and destructing it in each
6088				 iteration.  */
6089 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6090 gimple_add_tmp_var (nv);
6091 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6092 ? rvar2
6093 : ivar, 0),
6094 nv);
6095 x = lang_hooks.decls.omp_clause_default_ctor
6096 (c, nv, build_outer_var_ref (var, ctx));
6097 gimplify_and_add (x, ilist);
6098
6099 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6100 {
6101 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6102 x = DECL_VALUE_EXPR (new_vard);
6103 tree vexpr = nv;
6104 if (new_vard != new_var)
6105 vexpr = build_fold_addr_expr (nv);
6106 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6107 lower_omp (&tseq, ctx);
6108 SET_DECL_VALUE_EXPR (new_vard, x);
6109 gimple_seq_add_seq (ilist, tseq);
6110 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6111 }
6112
6113 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6114 if (x)
6115 gimplify_and_add (x, dlist);
6116 }
6117
6118 tree ref = build_outer_var_ref (var, ctx);
6119 x = unshare_expr (ivar);
6120 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6121 ref);
6122 gimplify_and_add (x, &llist[0]);
6123
6124 ref = build_outer_var_ref (var, ctx);
6125 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6126 rvar);
6127 gimplify_and_add (x, &llist[3]);
6128
6129 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6130 if (new_vard == new_var)
6131 SET_DECL_VALUE_EXPR (new_var, lvar);
6132 else
6133 SET_DECL_VALUE_EXPR (new_vard,
6134 build_fold_addr_expr (lvar));
6135
6136 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6137 if (x)
6138 gimplify_and_add (x, &llist[1]);
6139
6140 tree ivar2 = unshare_expr (lvar);
6141 TREE_OPERAND (ivar2, 1) = sctx.idx;
6142 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6143 if (x)
6144 gimplify_and_add (x, &llist[1]);
6145
6146 if (rvar2)
6147 {
6148 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6149 if (x)
6150 gimplify_and_add (x, &llist[1]);
6151 }
6152 break;
6153 }
6154 if (x)
6155 gimplify_and_add (x, &llist[0]);
6156 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6157 {
6158 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6159 lower_omp (&tseq, ctx);
6160 gimple_seq_add_seq (&llist[0], tseq);
6161 }
6162 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6163 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6164 lower_omp (&tseq, ctx);
6165 gimple_seq_add_seq (&llist[1], tseq);
6166 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6167 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6168 if (new_vard == new_var)
6169 SET_DECL_VALUE_EXPR (new_var, lvar);
6170 else
6171 SET_DECL_VALUE_EXPR (new_vard,
6172 build_fold_addr_expr (lvar));
6173 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6174 if (x)
6175 gimplify_and_add (x, &llist[1]);
6176 break;
6177 }
6178		  /* If this is a reference to a constant-size reduction var
6179		     with a placeholder, we haven't emitted the initializer
6180 for it because it is undesirable if SIMD arrays are used.
6181 But if they aren't used, we need to emit the deferred
6182 initialization now. */
6183 else if (omp_is_reference (var) && is_simd)
6184 handle_simd_reference (clause_loc, new_vard, ilist);
6185
6186 tree lab2 = NULL_TREE;
6187 if (cond)
6188 {
6189 gimple *g;
6190 if (!is_parallel_ctx (ctx))
6191 {
6192 tree condv = create_tmp_var (boolean_type_node);
6193 tree m = build_simple_mem_ref (cond);
6194 g = gimple_build_assign (condv, m);
6195 gimple_seq_add_stmt (ilist, g);
6196 tree lab1
6197 = create_artificial_label (UNKNOWN_LOCATION);
6198 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6199 g = gimple_build_cond (NE_EXPR, condv,
6200 boolean_false_node,
6201 lab2, lab1);
6202 gimple_seq_add_stmt (ilist, g);
6203 gimple_seq_add_stmt (ilist,
6204 gimple_build_label (lab1));
6205 }
6206 g = gimple_build_assign (build_simple_mem_ref (cond),
6207 boolean_true_node);
6208 gimple_seq_add_stmt (ilist, g);
6209 }
6210 x = lang_hooks.decls.omp_clause_default_ctor
6211 (c, unshare_expr (new_var),
6212 cond ? NULL_TREE
6213 : build_outer_var_ref (var, ctx));
6214 if (x)
6215 gimplify_and_add (x, ilist);
6216
6217 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6218 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6219 {
6220 if (ctx->for_simd_scan_phase)
6221 goto do_dtor;
6222 if (x || (!is_simd
6223 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6224 {
6225 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6226 gimple_add_tmp_var (nv);
6227 ctx->cb.decl_map->put (new_vard, nv);
6228 x = lang_hooks.decls.omp_clause_default_ctor
6229 (c, nv, build_outer_var_ref (var, ctx));
6230 if (x)
6231 gimplify_and_add (x, ilist);
6232 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6233 {
6234 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6235 tree vexpr = nv;
6236 if (new_vard != new_var)
6237 vexpr = build_fold_addr_expr (nv);
6238 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6239 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6240 lower_omp (&tseq, ctx);
6241 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6242 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6243 gimple_seq_add_seq (ilist, tseq);
6244 }
6245 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6246 if (is_simd && ctx->scan_exclusive)
6247 {
6248 tree nv2
6249 = create_tmp_var_raw (TREE_TYPE (new_var));
6250 gimple_add_tmp_var (nv2);
6251 ctx->cb.decl_map->put (nv, nv2);
6252 x = lang_hooks.decls.omp_clause_default_ctor
6253 (c, nv2, build_outer_var_ref (var, ctx));
6254 gimplify_and_add (x, ilist);
6255 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6256 if (x)
6257 gimplify_and_add (x, dlist);
6258 }
6259 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6260 if (x)
6261 gimplify_and_add (x, dlist);
6262 }
6263 else if (is_simd
6264 && ctx->scan_exclusive
6265 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6266 {
6267 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6268 gimple_add_tmp_var (nv2);
6269 ctx->cb.decl_map->put (new_vard, nv2);
6270 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6271 if (x)
6272 gimplify_and_add (x, dlist);
6273 }
6274 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6275 goto do_dtor;
6276 }
6277
6278 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6279 {
6280 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6281 lower_omp (&tseq, ctx);
6282 gimple_seq_add_seq (ilist, tseq);
6283 }
6284 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6285 if (is_simd)
6286 {
6287 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6288 lower_omp (&tseq, ctx);
6289 gimple_seq_add_seq (dlist, tseq);
6290 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6291 }
6292 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6293 if (cond)
6294 {
6295 if (lab2)
6296 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6297 break;
6298 }
6299 goto do_dtor;
6300 }
6301 else
6302 {
6303 x = omp_reduction_init (c, TREE_TYPE (new_var));
6304 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6305 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6306
6307 if (cond)
6308 {
6309 gimple *g;
6310 tree lab2 = NULL_TREE;
6311 /* GOMP_taskgroup_reduction_register memsets the whole
6312 array to zero. If the initializer is zero, we don't
6313			 need to initialize it again; just mark it as ever
6314 used unconditionally, i.e. cond = true. */
6315 if (initializer_zerop (x))
6316 {
6317 g = gimple_build_assign (build_simple_mem_ref (cond),
6318 boolean_true_node);
6319 gimple_seq_add_stmt (ilist, g);
6320 break;
6321 }
6322
6323 /* Otherwise, emit
6324 if (!cond) { cond = true; new_var = x; } */
6325 if (!is_parallel_ctx (ctx))
6326 {
6327 tree condv = create_tmp_var (boolean_type_node);
6328 tree m = build_simple_mem_ref (cond);
6329 g = gimple_build_assign (condv, m);
6330 gimple_seq_add_stmt (ilist, g);
6331 tree lab1
6332 = create_artificial_label (UNKNOWN_LOCATION);
6333 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6334 g = gimple_build_cond (NE_EXPR, condv,
6335 boolean_false_node,
6336 lab2, lab1);
6337 gimple_seq_add_stmt (ilist, g);
6338 gimple_seq_add_stmt (ilist,
6339 gimple_build_label (lab1));
6340 }
6341 g = gimple_build_assign (build_simple_mem_ref (cond),
6342 boolean_true_node);
6343 gimple_seq_add_stmt (ilist, g);
6344 gimplify_assign (new_var, x, ilist);
6345 if (lab2)
6346 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6347 break;
6348 }
6349
6350 /* reduction(-:var) sums up the partial results, so it
6351 acts identically to reduction(+:var). */
6352 if (code == MINUS_EXPR)
6353 code = PLUS_EXPR;
6354
6355 tree new_vard = new_var;
6356 if (is_simd && omp_is_reference (var))
6357 {
6358 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6359 new_vard = TREE_OPERAND (new_var, 0);
6360 gcc_assert (DECL_P (new_vard));
6361 }
6362 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6363 if (is_simd
6364 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6365 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6366 rvarp = &rvar;
6367 if (is_simd
6368 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6369 ivar, lvar, rvarp,
6370 &rvar2))
6371 {
6372 if (new_vard != new_var)
6373 {
6374 SET_DECL_VALUE_EXPR (new_vard,
6375 build_fold_addr_expr (lvar));
6376 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6377 }
6378
6379 tree ref = build_outer_var_ref (var, ctx);
6380
6381 if (rvarp)
6382 {
6383 if (ctx->for_simd_scan_phase)
6384 break;
6385 gimplify_assign (ivar, ref, &llist[0]);
6386 ref = build_outer_var_ref (var, ctx);
6387 gimplify_assign (ref, rvar, &llist[3]);
6388 break;
6389 }
6390
6391 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6392
6393 if (sctx.is_simt)
6394 {
6395 if (!simt_lane)
6396 simt_lane = create_tmp_var (unsigned_type_node);
6397 x = build_call_expr_internal_loc
6398 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6399 TREE_TYPE (ivar), 2, ivar, simt_lane);
6400 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6401 gimplify_assign (ivar, x, &llist[2]);
6402 }
6403 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6404 ref = build_outer_var_ref (var, ctx);
6405 gimplify_assign (ref, x, &llist[1]);
6406
6407 }
6408 else
6409 {
6410 lower_private_allocate (var, new_var, allocator,
6411 allocate_ptr, ilist, ctx,
6412 false, NULL_TREE);
6413 if (omp_is_reference (var) && is_simd)
6414 handle_simd_reference (clause_loc, new_vard, ilist);
6415 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6416 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6417 break;
6418 gimplify_assign (new_var, x, ilist);
6419 if (is_simd)
6420 {
6421 tree ref = build_outer_var_ref (var, ctx);
6422
6423 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6424 ref = build_outer_var_ref (var, ctx);
6425 gimplify_assign (ref, x, dlist);
6426 }
6427 if (allocator)
6428 goto do_dtor;
6429 }
6430 }
6431 break;
6432
6433 default:
6434 gcc_unreachable ();
6435 }
6436 }
6437 }
6438 if (tskred_avar)
6439 {
6440 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6441 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6442 }
6443
6444 if (known_eq (sctx.max_vf, 1U))
6445 {
6446 sctx.is_simt = false;
6447 if (ctx->lastprivate_conditional_map)
6448 {
6449 if (gimple_omp_for_combined_into_p (ctx->stmt))
6450 {
6451	      /* Signal to lower_omp_1 that it should use the parent context.  */
6452 ctx->combined_into_simd_safelen1 = true;
6453 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6454 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6455 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6456 {
6457 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6458 omp_context *outer = ctx->outer;
6459 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6460 outer = outer->outer;
6461 tree *v = ctx->lastprivate_conditional_map->get (o);
6462 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6463 tree *pv = outer->lastprivate_conditional_map->get (po);
6464 *v = *pv;
6465 }
6466 }
6467 else
6468 {
6469 /* When not vectorized, treat lastprivate(conditional:) like
6470 normal lastprivate, as there will be just one simd lane
6471 writing the privatized variable. */
6472 delete ctx->lastprivate_conditional_map;
6473 ctx->lastprivate_conditional_map = NULL;
6474 }
6475 }
6476 }
6477
6478 if (nonconst_simd_if)
6479 {
6480 if (sctx.lane == NULL_TREE)
6481 {
6482 sctx.idx = create_tmp_var (unsigned_type_node);
6483 sctx.lane = create_tmp_var (unsigned_type_node);
6484 }
6485 /* FIXME: For now. */
6486 sctx.is_simt = false;
6487 }
6488
6489 if (sctx.lane || sctx.is_simt)
6490 {
6491 uid = create_tmp_var (ptr_type_node, "simduid");
6492      /* We don't want uninit warnings on simduid; it is always uninitialized,
6493	 since we use it not for its value but only for its DECL_UID.  */
6494 TREE_NO_WARNING (uid) = 1;
6495 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6496 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6497 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6498 gimple_omp_for_set_clauses (ctx->stmt, c);
6499 }
6500  /* Emit calls denoting privatized variables and initializing a pointer to
6501     the structure that holds private variables as fields, after the ompdevlow pass.  */
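  /* Roughly (a sketch of the emitted IL):
       simduid.N = GOMP_SIMT_ENTER (simduid.N, &priv1, ..., &privM);
       .omp_simt = GOMP_SIMT_ENTER_ALLOC (simduid.N);  */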
6502 if (sctx.is_simt)
6503 {
6504 sctx.simt_eargs[0] = uid;
6505 gimple *g
6506 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6507 gimple_call_set_lhs (g, uid);
6508 gimple_seq_add_stmt (ilist, g);
6509 sctx.simt_eargs.release ();
6510
6511 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6512 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6513 gimple_call_set_lhs (g, simtrec);
6514 gimple_seq_add_stmt (ilist, g);
6515 }
6516 if (sctx.lane)
6517 {
6518 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6519 2 + (nonconst_simd_if != NULL),
6520 uid, integer_zero_node,
6521 nonconst_simd_if);
6522 gimple_call_set_lhs (g, sctx.lane);
6523 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6524 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6525 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6526 build_int_cst (unsigned_type_node, 0));
6527 gimple_seq_add_stmt (ilist, g);
6528 if (sctx.lastlane)
6529 {
6530 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6531 2, uid, sctx.lane);
6532 gimple_call_set_lhs (g, sctx.lastlane);
6533 gimple_seq_add_stmt (dlist, g);
6534 gimple_seq_add_seq (dlist, llist[3]);
6535 }
6536 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
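	  /* That is, roughly (a sketch of the emitted loop):
	       for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
		 ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);  */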
6537 if (llist[2])
6538 {
6539 tree simt_vf = create_tmp_var (unsigned_type_node);
6540 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6541 gimple_call_set_lhs (g, simt_vf);
6542 gimple_seq_add_stmt (dlist, g);
6543
6544 tree t = build_int_cst (unsigned_type_node, 1);
6545 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6546 gimple_seq_add_stmt (dlist, g);
6547
6548 t = build_int_cst (unsigned_type_node, 0);
6549 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6550 gimple_seq_add_stmt (dlist, g);
6551
6552 tree body = create_artificial_label (UNKNOWN_LOCATION);
6553 tree header = create_artificial_label (UNKNOWN_LOCATION);
6554 tree end = create_artificial_label (UNKNOWN_LOCATION);
6555 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6556 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6557
6558 gimple_seq_add_seq (dlist, llist[2]);
6559
6560 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6561 gimple_seq_add_stmt (dlist, g);
6562
6563 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6564 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6565 gimple_seq_add_stmt (dlist, g);
6566
6567 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6568 }
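      /* Emit the per-SIMD-lane initialization (i == 0, into ILIST) and
	 finalization (i == 1, into DLIST) sequences, each wrapped in a
	 loop over the lanes, roughly (a sketch):
	   for (sctx.idx = 0; sctx.idx < vf; sctx.idx++)
	     <llist[i]>;  */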
6569 for (int i = 0; i < 2; i++)
6570 if (llist[i])
6571 {
6572 tree vf = create_tmp_var (unsigned_type_node);
6573 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6574 gimple_call_set_lhs (g, vf);
6575 gimple_seq *seq = i == 0 ? ilist : dlist;
6576 gimple_seq_add_stmt (seq, g);
6577 tree t = build_int_cst (unsigned_type_node, 0);
6578 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6579 gimple_seq_add_stmt (seq, g);
6580 tree body = create_artificial_label (UNKNOWN_LOCATION);
6581 tree header = create_artificial_label (UNKNOWN_LOCATION);
6582 tree end = create_artificial_label (UNKNOWN_LOCATION);
6583 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6584 gimple_seq_add_stmt (seq, gimple_build_label (body));
6585 gimple_seq_add_seq (seq, llist[i]);
6586 t = build_int_cst (unsigned_type_node, 1);
6587 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6588 gimple_seq_add_stmt (seq, g);
6589 gimple_seq_add_stmt (seq, gimple_build_label (header));
6590 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6591 gimple_seq_add_stmt (seq, g);
6592 gimple_seq_add_stmt (seq, gimple_build_label (end));
6593 }
6594 }
6595 if (sctx.is_simt)
6596 {
6597 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6598 gimple *g
6599 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6600 gimple_seq_add_stmt (dlist, g);
6601 }
6602
6603 /* The copyin sequence is not to be executed by the main thread, since
6604     that would result in self-copies.  A self-copy might be harmless
6605     for scalars, but it certainly is not for C++ operator=.  */
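  /* I.e. guard it as (a sketch):
       if (__builtin_omp_get_thread_num () != 0)
	 <copyin_seq>;  */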
6606 if (copyin_seq)
6607 {
6608 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6609 0);
6610 x = build2 (NE_EXPR, boolean_type_node, x,
6611 build_int_cst (TREE_TYPE (x), 0));
6612 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6613 gimplify_and_add (x, ilist);
6614 }
6615
6616 /* If any copyin variable is passed by reference, we must ensure the
6617 master thread doesn't modify it before it is copied over in all
6618 threads. Similarly for variables in both firstprivate and
6619 lastprivate clauses we need to ensure the lastprivate copying
6620 happens after firstprivate copying in all threads. And similarly
6621     for UDRs if the initializer expression refers to omp_orig.  */
6622 if (copyin_by_ref || lastprivate_firstprivate
6623 || (reduction_omp_orig_ref
6624 && !ctx->scan_inclusive
6625 && !ctx->scan_exclusive))
6626 {
6627 /* Don't add any barrier for #pragma omp simd or
6628 #pragma omp distribute. */
6629 if (!is_task_ctx (ctx)
6630 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6631 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6632 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6633 }
6634
6635 /* If max_vf is non-zero, then we can use only a vectorization factor
6636 up to the max_vf we chose. So stick it into the safelen clause. */
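  /* E.g. (a sketch): if we settled on max_vf == 16 and the construct
     either lacks a safelen clause or specifies one greater than 16,
     prepend a new safelen(16) clause so the vectorizer never exceeds
     the privatization arrays' size.  */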
6637 if (maybe_ne (sctx.max_vf, 0U))
6638 {
6639 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6640 OMP_CLAUSE_SAFELEN);
6641 poly_uint64 safe_len;
6642 if (c == NULL_TREE
6643 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6644 && maybe_gt (safe_len, sctx.max_vf)))
6645 {
6646 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6647 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6648 sctx.max_vf);
6649 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6650 gimple_omp_for_set_clauses (ctx->stmt, c);
6651 }
6652 }
6653 }
6654
6655 /* Create temporary variables for lastprivate(conditional:) implementation
6656 in context CTX with CLAUSES. */
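/* For instance (a hypothetical example):

     #pragma omp for lastprivate(conditional: x)
     for (i = 0; i < n; i++)
       if (f (i))
	 x = g (i);

   each conditional store to X is paired with recording the current
   iteration number into a temporary, and at the end the value from the
   highest recorded iteration is copied back to the original X.  */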
6657
6658 static void
6659 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6660 {
6661 tree iter_type = NULL_TREE;
6662 tree cond_ptr = NULL_TREE;
6663 tree iter_var = NULL_TREE;
6664 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6665 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6666 tree next = *clauses;
6667 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6668 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6669 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6670 {
6671 if (is_simd)
6672 {
6673 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6674 gcc_assert (cc);
6675 if (iter_type == NULL_TREE)
6676 {
6677 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6678 iter_var = create_tmp_var_raw (iter_type);
6679 DECL_CONTEXT (iter_var) = current_function_decl;
6680 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6681 DECL_CHAIN (iter_var) = ctx->block_vars;
6682 ctx->block_vars = iter_var;
6683 tree c3
6684 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6685 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6686 OMP_CLAUSE_DECL (c3) = iter_var;
6687 OMP_CLAUSE_CHAIN (c3) = *clauses;
6688 *clauses = c3;
6689 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6690 }
6691 next = OMP_CLAUSE_CHAIN (cc);
6692 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6693 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6694 ctx->lastprivate_conditional_map->put (o, v);
6695 continue;
6696 }
6697 if (iter_type == NULL)
6698 {
6699 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6700 {
6701 struct omp_for_data fd;
6702 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6703 NULL);
6704 iter_type = unsigned_type_for (fd.iter_type);
6705 }
6706 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6707 iter_type = unsigned_type_node;
6708 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6709 if (c2)
6710 {
6711 cond_ptr
6712 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6713 OMP_CLAUSE_DECL (c2) = cond_ptr;
6714 }
6715 else
6716 {
6717 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6718 DECL_CONTEXT (cond_ptr) = current_function_decl;
6719 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6720 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6721 ctx->block_vars = cond_ptr;
6722 c2 = build_omp_clause (UNKNOWN_LOCATION,
6723 OMP_CLAUSE__CONDTEMP_);
6724 OMP_CLAUSE_DECL (c2) = cond_ptr;
6725 OMP_CLAUSE_CHAIN (c2) = *clauses;
6726 *clauses = c2;
6727 }
6728 iter_var = create_tmp_var_raw (iter_type);
6729 DECL_CONTEXT (iter_var) = current_function_decl;
6730 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6731 DECL_CHAIN (iter_var) = ctx->block_vars;
6732 ctx->block_vars = iter_var;
6733 tree c3
6734 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6735 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6736 OMP_CLAUSE_DECL (c3) = iter_var;
6737 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6738 OMP_CLAUSE_CHAIN (c2) = c3;
6739 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6740 }
6741 tree v = create_tmp_var_raw (iter_type);
6742 DECL_CONTEXT (v) = current_function_decl;
6743 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6744 DECL_CHAIN (v) = ctx->block_vars;
6745 ctx->block_vars = v;
6746 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6747 ctx->lastprivate_conditional_map->put (o, v);
6748 }
6749 }
6750
6751
6752 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6753 both parallel and workshare constructs. PREDICATE may be NULL if it's
6754   always true.  BODY_P is the sequence in which to insert early
6755   initialization if needed, STMT_LIST is where the non-conditional
6756   lastprivate handling goes, and CSTMT_LIST is a sequence that needs
6757   to be run in a critical section.  */
6758
6759 static void
6760 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6761 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6762 omp_context *ctx)
6763 {
6764 tree x, c, label = NULL, orig_clauses = clauses;
6765 bool par_clauses = false;
6766 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
6767 unsigned HOST_WIDE_INT conditional_off = 0;
6768 gimple_seq post_stmt_list = NULL;
6769
6770 /* Early exit if there are no lastprivate or linear clauses. */
6771 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6772 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6773 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6774 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6775 break;
6776 if (clauses == NULL)
6777 {
6778 /* If this was a workshare clause, see if it had been combined
6779 with its parallel. In that case, look for the clauses on the
6780 parallel statement itself. */
6781 if (is_parallel_ctx (ctx))
6782 return;
6783
6784 ctx = ctx->outer;
6785 if (ctx == NULL || !is_parallel_ctx (ctx))
6786 return;
6787
6788 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6789 OMP_CLAUSE_LASTPRIVATE);
6790 if (clauses == NULL)
6791 return;
6792 par_clauses = true;
6793 }
6794
6795 bool maybe_simt = false;
6796 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6797 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6798 {
6799 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6800 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6801 if (simduid)
6802 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6803 }
6804
6805 if (predicate)
6806 {
6807 gcond *stmt;
6808 tree label_true, arm1, arm2;
6809 enum tree_code pred_code = TREE_CODE (predicate);
6810
6811 label = create_artificial_label (UNKNOWN_LOCATION);
6812 label_true = create_artificial_label (UNKNOWN_LOCATION);
6813 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6814 {
6815 arm1 = TREE_OPERAND (predicate, 0);
6816 arm2 = TREE_OPERAND (predicate, 1);
6817 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6818 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6819 }
6820 else
6821 {
6822 arm1 = predicate;
6823 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6824 arm2 = boolean_false_node;
6825 pred_code = NE_EXPR;
6826 }
6827 if (maybe_simt)
6828 {
6829 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6830 c = fold_convert (integer_type_node, c);
6831 simtcond = create_tmp_var (integer_type_node);
6832 gimplify_assign (simtcond, c, stmt_list);
6833 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6834 1, simtcond);
6835 c = create_tmp_var (integer_type_node);
6836 gimple_call_set_lhs (g, c);
6837 gimple_seq_add_stmt (stmt_list, g);
6838 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6839 label_true, label);
6840 }
6841 else
6842 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6843 gimple_seq_add_stmt (stmt_list, stmt);
6844 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6845 }
6846
6847 tree cond_ptr = NULL_TREE;
6848 for (c = clauses; c ;)
6849 {
6850 tree var, new_var;
6851 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6852 gimple_seq *this_stmt_list = stmt_list;
6853 tree lab2 = NULL_TREE;
6854
6855 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6856 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6857 && ctx->lastprivate_conditional_map
6858 && !ctx->combined_into_simd_safelen1)
6859 {
6860 gcc_assert (body_p);
6861 if (simduid)
6862 goto next;
6863 if (cond_ptr == NULL_TREE)
6864 {
6865 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6866 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6867 }
6868 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6869 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6870 tree v = *ctx->lastprivate_conditional_map->get (o);
6871 gimplify_assign (v, build_zero_cst (type), body_p);
6872 this_stmt_list = cstmt_list;
6873 tree mem;
6874 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6875 {
6876 mem = build2 (MEM_REF, type, cond_ptr,
6877 build_int_cst (TREE_TYPE (cond_ptr),
6878 conditional_off));
6879 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6880 }
6881 else
6882 mem = build4 (ARRAY_REF, type, cond_ptr,
6883 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6884 tree mem2 = copy_node (mem);
6885 gimple_seq seq = NULL;
6886 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6887 gimple_seq_add_seq (this_stmt_list, seq);
6888 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6889 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6890 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6891 gimple_seq_add_stmt (this_stmt_list, g);
6892 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6893 gimplify_assign (mem2, v, this_stmt_list);
6894 }
6895 else if (predicate
6896 && ctx->combined_into_simd_safelen1
6897 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6898 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6899 && ctx->lastprivate_conditional_map)
6900 this_stmt_list = &post_stmt_list;
6901
6902 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6903 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6904 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6905 {
6906 var = OMP_CLAUSE_DECL (c);
6907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6908 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6909 && is_taskloop_ctx (ctx))
6910 {
6911 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6912 new_var = lookup_decl (var, ctx->outer);
6913 }
6914 else
6915 {
6916 new_var = lookup_decl (var, ctx);
6917 /* Avoid uninitialized warnings for lastprivate and
6918 for linear iterators. */
6919 if (predicate
6920 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6921 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6922 TREE_NO_WARNING (new_var) = 1;
6923 }
6924
6925 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6926 {
6927 tree val = DECL_VALUE_EXPR (new_var);
6928 if (TREE_CODE (val) == ARRAY_REF
6929 && VAR_P (TREE_OPERAND (val, 0))
6930 && lookup_attribute ("omp simd array",
6931 DECL_ATTRIBUTES (TREE_OPERAND (val,
6932 0))))
6933 {
6934 if (lastlane == NULL)
6935 {
6936 lastlane = create_tmp_var (unsigned_type_node);
6937 gcall *g
6938 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6939 2, simduid,
6940 TREE_OPERAND (val, 1));
6941 gimple_call_set_lhs (g, lastlane);
6942 gimple_seq_add_stmt (this_stmt_list, g);
6943 }
6944 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6945 TREE_OPERAND (val, 0), lastlane,
6946 NULL_TREE, NULL_TREE);
6947 TREE_THIS_NOTRAP (new_var) = 1;
6948 }
6949 }
6950 else if (maybe_simt)
6951 {
6952 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6953 ? DECL_VALUE_EXPR (new_var)
6954 : new_var);
6955 if (simtlast == NULL)
6956 {
6957 simtlast = create_tmp_var (unsigned_type_node);
6958 gcall *g = gimple_build_call_internal
6959 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6960 gimple_call_set_lhs (g, simtlast);
6961 gimple_seq_add_stmt (this_stmt_list, g);
6962 }
6963 x = build_call_expr_internal_loc
6964 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6965 TREE_TYPE (val), 2, val, simtlast);
6966 new_var = unshare_expr (new_var);
6967 gimplify_assign (new_var, x, this_stmt_list);
6968 new_var = unshare_expr (new_var);
6969 }
6970
6971 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6972 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6973 {
6974 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6975 gimple_seq_add_seq (this_stmt_list,
6976 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6977 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6978 }
6979 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6980 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6981 {
6982 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6983 gimple_seq_add_seq (this_stmt_list,
6984 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6985 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6986 }
6987
6988 x = NULL_TREE;
6989 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6990 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6991 && is_taskloop_ctx (ctx))
6992 {
6993 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6994 ctx->outer->outer);
6995 if (is_global_var (ovar))
6996 x = ovar;
6997 }
6998 if (!x)
6999 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7000 if (omp_is_reference (var))
7001 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7002 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7003 gimplify_and_add (x, this_stmt_list);
7004
7005 if (lab2)
7006 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7007 }
7008
7009 next:
7010 c = OMP_CLAUSE_CHAIN (c);
7011 if (c == NULL && !par_clauses)
7012 {
7013 /* If this was a workshare clause, see if it had been combined
7014 with its parallel. In that case, continue looking for the
7015 clauses also on the parallel statement itself. */
7016 if (is_parallel_ctx (ctx))
7017 break;
7018
7019 ctx = ctx->outer;
7020 if (ctx == NULL || !is_parallel_ctx (ctx))
7021 break;
7022
7023 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7024 OMP_CLAUSE_LASTPRIVATE);
7025 par_clauses = true;
7026 }
7027 }
7028
7029 if (label)
7030 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7031 gimple_seq_add_seq (stmt_list, post_stmt_list);
7032 }
7033
7034 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7035 (which might be a placeholder). INNER is true if this is an inner
7036 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7037 join markers. Generate the before-loop forking sequence in
7038   FORK_SEQ and the after-loop joining sequence in JOIN_SEQ.  The
7039 general form of these sequences is
7040
7041 GOACC_REDUCTION_SETUP
7042 GOACC_FORK
7043 GOACC_REDUCTION_INIT
7044 ...
7045 GOACC_REDUCTION_FINI
7046 GOACC_JOIN
7047 GOACC_REDUCTION_TEARDOWN. */
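/* For example (a sketch): for
     #pragma acc parallel loop reduction(+:sum)
   each of the markers above becomes an IFN_GOACC_REDUCTION internal
   call carrying the corresponding GOACC_REDUCTION_* code, later
   expanded when the offload target is known.  */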
7048
7049 static void
7050 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7051 gcall *fork, gcall *join, gimple_seq *fork_seq,
7052 gimple_seq *join_seq, omp_context *ctx)
7053 {
7054 gimple_seq before_fork = NULL;
7055 gimple_seq after_fork = NULL;
7056 gimple_seq before_join = NULL;
7057 gimple_seq after_join = NULL;
7058 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7059 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7060 unsigned offset = 0;
7061
7062 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7063 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7064 {
7065 /* No 'reduction' clauses on OpenACC 'kernels'. */
7066 gcc_checking_assert (!is_oacc_kernels (ctx));
7067 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7068 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7069
7070 tree orig = OMP_CLAUSE_DECL (c);
7071 tree var = maybe_lookup_decl (orig, ctx);
7072 tree ref_to_res = NULL_TREE;
7073 tree incoming, outgoing, v1, v2, v3;
7074 bool is_private = false;
7075
7076 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7077 if (rcode == MINUS_EXPR)
7078 rcode = PLUS_EXPR;
7079 else if (rcode == TRUTH_ANDIF_EXPR)
7080 rcode = BIT_AND_EXPR;
7081 else if (rcode == TRUTH_ORIF_EXPR)
7082 rcode = BIT_IOR_EXPR;
7083 tree op = build_int_cst (unsigned_type_node, rcode);
7084
7085 if (!var)
7086 var = orig;
7087
7088 incoming = outgoing = var;
7089
7090 if (!inner)
7091 {
7092 /* See if an outer construct also reduces this variable. */
7093 omp_context *outer = ctx;
7094
7095 while (omp_context *probe = outer->outer)
7096 {
7097 enum gimple_code type = gimple_code (probe->stmt);
7098 tree cls;
7099
7100 switch (type)
7101 {
7102 case GIMPLE_OMP_FOR:
7103 cls = gimple_omp_for_clauses (probe->stmt);
7104 break;
7105
7106 case GIMPLE_OMP_TARGET:
7107 /* No 'reduction' clauses inside OpenACC 'kernels'
7108 regions. */
7109 gcc_checking_assert (!is_oacc_kernels (probe));
7110
7111 if (!is_gimple_omp_offloaded (probe->stmt))
7112 goto do_lookup;
7113
7114 cls = gimple_omp_target_clauses (probe->stmt);
7115 break;
7116
7117 default:
7118 goto do_lookup;
7119 }
7120
7121 outer = probe;
7122 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7123 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7124 && orig == OMP_CLAUSE_DECL (cls))
7125 {
7126 incoming = outgoing = lookup_decl (orig, probe);
7127 goto has_outer_reduction;
7128 }
7129 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7130 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7131 && orig == OMP_CLAUSE_DECL (cls))
7132 {
7133 is_private = true;
7134 goto do_lookup;
7135 }
7136 }
7137
7138 do_lookup:
7139 /* This is the outermost construct with this reduction;
7140 see if there's a mapping for it. */
7141 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7142 && maybe_lookup_field (orig, outer) && !is_private)
7143 {
7144 ref_to_res = build_receiver_ref (orig, false, outer);
7145 if (omp_is_reference (orig))
7146 ref_to_res = build_simple_mem_ref (ref_to_res);
7147
7148 tree type = TREE_TYPE (var);
7149 if (POINTER_TYPE_P (type))
7150 type = TREE_TYPE (type);
7151
7152 outgoing = var;
7153 incoming = omp_reduction_init_op (loc, rcode, type);
7154 }
7155 else
7156 {
7157 /* Try to look up the reduction variable in enclosing contexts;
7158 use the original if no mapping is found. */
7159 tree t = NULL_TREE;
7160 omp_context *c = ctx->outer;
7161 while (c && !t)
7162 {
7163 t = maybe_lookup_decl (orig, c);
7164 c = c->outer;
7165 }
7166 incoming = outgoing = (t ? t : orig);
7167 }
7168
7169 has_outer_reduction:;
7170 }
7171
7172 if (!ref_to_res)
7173 ref_to_res = integer_zero_node;
7174
7175 if (omp_is_reference (orig))
7176 {
7177 tree type = TREE_TYPE (var);
7178 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7179
7180 if (!inner)
7181 {
7182 tree x = create_tmp_var (TREE_TYPE (type), id);
7183 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7184 }
7185
7186 v1 = create_tmp_var (type, id);
7187 v2 = create_tmp_var (type, id);
7188 v3 = create_tmp_var (type, id);
7189
7190 gimplify_assign (v1, var, fork_seq);
7191 gimplify_assign (v2, var, fork_seq);
7192 gimplify_assign (v3, var, fork_seq);
7193
7194 var = build_simple_mem_ref (var);
7195 v1 = build_simple_mem_ref (v1);
7196 v2 = build_simple_mem_ref (v2);
7197 v3 = build_simple_mem_ref (v3);
7198 outgoing = build_simple_mem_ref (outgoing);
7199
7200 if (!TREE_CONSTANT (incoming))
7201 incoming = build_simple_mem_ref (incoming);
7202 }
7203 else
7204 v1 = v2 = v3 = var;
7205
7206 /* Determine position in reduction buffer, which may be used
7207 by target. The parser has ensured that this is not a
7208 variable-sized type. */
7209 fixed_size_mode mode
7210 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7211 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7212 offset = (offset + align - 1) & ~(align - 1);
7213 tree off = build_int_cst (sizetype, offset);
7214 offset += GET_MODE_SIZE (mode);
7215
7216 if (!init_code)
7217 {
7218 init_code = build_int_cst (integer_type_node,
7219 IFN_GOACC_REDUCTION_INIT);
7220 fini_code = build_int_cst (integer_type_node,
7221 IFN_GOACC_REDUCTION_FINI);
7222 setup_code = build_int_cst (integer_type_node,
7223 IFN_GOACC_REDUCTION_SETUP);
7224 teardown_code = build_int_cst (integer_type_node,
7225 IFN_GOACC_REDUCTION_TEARDOWN);
7226 }
7227
7228 tree setup_call
7229 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7230 TREE_TYPE (var), 6, setup_code,
7231 unshare_expr (ref_to_res),
7232 incoming, level, op, off);
7233 tree init_call
7234 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7235 TREE_TYPE (var), 6, init_code,
7236 unshare_expr (ref_to_res),
7237 v1, level, op, off);
7238 tree fini_call
7239 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7240 TREE_TYPE (var), 6, fini_code,
7241 unshare_expr (ref_to_res),
7242 v2, level, op, off);
7243 tree teardown_call
7244 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7245 TREE_TYPE (var), 6, teardown_code,
7246 ref_to_res, v3, level, op, off);
7247
7248 gimplify_assign (v1, setup_call, &before_fork);
7249 gimplify_assign (v2, init_call, &after_fork);
7250 gimplify_assign (v3, fini_call, &before_join);
7251 gimplify_assign (outgoing, teardown_call, &after_join);
7252 }
7253
7254 /* Now stitch things together. */
7255 gimple_seq_add_seq (fork_seq, before_fork);
7256 if (fork)
7257 gimple_seq_add_stmt (fork_seq, fork);
7258 gimple_seq_add_seq (fork_seq, after_fork);
7259
7260 gimple_seq_add_seq (join_seq, before_join);
7261 if (join)
7262 gimple_seq_add_stmt (join_seq, join);
7263 gimple_seq_add_seq (join_seq, after_join);
7264 }
7265
7266 /* Generate code to implement the REDUCTION clauses and append it
7267 to STMT_SEQP. If CLIST is non-NULL, it points to a sequence that
7268 should also be emitted inside the critical section; in that case
7269 clear *CLIST afterwards. Otherwise leave it as is and let the
7270 caller emit it itself. */
7271
7272 static void
7273 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7274 gimple_seq *clist, omp_context *ctx)
7275 {
7276 gimple_seq sub_seq = NULL;
7277 gimple *stmt;
7278 tree x, c;
7279 int count = 0;
7280
7281 /* OpenACC loop reductions are handled elsewhere. */
7282 if (is_gimple_omp_oacc (ctx->stmt))
7283 return;
7284
7285 /* SIMD reductions are handled in lower_rec_input_clauses. */
7286 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7287 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7288 return;
7289
7290 /* inscan reductions are handled elsewhere. */
7291 if (ctx->scan_inclusive || ctx->scan_exclusive)
7292 return;
7293
7294 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7295 update in that case, otherwise use a lock. */
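/* Illustrative shapes (a sketch; variable names are hypothetical).
   With a single scalar reduction the merge is a relaxed atomic update:

     #pragma omp atomic relaxed
     outer_s = outer_s + private_s;

   With two or more reductions all merges are wrapped in one lock:

     GOMP_atomic_start ();
     outer_s1 = outer_s1 + private_s1;
     outer_s2 = outer_s2 * private_s2;
     GOMP_atomic_end ();  */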
7296 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7297 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7298 && !OMP_CLAUSE_REDUCTION_TASK (c))
7299 {
7300 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7301 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7302 {
7303 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7304 count = -1;
7305 break;
7306 }
7307 count++;
7308 }
7309
7310 if (count == 0)
7311 return;
7312
7313 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7314 {
7315 tree var, ref, new_var, orig_var;
7316 enum tree_code code;
7317 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7318
7319 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7320 || OMP_CLAUSE_REDUCTION_TASK (c))
7321 continue;
7322
7323 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7324 orig_var = var = OMP_CLAUSE_DECL (c);
7325 if (TREE_CODE (var) == MEM_REF)
7326 {
7327 var = TREE_OPERAND (var, 0);
7328 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7329 var = TREE_OPERAND (var, 0);
7330 if (TREE_CODE (var) == ADDR_EXPR)
7331 var = TREE_OPERAND (var, 0);
7332 else
7333 {
7334 /* If this is a pointer- or reference-based array
7335 section, the var could be private in the outer
7336 context, e.g. on an orphaned loop construct. Pretend
7337 this is a private variable's outer reference. */
7338 ccode = OMP_CLAUSE_PRIVATE;
7339 if (TREE_CODE (var) == INDIRECT_REF)
7340 var = TREE_OPERAND (var, 0);
7341 }
7342 orig_var = var;
7343 if (is_variable_sized (var))
7344 {
7345 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7346 var = DECL_VALUE_EXPR (var);
7347 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7348 var = TREE_OPERAND (var, 0);
7349 gcc_assert (DECL_P (var));
7350 }
7351 }
7352 new_var = lookup_decl (var, ctx);
7353 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
7354 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7355 ref = build_outer_var_ref (var, ctx, ccode);
7356 code = OMP_CLAUSE_REDUCTION_CODE (c);
7357
7358 /* reduction(-:var) sums up the partial results, so it acts
7359 identically to reduction(+:var). */
7360 if (code == MINUS_EXPR)
7361 code = PLUS_EXPR;
7362
7363 if (count == 1)
7364 {
7365 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7366
7367 addr = save_expr (addr);
7368 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7369 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
7370 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7371 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7372 gimplify_and_add (x, stmt_seqp);
7373 return;
7374 }
7375 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7376 {
7377 tree d = OMP_CLAUSE_DECL (c);
7378 tree type = TREE_TYPE (d);
7379 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7380 tree i = create_tmp_var (TREE_TYPE (v));
7381 tree ptype = build_pointer_type (TREE_TYPE (type));
7382 tree bias = TREE_OPERAND (d, 1);
7383 d = TREE_OPERAND (d, 0);
7384 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7385 {
7386 tree b = TREE_OPERAND (d, 1);
7387 b = maybe_lookup_decl (b, ctx);
7388 if (b == NULL)
7389 {
7390 b = TREE_OPERAND (d, 1);
7391 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7392 }
7393 if (integer_zerop (bias))
7394 bias = b;
7395 else
7396 {
7397 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7398 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7399 TREE_TYPE (b), b, bias);
7400 }
7401 d = TREE_OPERAND (d, 0);
7402 }
7403 /* For REF, build_outer_var_ref already performs this dereference,
7404 so only NEW_VAR needs one. */
7405 if (TREE_CODE (d) == INDIRECT_REF)
7406 {
7407 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7408 gcc_assert (omp_is_reference (var) && var == orig_var);
7409 }
7410 else if (TREE_CODE (d) == ADDR_EXPR)
7411 {
7412 if (orig_var == var)
7413 {
7414 new_var = build_fold_addr_expr (new_var);
7415 ref = build_fold_addr_expr (ref);
7416 }
7417 }
7418 else
7419 {
7420 gcc_assert (orig_var == var);
7421 if (omp_is_reference (var))
7422 ref = build_fold_addr_expr (ref);
7423 }
7424 if (DECL_P (v))
7425 {
7426 tree t = maybe_lookup_decl (v, ctx);
7427 if (t)
7428 v = t;
7429 else
7430 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7431 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7432 }
7433 if (!integer_zerop (bias))
7434 {
7435 bias = fold_convert_loc (clause_loc, sizetype, bias);
7436 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7437 TREE_TYPE (new_var), new_var,
7438 unshare_expr (bias));
7439 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7440 TREE_TYPE (ref), ref, bias);
7441 }
7442 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7443 ref = fold_convert_loc (clause_loc, ptype, ref);
7444 tree m = create_tmp_var (ptype);
7445 gimplify_assign (m, new_var, stmt_seqp);
7446 new_var = m;
7447 m = create_tmp_var (ptype);
7448 gimplify_assign (m, ref, stmt_seqp);
7449 ref = m;
7450 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7451 tree body = create_artificial_label (UNKNOWN_LOCATION);
7452 tree end = create_artificial_label (UNKNOWN_LOCATION);
7453 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7454 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7455 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7456 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7457 {
7458 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7459 tree decl_placeholder
7460 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7461 SET_DECL_VALUE_EXPR (placeholder, out);
7462 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7463 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7464 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7465 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7466 gimple_seq_add_seq (&sub_seq,
7467 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7468 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7469 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7470 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7471 }
7472 else
7473 {
7474 x = build2 (code, TREE_TYPE (out), out, priv);
7475 out = unshare_expr (out);
7476 gimplify_assign (out, x, &sub_seq);
7477 }
7478 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7479 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7480 gimple_seq_add_stmt (&sub_seq, g);
7481 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7482 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7483 gimple_seq_add_stmt (&sub_seq, g);
7484 g = gimple_build_assign (i, PLUS_EXPR, i,
7485 build_int_cst (TREE_TYPE (i), 1));
7486 gimple_seq_add_stmt (&sub_seq, g);
7487 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7488 gimple_seq_add_stmt (&sub_seq, g);
7489 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7490 }
7491 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7492 {
7493 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7494
7495 if (omp_is_reference (var)
7496 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7497 TREE_TYPE (ref)))
7498 ref = build_fold_addr_expr_loc (clause_loc, ref);
7499 SET_DECL_VALUE_EXPR (placeholder, ref);
7500 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7501 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7502 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7503 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7504 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7505 }
7506 else
7507 {
7508 x = build2 (code, TREE_TYPE (ref), ref, new_var);
7509 ref = build_outer_var_ref (var, ctx);
7510 gimplify_assign (ref, x, &sub_seq);
7511 }
7512 }
7513
7514 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7515 0);
7516 gimple_seq_add_stmt (stmt_seqp, stmt);
7517
7518 gimple_seq_add_seq (stmt_seqp, sub_seq);
7519
7520 if (clist)
7521 {
7522 gimple_seq_add_seq (stmt_seqp, *clist);
7523 *clist = NULL;
7524 }
7525
7526 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7527 0);
7528 gimple_seq_add_stmt (stmt_seqp, stmt);
7529 }
7530
7531
7532 /* Generate code to implement the COPYPRIVATE clauses. */
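/* A sketch of what this generates per copyprivate variable 'a'
   (field names follow the sender/receiver records created for the
   enclosing single construct):

     sender (SLIST):    .omp_copy_o.a = a;
     receiver (RLIST):  a = .omp_copy_i->a;

   with an extra address-take/dereference step when the field is
   passed by reference.  */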
7533
7534 static void
7535 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7536 omp_context *ctx)
7537 {
7538 tree c;
7539
7540 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7541 {
7542 tree var, new_var, ref, x;
7543 bool by_ref;
7544 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7545
7546 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7547 continue;
7548
7549 var = OMP_CLAUSE_DECL (c);
7550 by_ref = use_pointer_for_field (var, NULL);
7551
7552 ref = build_sender_ref (var, ctx);
7553 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7554 if (by_ref)
7555 {
7556 x = build_fold_addr_expr_loc (clause_loc, new_var);
7557 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7558 }
7559 gimplify_assign (ref, x, slist);
7560
7561 ref = build_receiver_ref (var, false, ctx);
7562 if (by_ref)
7563 {
7564 ref = fold_convert_loc (clause_loc,
7565 build_pointer_type (TREE_TYPE (new_var)),
7566 ref);
7567 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7568 }
7569 if (omp_is_reference (var))
7570 {
7571 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7572 ref = build_simple_mem_ref_loc (clause_loc, ref);
7573 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7574 }
7575 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7576 gimplify_and_add (x, rlist);
7577 }
7578 }
7579
7580
7581 /* Generate code to implement the FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7582 and REDUCTION clauses from the sender (aka parent) side. */
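/* As an illustrative sketch (record and variable names hypothetical):
   for a firstprivate variable 'x' the sender stores into the
   marshalling record up front ('do_in', into ILIST):

     .omp_data_o.x = x;

   while a scalar lastprivate 'y' is additionally copied back
   afterwards ('do_out', into OLIST):

     y = .omp_data_o.y;  */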
7583
7584 static void
7585 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7586 omp_context *ctx)
7587 {
7588 tree c, t;
7589 int ignored_looptemp = 0;
7590 bool is_taskloop = false;
7591
7592 /* For taskloop, ignore the first two _looptemp_ clauses; those are
7593 initialized by GOMP_taskloop. */
7594 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7595 {
7596 ignored_looptemp = 2;
7597 is_taskloop = true;
7598 }
7599
7600 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7601 {
7602 tree val, ref, x, var;
7603 bool by_ref, do_in = false, do_out = false;
7604 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7605
7606 switch (OMP_CLAUSE_CODE (c))
7607 {
7608 case OMP_CLAUSE_PRIVATE:
7609 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7610 break;
7611 continue;
7612 case OMP_CLAUSE_FIRSTPRIVATE:
7613 case OMP_CLAUSE_COPYIN:
7614 case OMP_CLAUSE_LASTPRIVATE:
7615 case OMP_CLAUSE_IN_REDUCTION:
7616 case OMP_CLAUSE__REDUCTEMP_:
7617 break;
7618 case OMP_CLAUSE_REDUCTION:
7619 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7620 continue;
7621 break;
7622 case OMP_CLAUSE_SHARED:
7623 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7624 break;
7625 continue;
7626 case OMP_CLAUSE__LOOPTEMP_:
7627 if (ignored_looptemp)
7628 {
7629 ignored_looptemp--;
7630 continue;
7631 }
7632 break;
7633 default:
7634 continue;
7635 }
7636
7637 val = OMP_CLAUSE_DECL (c);
7638 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7639 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7640 && TREE_CODE (val) == MEM_REF)
7641 {
7642 val = TREE_OPERAND (val, 0);
7643 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7644 val = TREE_OPERAND (val, 0);
7645 if (TREE_CODE (val) == INDIRECT_REF
7646 || TREE_CODE (val) == ADDR_EXPR)
7647 val = TREE_OPERAND (val, 0);
7648 if (is_variable_sized (val))
7649 continue;
7650 }
7651
7652 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7653 outer taskloop region. */
7654 omp_context *ctx_for_o = ctx;
7655 if (is_taskloop
7656 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7657 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7658 ctx_for_o = ctx->outer;
7659
7660 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7661
7662 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7663 && is_global_var (var)
7664 && (val == OMP_CLAUSE_DECL (c)
7665 || !is_task_ctx (ctx)
7666 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7667 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7668 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7669 != POINTER_TYPE)))))
7670 continue;
7671
7672 t = omp_member_access_dummy_var (var);
7673 if (t)
7674 {
7675 var = DECL_VALUE_EXPR (var);
7676 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7677 if (o != t)
7678 var = unshare_and_remap (var, t, o);
7679 else
7680 var = unshare_expr (var);
7681 }
7682
7683 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7684 {
7685 /* Handle taskloop firstprivate/lastprivate, where the
7686 lastprivate on GIMPLE_OMP_TASK is represented as
7687 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7688 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7689 x = omp_build_component_ref (ctx->sender_decl, f);
7690 if (use_pointer_for_field (val, ctx))
7691 var = build_fold_addr_expr (var);
7692 gimplify_assign (x, var, ilist);
7693 DECL_ABSTRACT_ORIGIN (f) = NULL;
7694 continue;
7695 }
7696
7697 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7698 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7699 || val == OMP_CLAUSE_DECL (c))
7700 && is_variable_sized (val))
7701 continue;
7702 by_ref = use_pointer_for_field (val, NULL);
7703
7704 switch (OMP_CLAUSE_CODE (c))
7705 {
7706 case OMP_CLAUSE_FIRSTPRIVATE:
7707 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7708 && !by_ref
7709 && is_task_ctx (ctx))
7710 TREE_NO_WARNING (var) = 1;
7711 do_in = true;
7712 break;
7713
7714 case OMP_CLAUSE_PRIVATE:
7715 case OMP_CLAUSE_COPYIN:
7716 case OMP_CLAUSE__LOOPTEMP_:
7717 case OMP_CLAUSE__REDUCTEMP_:
7718 do_in = true;
7719 break;
7720
7721 case OMP_CLAUSE_LASTPRIVATE:
7722 if (by_ref || omp_is_reference (val))
7723 {
7724 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7725 continue;
7726 do_in = true;
7727 }
7728 else
7729 {
7730 do_out = true;
7731 if (lang_hooks.decls.omp_private_outer_ref (val))
7732 do_in = true;
7733 }
7734 break;
7735
7736 case OMP_CLAUSE_REDUCTION:
7737 case OMP_CLAUSE_IN_REDUCTION:
7738 do_in = true;
7739 if (val == OMP_CLAUSE_DECL (c))
7740 {
7741 if (is_task_ctx (ctx))
7742 by_ref = use_pointer_for_field (val, ctx);
7743 else
7744 do_out = !(by_ref || omp_is_reference (val));
7745 }
7746 else
7747 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7748 break;
7749
7750 default:
7751 gcc_unreachable ();
7752 }
7753
7754 if (do_in)
7755 {
7756 ref = build_sender_ref (val, ctx);
7757 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7758 gimplify_assign (ref, x, ilist);
7759 if (is_task_ctx (ctx))
7760 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7761 }
7762
7763 if (do_out)
7764 {
7765 ref = build_sender_ref (val, ctx);
7766 gimplify_assign (var, ref, olist);
7767 }
7768 }
7769 }
7770
7771 /* Generate code to implement SHARED from the sender (aka parent)
7772 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7773 list things that got automatically shared. */
7774
7775 static void
7776 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7777 {
7778 tree var, ovar, nvar, t, f, x, record_type;
7779
7780 if (ctx->record_type == NULL)
7781 return;
7782
7783 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7784 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7785 {
7786 ovar = DECL_ABSTRACT_ORIGIN (f);
7787 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7788 continue;
7789
7790 nvar = maybe_lookup_decl (ovar, ctx);
7791 if (!nvar
7792 || !DECL_HAS_VALUE_EXPR_P (nvar)
7793 || (ctx->allocate_map
7794 && ctx->allocate_map->get (ovar)))
7795 continue;
7796
7797 /* If CTX is a nested parallel directive, find the immediately
7798 enclosing parallel or workshare construct that contains a
7799 mapping for OVAR. */
7800 var = lookup_decl_in_outer_ctx (ovar, ctx);
7801
7802 t = omp_member_access_dummy_var (var);
7803 if (t)
7804 {
7805 var = DECL_VALUE_EXPR (var);
7806 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7807 if (o != t)
7808 var = unshare_and_remap (var, t, o);
7809 else
7810 var = unshare_expr (var);
7811 }
7812
7813 if (use_pointer_for_field (ovar, ctx))
7814 {
7815 x = build_sender_ref (ovar, ctx);
7816 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7817 && TREE_TYPE (f) == TREE_TYPE (ovar))
7818 {
7819 gcc_assert (is_parallel_ctx (ctx)
7820 && DECL_ARTIFICIAL (ovar));
7821 /* _condtemp_ clause. */
7822 var = build_constructor (TREE_TYPE (x), NULL);
7823 }
7824 else
7825 var = build_fold_addr_expr (var);
7826 gimplify_assign (x, var, ilist);
7827 }
7828 else
7829 {
7830 x = build_sender_ref (ovar, ctx);
7831 gimplify_assign (x, var, ilist);
7832
7833 if (!TREE_READONLY (var)
7834 /* We don't need to receive a new reference to a result
7835 or parm decl. In fact we may not store to it as we will
7836 invalidate any pending RSO and generate wrong gimple
7837 during inlining. */
7838 && !((TREE_CODE (var) == RESULT_DECL
7839 || TREE_CODE (var) == PARM_DECL)
7840 && DECL_BY_REFERENCE (var)))
7841 {
7842 x = build_sender_ref (ovar, ctx);
7843 gimplify_assign (var, x, olist);
7844 }
7845 }
7846 }
7847 }
7848
7849 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7850 other information that must be processed by the target compiler.
7851 Return the maximum number of dimensions the associated loop might
7852 be partitioned over. */
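/* A sketch of the emitted marker (argument order as pushed below):

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   where TAG packs the OLF_* bits collected from the clauses and LEVELS
   is the number of dimensions the loop may be partitioned over.  */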
7853
7854 static unsigned
7855 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7856 gimple_seq *seq, omp_context *ctx)
7857 {
7858 unsigned levels = 0;
7859 unsigned tag = 0;
7860 tree gang_static = NULL_TREE;
7861 auto_vec<tree, 5> args;
7862
7863 args.quick_push (build_int_cst
7864 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7865 args.quick_push (ddvar);
7866 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7867 {
7868 switch (OMP_CLAUSE_CODE (c))
7869 {
7870 case OMP_CLAUSE_GANG:
7871 tag |= OLF_DIM_GANG;
7872 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7873 /* static:* is represented by -1, and we can ignore it, as
7874 scheduling is always static. */
7875 if (gang_static && integer_minus_onep (gang_static))
7876 gang_static = NULL_TREE;
7877 levels++;
7878 break;
7879
7880 case OMP_CLAUSE_WORKER:
7881 tag |= OLF_DIM_WORKER;
7882 levels++;
7883 break;
7884
7885 case OMP_CLAUSE_VECTOR:
7886 tag |= OLF_DIM_VECTOR;
7887 levels++;
7888 break;
7889
7890 case OMP_CLAUSE_SEQ:
7891 tag |= OLF_SEQ;
7892 break;
7893
7894 case OMP_CLAUSE_AUTO:
7895 tag |= OLF_AUTO;
7896 break;
7897
7898 case OMP_CLAUSE_INDEPENDENT:
7899 tag |= OLF_INDEPENDENT;
7900 break;
7901
7902 case OMP_CLAUSE_TILE:
7903 tag |= OLF_TILE;
7904 break;
7905
7906 default:
7907 continue;
7908 }
7909 }
7910
7911 if (gang_static)
7912 {
7913 if (DECL_P (gang_static))
7914 gang_static = build_outer_var_ref (gang_static, ctx);
7915 tag |= OLF_GANG_STATIC;
7916 }
7917
7918 omp_context *tgt = enclosing_target_ctx (ctx);
7919 if (!tgt || is_oacc_parallel_or_serial (tgt))
7920 ;
7921 else if (is_oacc_kernels (tgt))
7922 /* This loop handling is not used inside OpenACC 'kernels' regions. */
7923 gcc_unreachable ();
7924 else if (is_oacc_kernels_decomposed_part (tgt))
7925 ;
7926 else
7927 gcc_unreachable ();
7928
7929 /* In a parallel region, loops are implicitly INDEPENDENT. */
7930 if (!tgt || is_oacc_parallel_or_serial (tgt))
7931 tag |= OLF_INDEPENDENT;
7932
7933 /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
7934 have an explicit 'seq' or 'independent' clause, and no 'auto' clause. */
7935 if (tgt && is_oacc_kernels_decomposed_part (tgt))
7936 {
7937 gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
7938 gcc_assert (!(tag & OLF_AUTO));
7939 }
7940
7941 if (tag & OLF_TILE)
7942 /* Tiling could use all 3 levels. */
7943 levels = 3;
7944 else
7945 {
7946 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7947 Ensure at least one level, or 2 for possible auto
7948 partitioning. */
7949 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7950 << OLF_DIM_BASE) | OLF_SEQ));
7951
7952 if (levels < 1u + maybe_auto)
7953 levels = 1u + maybe_auto;
7954 }
7955
7956 args.quick_push (build_int_cst (integer_type_node, levels));
7957 args.quick_push (build_int_cst (integer_type_node, tag));
7958 if (gang_static)
7959 args.quick_push (gang_static);
7960
7961 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7962 gimple_set_location (call, loc);
7963 gimple_set_lhs (call, ddvar);
7964 gimple_seq_add_stmt (seq, call);
7965
7966 return levels;
7967 }
7968
7969 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
7970 non-NULL, is the partitioning level of the enclosed region. */
7971
7972 static void
7973 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7974 tree tofollow, gimple_seq *seq)
7975 {
7976 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7977 : IFN_UNIQUE_OACC_TAIL_MARK);
7978 tree marker = build_int_cst (integer_type_node, marker_kind);
7979 int nargs = 2 + (tofollow != NULL_TREE);
7980 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7981 marker, ddvar, tofollow);
7982 gimple_set_location (call, loc);
7983 gimple_set_lhs (call, ddvar);
7984 gimple_seq_add_stmt (seq, call);
7985 }
7986
7987 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7988 the loop clauses, from which we extract reductions. Initialize
7989 HEAD and TAIL. */
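/* Each level handled below appends its fork sequence to HEAD and
   prepends its join sequence to TAIL, so the levels pair up inside
   out.  For two levels, roughly (a sketch):

     HEAD: head-mark  fork(1)  fork(2)  ... loop ...
     TAIL:            join(2)  join(1)  tail-mark

   with each level's reduction setup/teardown interleaved around its
   fork/join by lower_oacc_reductions.  */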
7990
7991 static void
7992 lower_oacc_head_tail (location_t loc, tree clauses,
7993 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7994 {
7995 bool inner = false;
7996 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7997 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7998
7999 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
8000 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
8001 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
8002
8003 gcc_assert (count);
8004 for (unsigned done = 1; count; count--, done++)
8005 {
8006 gimple_seq fork_seq = NULL;
8007 gimple_seq join_seq = NULL;
8008
8009 tree place = build_int_cst (integer_type_node, -1);
8010 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
8011 fork_kind, ddvar, place);
8012 gimple_set_location (fork, loc);
8013 gimple_set_lhs (fork, ddvar);
8014
8015 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
8016 join_kind, ddvar, place);
8017 gimple_set_location (join, loc);
8018 gimple_set_lhs (join, ddvar);
8019
8020 /* Mark the beginning of this level sequence. */
8021 if (inner)
8022 lower_oacc_loop_marker (loc, ddvar, true,
8023 build_int_cst (integer_type_node, count),
8024 &fork_seq);
8025 lower_oacc_loop_marker (loc, ddvar, false,
8026 build_int_cst (integer_type_node, done),
8027 &join_seq);
8028
8029 lower_oacc_reductions (loc, clauses, place, inner,
8030 fork, join, &fork_seq, &join_seq, ctx);
8031
8032 /* Append this level to head. */
8033 gimple_seq_add_seq (head, fork_seq);
8034 /* Prepend it to tail. */
8035 gimple_seq_add_seq (&join_seq, *tail);
8036 *tail = join_seq;
8037
8038 inner = true;
8039 }
8040
8041 /* Mark the end of the sequence. */
8042 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
8043 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
8044 }
8045
8046 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8047 catch handler and return it. This prevents programs from violating the
8048 structured block semantics with throws. */
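/* A sketch of the wrapper built below:

     try
       {
         BODY;
       }
     catch ()
       <must-not-throw region: call lang_hooks.eh_protect_cleanup_actions (),
        e.g. std::terminate for C++, or __builtin_trap otherwise>  */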
8049
8050 static gimple_seq
8051 maybe_catch_exception (gimple_seq body)
8052 {
8053 gimple *g;
8054 tree decl;
8055
8056 if (!flag_exceptions)
8057 return body;
8058
8059 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8060 decl = lang_hooks.eh_protect_cleanup_actions ();
8061 else
8062 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8063
8064 g = gimple_build_eh_must_not_throw (decl);
8065 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8066 GIMPLE_TRY_CATCH);
8067
8068 return gimple_seq_alloc_with_stmt (g);
8069 }
8070
8071 \f
8072 /* Routines to lower OMP directives into OMP-GIMPLE. */
8073
8074 /* If ctx is a worksharing context inside of a cancellable parallel
8075 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8076 and conditional branch to parallel's cancel_label to handle
8077 cancellation in the implicit barrier. */
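/* A sketch of the check added at the barrier (labels artificial):

     <flag> = GIMPLE_OMP_RETURN;        ... barrier yields cancellation flag
     if (<flag> != false) goto <parallel's cancel_label>;
     <fallthru_label>:;  */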
8078
8079 static void
8080 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8081 gimple_seq *body)
8082 {
8083 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8084 if (gimple_omp_return_nowait_p (omp_return))
8085 return;
8086 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8087 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8088 && outer->cancellable)
8089 {
8090 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8091 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8092 tree lhs = create_tmp_var (c_bool_type);
8093 gimple_omp_return_set_lhs (omp_return, lhs);
8094 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8095 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8096 fold_convert (c_bool_type,
8097 boolean_false_node),
8098 outer->cancel_label, fallthru_label);
8099 gimple_seq_add_stmt (body, g);
8100 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8101 }
8102 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8103 return;
8104 }
8105
8106 /* Find the first task_reduction or reduction clause or return NULL
8107 if there are none. */
8108
8109 static inline tree
8110 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8111 enum omp_clause_code ccode)
8112 {
8113 while (1)
8114 {
8115 clauses = omp_find_clause (clauses, ccode);
8116 if (clauses == NULL_TREE)
8117 return NULL_TREE;
8118 if (ccode != OMP_CLAUSE_REDUCTION
8119 || code == OMP_TASKLOOP
8120 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8121 return clauses;
8122 clauses = OMP_CLAUSE_CHAIN (clauses);
8123 }
8124 }
8125
8126 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8127 gimple_seq *, gimple_seq *);
8128
8129 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8130 CTX is the enclosing OMP context for the current statement. */
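/* The body assembled below has roughly this shape (a sketch):

     <ilist>                          ... firstprivate/reduction setup
     GIMPLE_OMP_SECTIONS <.section>   ... with the control variable
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { <section 1> OMP_RETURN ... <last section + lastprivate>
            OMP_RETURN }
     GIMPLE_OMP_CONTINUE
     <olist> <dlist>                  ... reduction merges, destructors
     GIMPLE_OMP_RETURN [nowait]  */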
8131
8132 static void
8133 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8134 {
8135 tree block, control;
8136 gimple_stmt_iterator tgsi;
8137 gomp_sections *stmt;
8138 gimple *t;
8139 gbind *new_stmt, *bind;
8140 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8141
8142 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
8143
8144 push_gimplify_context ();
8145
8146 dlist = NULL;
8147 ilist = NULL;
8148
8149 tree rclauses
8150 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
8151 OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
8152 tree rtmp = NULL_TREE;
8153 if (rclauses)
8154 {
8155 tree type = build_pointer_type (pointer_sized_int_node);
8156 tree temp = create_tmp_var (type);
8157 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8158 OMP_CLAUSE_DECL (c) = temp;
8159 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
8160 gimple_omp_sections_set_clauses (stmt, c);
8161 lower_omp_task_reductions (ctx, OMP_SECTIONS,
8162 gimple_omp_sections_clauses (stmt),
8163 &ilist, &tred_dlist);
8164 rclauses = c;
8165 rtmp = make_ssa_name (type);
8166 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
8167 }
8168
8169 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
8170 lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
8171
8172 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
8173 &ilist, &dlist, ctx, NULL);
8174
8175 control = create_tmp_var (unsigned_type_node, ".section");
8176 gimple_omp_sections_set_control (stmt, control);
8177
8178 new_body = gimple_omp_body (stmt);
8179 gimple_omp_set_body (stmt, NULL);
8180 tgsi = gsi_start (new_body);
8181 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
8182 {
8183 omp_context *sctx;
8184 gimple *sec_start;
8185
8186 sec_start = gsi_stmt (tgsi);
8187 sctx = maybe_lookup_ctx (sec_start);
8188 gcc_assert (sctx);
8189
8190 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8191 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8192 GSI_CONTINUE_LINKING);
8193 gimple_omp_set_body (sec_start, NULL);
8194
8195 if (gsi_one_before_end_p (tgsi))
8196 {
8197 gimple_seq l = NULL;
8198 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
8199 &ilist, &l, &clist, ctx);
8200 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
8201 gimple_omp_section_set_last (sec_start);
8202 }
8203
8204 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8205 GSI_CONTINUE_LINKING);
8206 }
8207
8208 block = make_node (BLOCK);
8209 bind = gimple_build_bind (NULL, new_body, block);
8210
8211 olist = NULL;
8212 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
8213 &clist, ctx);
8214 if (clist)
8215 {
8216 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8217 gcall *g = gimple_build_call (fndecl, 0);
8218 gimple_seq_add_stmt (&olist, g);
8219 gimple_seq_add_seq (&olist, clist);
8220 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8221 g = gimple_build_call (fndecl, 0);
8222 gimple_seq_add_stmt (&olist, g);
8223 }
8224
8225 block = make_node (BLOCK);
8226 new_stmt = gimple_build_bind (NULL, NULL, block);
8227 gsi_replace (gsi_p, new_stmt, true);
8228
8229 pop_gimplify_context (new_stmt);
8230 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8231 BLOCK_VARS (block) = gimple_bind_vars (bind);
8232 if (BLOCK_VARS (block))
8233 TREE_USED (block) = 1;
8234
8235 new_body = NULL;
8236 gimple_seq_add_seq (&new_body, ilist);
8237 gimple_seq_add_stmt (&new_body, stmt);
8238 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8239 gimple_seq_add_stmt (&new_body, bind);
8240
8241 t = gimple_build_omp_continue (control, control);
8242 gimple_seq_add_stmt (&new_body, t);
8243
8244 gimple_seq_add_seq (&new_body, olist);
8245 if (ctx->cancellable)
8246 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
8247 gimple_seq_add_seq (&new_body, dlist);
8248
8249 new_body = maybe_catch_exception (new_body);
8250
8251 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
8252 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8253 t = gimple_build_omp_return (nowait);
8254 gimple_seq_add_stmt (&new_body, t);
8255 gimple_seq_add_seq (&new_body, tred_dlist);
8256 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
8257
8258 if (rclauses)
8259 OMP_CLAUSE_DECL (rclauses) = rtmp;
8260
8261 gimple_bind_set_body (new_stmt, new_body);
8262 }
8263
8264
8265 /* A subroutine of lower_omp_single. Expand the simple form of
8266 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8267
8268 if (GOMP_single_start ())
8269 BODY;
8270 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8271
8272 FIXME. It may be better to delay expanding the logic of this until
8273 pass_expand_omp. The expanded logic may make the job more difficult
8274 for a synchronization analysis pass. */
8275
8276 static void
8277 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8278 {
8279 location_t loc = gimple_location (single_stmt);
8280 tree tlabel = create_artificial_label (loc);
8281 tree flabel = create_artificial_label (loc);
8282 gimple *call, *cond;
8283 tree lhs, decl;
8284
8285 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8286 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8287 call = gimple_build_call (decl, 0);
8288 gimple_call_set_lhs (call, lhs);
8289 gimple_seq_add_stmt (pre_p, call);
8290
8291 cond = gimple_build_cond (EQ_EXPR, lhs,
8292 fold_convert_loc (loc, TREE_TYPE (lhs),
8293 boolean_true_node),
8294 tlabel, flabel);
8295 gimple_seq_add_stmt (pre_p, cond);
8296 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8297 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8298 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8299 }
8300
8301
8302 /* A subroutine of lower_omp_single. Expand the simple form of
8303 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8304
8305 #pragma omp single copyprivate (a, b, c)
8306
8307 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8308
8309 {
8310 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8311 {
8312 BODY;
8313 copyout.a = a;
8314 copyout.b = b;
8315 copyout.c = c;
8316 GOMP_single_copy_end (&copyout);
8317 }
8318 else
8319 {
8320 a = copyout_p->a;
8321 b = copyout_p->b;
8322 c = copyout_p->c;
8323 }
8324 GOMP_barrier ();
8325 }
8326
8327 FIXME. It may be better to delay expanding the logic of this until
8328 pass_expand_omp. The expanded logic may make the job more difficult
8329 for a synchronization analysis pass. */
8330
8331 static void
8332 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8333 omp_context *ctx)
8334 {
8335 tree ptr_type, t, l0, l1, l2, bfn_decl;
8336 gimple_seq copyin_seq;
8337 location_t loc = gimple_location (single_stmt);
8338
8339 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8340
8341 ptr_type = build_pointer_type (ctx->record_type);
8342 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8343
8344 l0 = create_artificial_label (loc);
8345 l1 = create_artificial_label (loc);
8346 l2 = create_artificial_label (loc);
8347
8348 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8349 t = build_call_expr_loc (loc, bfn_decl, 0);
8350 t = fold_convert_loc (loc, ptr_type, t);
8351 gimplify_assign (ctx->receiver_decl, t, pre_p);
8352
8353 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8354 build_int_cst (ptr_type, 0));
8355 t = build3 (COND_EXPR, void_type_node, t,
8356 build_and_jump (&l0), build_and_jump (&l1));
8357 gimplify_and_add (t, pre_p);
8358
8359 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8360
8361 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8362
8363 copyin_seq = NULL;
8364 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8365 &copyin_seq, ctx);
8366
8367 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8368 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8369 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8370 gimplify_and_add (t, pre_p);
8371
8372 t = build_and_jump (&l2);
8373 gimplify_and_add (t, pre_p);
8374
8375 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8376
8377 gimple_seq_add_seq (pre_p, copyin_seq);
8378
8379 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8380 }
8381
8382
8383 /* Expand code for an OpenMP single directive. */
8384
8385 static void
8386 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8387 {
8388 tree block;
8389 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8390 gbind *bind;
8391 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8392
8393 push_gimplify_context ();
8394
8395 block = make_node (BLOCK);
8396 bind = gimple_build_bind (NULL, NULL, block);
8397 gsi_replace (gsi_p, bind, true);
8398 bind_body = NULL;
8399 dlist = NULL;
8400 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8401 &bind_body, &dlist, ctx, NULL);
8402 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8403
8404 gimple_seq_add_stmt (&bind_body, single_stmt);
8405
8406 if (ctx->record_type)
8407 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8408 else
8409 lower_omp_single_simple (single_stmt, &bind_body);
8410
8411 gimple_omp_set_body (single_stmt, NULL);
8412
8413 gimple_seq_add_seq (&bind_body, dlist);
8414
8415 bind_body = maybe_catch_exception (bind_body);
8416
8417 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8418 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8419 gimple *g = gimple_build_omp_return (nowait);
8420 gimple_seq_add_stmt (&bind_body_tail, g);
8421 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8422 if (ctx->record_type)
8423 {
8424 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8425 tree clobber = build_clobber (ctx->record_type);
8426 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8427 clobber), GSI_SAME_STMT);
8428 }
8429 gimple_seq_add_seq (&bind_body, bind_body_tail);
8430 gimple_bind_set_body (bind, bind_body);
8431
8432 pop_gimplify_context (bind);
8433
8434 gimple_bind_append_vars (bind, ctx->block_vars);
8435 BLOCK_VARS (block) = ctx->block_vars;
8436 if (BLOCK_VARS (block))
8437 TREE_USED (block) = 1;
8438 }
8439
8440
8441 /* Expand code for an OpenMP master directive. */
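/* A sketch of the expansion:

     if (omp_get_thread_num () != 0) goto <lab>;
     BODY;
     <lab>:;
     GIMPLE_OMP_RETURN (nowait);  */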
8442
8443 static void
8444 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8445 {
8446 tree block, lab = NULL, x, bfn_decl;
8447 gimple *stmt = gsi_stmt (*gsi_p);
8448 gbind *bind;
8449 location_t loc = gimple_location (stmt);
8450 gimple_seq tseq;
8451
8452 push_gimplify_context ();
8453
8454 block = make_node (BLOCK);
8455 bind = gimple_build_bind (NULL, NULL, block);
8456 gsi_replace (gsi_p, bind, true);
8457 gimple_bind_add_stmt (bind, stmt);
8458
8459 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8460 x = build_call_expr_loc (loc, bfn_decl, 0);
8461 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8462 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8463 tseq = NULL;
8464 gimplify_and_add (x, &tseq);
8465 gimple_bind_add_seq (bind, tseq);
8466
8467 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8468 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8469 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8470 gimple_omp_set_body (stmt, NULL);
8471
8472 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8473
8474 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8475
8476 pop_gimplify_context (bind);
8477
8478 gimple_bind_append_vars (bind, ctx->block_vars);
8479 BLOCK_VARS (block) = ctx->block_vars;
8480 }
8481
8482 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8483 find the next clause that should be processed, or return false
8484 if all of them have been processed already. */
8485
8486 static inline bool
8487 omp_task_reduction_iterate (int pass, enum tree_code code,
8488 enum omp_clause_code ccode, tree *c, tree *decl,
8489 tree *type, tree *next)
8490 {
8491 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8492 {
8493 if (ccode == OMP_CLAUSE_REDUCTION
8494 && code != OMP_TASKLOOP
8495 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8496 continue;
8497 *decl = OMP_CLAUSE_DECL (*c);
8498 *type = TREE_TYPE (*decl);
8499 if (TREE_CODE (*decl) == MEM_REF)
8500 {
8501 if (pass != 1)
8502 continue;
8503 }
8504 else
8505 {
8506 if (omp_is_reference (*decl))
8507 *type = TREE_TYPE (*type);
8508 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8509 continue;
8510 }
8511 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8512 return true;
8513 }
8514 *decl = NULL_TREE;
8515 *type = NULL_TREE;
8516 *next = NULL_TREE;
8517 return false;
8518 }
8519
8520 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8521 OMP_TASKGROUP only with task modifier). Register the mapping of those in
8522 the START sequence, and reduce and unregister them in the END sequence. */
8523
8524 static void
8525 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8526 gimple_seq *start, gimple_seq *end)
8527 {
8528 enum omp_clause_code ccode
8529 = (code == OMP_TASKGROUP
8530 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8531 tree cancellable = NULL_TREE;
8532 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8533 if (clauses == NULL_TREE)
8534 return;
8535 if (code == OMP_FOR || code == OMP_SECTIONS)
8536 {
8537 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8538 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8539 && outer->cancellable)
8540 {
8541 cancellable = error_mark_node;
8542 break;
8543 }
8544 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8545 break;
8546 }
8547 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8548 tree *last = &TYPE_FIELDS (record_type);
8549 unsigned cnt = 0;
8550 if (cancellable)
8551 {
8552 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8553 ptr_type_node);
8554 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8555 integer_type_node);
8556 *last = field;
8557 DECL_CHAIN (field) = ifield;
8558 last = &DECL_CHAIN (ifield);
8559 DECL_CONTEXT (field) = record_type;
8560 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8561 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8562 DECL_CONTEXT (ifield) = record_type;
8563 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8564 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8565 }
8566 for (int pass = 0; pass < 2; pass++)
8567 {
8568 tree decl, type, next;
8569 for (tree c = clauses;
8570 omp_task_reduction_iterate (pass, code, ccode,
8571 &c, &decl, &type, &next); c = next)
8572 {
8573 ++cnt;
8574 tree new_type = type;
8575 if (ctx->outer)
8576 new_type = remap_type (type, &ctx->outer->cb);
8577 tree field
8578 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8579 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8580 new_type);
8581 if (DECL_P (decl) && type == TREE_TYPE (decl))
8582 {
8583 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8584 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8585 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8586 }
8587 else
8588 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8589 DECL_CONTEXT (field) = record_type;
8590 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8591 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8592 *last = field;
8593 last = &DECL_CHAIN (field);
8594 tree bfield
8595 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8596 boolean_type_node);
8597 DECL_CONTEXT (bfield) = record_type;
8598 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8599 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8600 *last = bfield;
8601 last = &DECL_CHAIN (bfield);
8602 }
8603 }
8604 *last = NULL_TREE;
8605 layout_type (record_type);
8606
8607 /* Build up an array which registers with the runtime all the reductions
8608 and deregisters them at the end. Format documented in libgomp/task.c. */
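/* Index layout as filled in below (a sketch derived from the stores
   that follow; libgomp/task.c remains authoritative):

     [0]            number of reductions (CNT)
     [1]            per-thread record size, rounded up to whole cache lines
     [2]            required alignment
     [3]            -1, [4] 0 (filled in / used by the runtime)
     [7 + 3*i]      address of the i-th reduction variable
     [7 + 3*i + 1]  byte offset of its field within the record  */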
8609 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8610 tree avar = create_tmp_var_raw (atype);
8611 gimple_add_tmp_var (avar);
8612 TREE_ADDRESSABLE (avar) = 1;
8613 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8614 NULL_TREE, NULL_TREE);
8615 tree t = build_int_cst (pointer_sized_int_node, cnt);
8616 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8617 gimple_seq seq = NULL;
8618 tree sz = fold_convert (pointer_sized_int_node,
8619 TYPE_SIZE_UNIT (record_type));
8620 int cachesz = 64;
8621 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8622 build_int_cst (pointer_sized_int_node, cachesz - 1));
8623 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8624 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8625 ctx->task_reductions.create (1 + cnt);
8626 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8627 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8628 ? sz : NULL_TREE);
8629 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8630 gimple_seq_add_seq (start, seq);
8631 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8632 NULL_TREE, NULL_TREE);
8633 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8634 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8635 NULL_TREE, NULL_TREE);
8636 t = build_int_cst (pointer_sized_int_node,
8637 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8638 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8639 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8640 NULL_TREE, NULL_TREE);
8641 t = build_int_cst (pointer_sized_int_node, -1);
8642 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8643 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8644 NULL_TREE, NULL_TREE);
8645 t = build_int_cst (pointer_sized_int_node, 0);
8646 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8647
8648 /* In END, build a loop that iterates from 0 to < omp_get_num_threads ()
8649 and for each task reduction checks a bool right after the private variable
8650 within that thread's chunk; if the bool is clear, it hasn't been
8651 initialized and thus isn't going to be reduced or destructed; otherwise
8652 reduce and destruct it. */
8653 tree idx = create_tmp_var (size_type_node);
8654 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8655 tree num_thr_sz = create_tmp_var (size_type_node);
8656 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8657 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8658 tree lab3 = NULL_TREE;
8659 gimple *g;
8660 if (code == OMP_FOR || code == OMP_SECTIONS)
8661 {
8662 /* For worksharing constructs, only perform it in the master thread,
8663 with the exception of cancelled implicit barriers - then only handle
8664 the current thread. */
8665 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8666 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8667 tree thr_num = create_tmp_var (integer_type_node);
8668 g = gimple_build_call (t, 0);
8669 gimple_call_set_lhs (g, thr_num);
8670 gimple_seq_add_stmt (end, g);
8671 if (cancellable)
8672 {
8673 tree c;
8674 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8675 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8676 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8677 if (code == OMP_FOR)
8678 c = gimple_omp_for_clauses (ctx->stmt);
8679 else /* if (code == OMP_SECTIONS) */
8680 c = gimple_omp_sections_clauses (ctx->stmt);
8681 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8682 cancellable = c;
8683 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8684 lab5, lab6);
8685 gimple_seq_add_stmt (end, g);
8686 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8687 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8688 gimple_seq_add_stmt (end, g);
8689 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8690 build_one_cst (TREE_TYPE (idx)));
8691 gimple_seq_add_stmt (end, g);
8692 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8693 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8694 }
8695 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8696 gimple_seq_add_stmt (end, g);
8697 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8698 }
8699 if (code != OMP_PARALLEL)
8700 {
8701 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8702 tree num_thr = create_tmp_var (integer_type_node);
8703 g = gimple_build_call (t, 0);
8704 gimple_call_set_lhs (g, num_thr);
8705 gimple_seq_add_stmt (end, g);
8706 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8707 gimple_seq_add_stmt (end, g);
8708 if (cancellable)
8709 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8710 }
8711 else
8712 {
8713 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8714 OMP_CLAUSE__REDUCTEMP_);
8715 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8716 t = fold_convert (size_type_node, t);
8717 gimplify_assign (num_thr_sz, t, end);
8718 }
8719 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8720 NULL_TREE, NULL_TREE);
8721 tree data = create_tmp_var (pointer_sized_int_node);
8722 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8723 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8724 tree ptr;
8725 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8726 ptr = create_tmp_var (build_pointer_type (record_type));
8727 else
8728 ptr = create_tmp_var (ptr_type_node);
8729 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8730
8731 tree field = TYPE_FIELDS (record_type);
8732 cnt = 0;
8733 if (cancellable)
8734 field = DECL_CHAIN (DECL_CHAIN (field));
8735 for (int pass = 0; pass < 2; pass++)
8736 {
8737 tree decl, type, next;
8738 for (tree c = clauses;
8739 omp_task_reduction_iterate (pass, code, ccode,
8740 &c, &decl, &type, &next); c = next)
8741 {
8742 tree var = decl, ref;
8743 if (TREE_CODE (decl) == MEM_REF)
8744 {
8745 var = TREE_OPERAND (var, 0);
8746 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8747 var = TREE_OPERAND (var, 0);
8748 tree v = var;
8749 if (TREE_CODE (var) == ADDR_EXPR)
8750 var = TREE_OPERAND (var, 0);
8751 else if (TREE_CODE (var) == INDIRECT_REF)
8752 var = TREE_OPERAND (var, 0);
8753 tree orig_var = var;
8754 if (is_variable_sized (var))
8755 {
8756 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8757 var = DECL_VALUE_EXPR (var);
8758 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8759 var = TREE_OPERAND (var, 0);
8760 gcc_assert (DECL_P (var));
8761 }
8762 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8763 if (orig_var != var)
8764 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8765 else if (TREE_CODE (v) == ADDR_EXPR)
8766 t = build_fold_addr_expr (t);
8767 else if (TREE_CODE (v) == INDIRECT_REF)
8768 t = build_fold_indirect_ref (t);
8769 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8770 {
8771 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8772 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8773 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8774 }
8775 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8776 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8777 fold_convert (size_type_node,
8778 TREE_OPERAND (decl, 1)));
8779 }
8780 else
8781 {
8782 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8783 if (!omp_is_reference (decl))
8784 t = build_fold_addr_expr (t);
8785 }
8786 t = fold_convert (pointer_sized_int_node, t);
8787 seq = NULL;
8788 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8789 gimple_seq_add_seq (start, seq);
8790 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8791 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8792 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8793 t = unshare_expr (byte_position (field));
8794 t = fold_convert (pointer_sized_int_node, t);
8795 ctx->task_reduction_map->put (c, cnt);
8796 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8797 ? t : NULL_TREE);
8798 seq = NULL;
8799 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8800 gimple_seq_add_seq (start, seq);
8801 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8802 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8803 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8804
8805 tree bfield = DECL_CHAIN (field);
8806 tree cond;
8807 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8808 /* In a parallel or worksharing construct all threads unconditionally
8809 initialize all their task reduction private variables. */
8810 cond = boolean_true_node;
8811 else if (TREE_TYPE (ptr) == ptr_type_node)
8812 {
8813 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8814 unshare_expr (byte_position (bfield)));
8815 seq = NULL;
8816 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8817 gimple_seq_add_seq (end, seq);
8818 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8819 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8820 build_int_cst (pbool, 0));
8821 }
8822 else
8823 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8824 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8825 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8826 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8827 tree condv = create_tmp_var (boolean_type_node);
8828 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8829 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8830 lab3, lab4);
8831 gimple_seq_add_stmt (end, g);
8832 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8833 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8834 {
8835 /* If this reduction doesn't need destruction and parallel
8836 has been cancelled, there is nothing to do for this
8837 reduction, so jump around the merge operation. */
8838 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8839 g = gimple_build_cond (NE_EXPR, cancellable,
8840 build_zero_cst (TREE_TYPE (cancellable)),
8841 lab4, lab5);
8842 gimple_seq_add_stmt (end, g);
8843 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8844 }
8845
8846 tree new_var;
8847 if (TREE_TYPE (ptr) == ptr_type_node)
8848 {
8849 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8850 unshare_expr (byte_position (field)));
8851 seq = NULL;
8852 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8853 gimple_seq_add_seq (end, seq);
8854 tree pbool = build_pointer_type (TREE_TYPE (field));
8855 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8856 build_int_cst (pbool, 0));
8857 }
8858 else
8859 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8860 build_simple_mem_ref (ptr), field, NULL_TREE);
8861
8862 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8863 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8864 ref = build_simple_mem_ref (ref);
8865 /* reduction(-:var) sums up the partial results, so it acts
8866 identically to reduction(+:var). */
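 /* (A small soundness sketch: each private copy starts at the
 identity 0 and the user code accumulates v = v - e_i, so the
 private ends at -(e_1 + ... + e_k); combining the privates into
 the original value with + therefore yields orig - (e_1 + ... + e_k),
 exactly what sequential execution of the subtractions produces.) */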
8867 if (rcode == MINUS_EXPR)
8868 rcode = PLUS_EXPR;
8869 if (TREE_CODE (decl) == MEM_REF)
8870 {
8871 tree type = TREE_TYPE (new_var);
8872 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8873 tree i = create_tmp_var (TREE_TYPE (v));
8874 tree ptype = build_pointer_type (TREE_TYPE (type));
8875 if (DECL_P (v))
8876 {
8877 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8878 tree vv = create_tmp_var (TREE_TYPE (v));
8879 gimplify_assign (vv, v, start);
8880 v = vv;
8881 }
8882 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8883 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8884 new_var = build_fold_addr_expr (new_var);
8885 new_var = fold_convert (ptype, new_var);
8886 ref = fold_convert (ptype, ref);
8887 tree m = create_tmp_var (ptype);
8888 gimplify_assign (m, new_var, end);
8889 new_var = m;
8890 m = create_tmp_var (ptype);
8891 gimplify_assign (m, ref, end);
8892 ref = m;
8893 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8894 tree body = create_artificial_label (UNKNOWN_LOCATION);
8895 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8896 gimple_seq_add_stmt (end, gimple_build_label (body));
8897 tree priv = build_simple_mem_ref (new_var);
8898 tree out = build_simple_mem_ref (ref);
8899 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8900 {
8901 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8902 tree decl_placeholder
8903 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8904 tree lab6 = NULL_TREE;
8905 if (cancellable)
8906 {
8907 /* If this reduction needs destruction and parallel
8908 has been cancelled, jump around the merge operation
8909 to the destruction. */
8910 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8911 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8912 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8913 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8914 lab6, lab5);
8915 gimple_seq_add_stmt (end, g);
8916 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8917 }
8918 SET_DECL_VALUE_EXPR (placeholder, out);
8919 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8920 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8921 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8922 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8923 gimple_seq_add_seq (end,
8924 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8925 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8926 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8927 {
8928 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8929 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8930 }
8931 if (cancellable)
8932 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8933 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8934 if (x)
8935 {
8936 gimple_seq tseq = NULL;
8937 gimplify_stmt (&x, &tseq);
8938 gimple_seq_add_seq (end, tseq);
8939 }
8940 }
8941 else
8942 {
8943 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8944 out = unshare_expr (out);
8945 gimplify_assign (out, x, end);
8946 }
8947 gimple *g
8948 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8949 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8950 gimple_seq_add_stmt (end, g);
8951 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8952 TYPE_SIZE_UNIT (TREE_TYPE (type)));
8953 gimple_seq_add_stmt (end, g);
8954 g = gimple_build_assign (i, PLUS_EXPR, i,
8955 build_int_cst (TREE_TYPE (i), 1));
8956 gimple_seq_add_stmt (end, g);
8957 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8958 gimple_seq_add_stmt (end, g);
8959 gimple_seq_add_stmt (end, gimple_build_label (endl));
8960 }
8961 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8962 {
8963 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8964 tree oldv = NULL_TREE;
8965 tree lab6 = NULL_TREE;
8966 if (cancellable)
8967 {
8968 /* If this reduction needs destruction and parallel
8969 has been cancelled, jump around the merge operation
8970 to the destruction. */
8971 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8972 lab6 = create_artificial_label (UNKNOWN_LOCATION);
8973 tree zero = build_zero_cst (TREE_TYPE (cancellable));
8974 g = gimple_build_cond (NE_EXPR, cancellable, zero,
8975 lab6, lab5);
8976 gimple_seq_add_stmt (end, g);
8977 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8978 }
8979 if (omp_is_reference (decl)
8980 && !useless_type_conversion_p (TREE_TYPE (placeholder),
8981 TREE_TYPE (ref)))
8982 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8983 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8984 tree refv = create_tmp_var (TREE_TYPE (ref));
8985 gimplify_assign (refv, ref, end);
8986 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8987 SET_DECL_VALUE_EXPR (placeholder, ref);
8988 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8989 tree d = maybe_lookup_decl (decl, ctx);
8990 gcc_assert (d);
8991 if (DECL_HAS_VALUE_EXPR_P (d))
8992 oldv = DECL_VALUE_EXPR (d);
8993 if (omp_is_reference (var))
8994 {
8995 tree v = fold_convert (TREE_TYPE (d),
8996 build_fold_addr_expr (new_var));
8997 SET_DECL_VALUE_EXPR (d, v);
8998 }
8999 else
9000 SET_DECL_VALUE_EXPR (d, new_var);
9001 DECL_HAS_VALUE_EXPR_P (d) = 1;
9002 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9003 if (oldv)
9004 SET_DECL_VALUE_EXPR (d, oldv);
9005 else
9006 {
9007 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9008 DECL_HAS_VALUE_EXPR_P (d) = 0;
9009 }
9010 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9011 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9012 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9013 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9014 if (cancellable)
9015 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9016 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9017 if (x)
9018 {
9019 gimple_seq tseq = NULL;
9020 gimplify_stmt (&x, &tseq);
9021 gimple_seq_add_seq (end, tseq);
9022 }
9023 }
9024 else
9025 {
9026 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9027 ref = unshare_expr (ref);
9028 gimplify_assign (ref, x, end);
9029 }
9030 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9031 ++cnt;
9032 field = DECL_CHAIN (bfield);
9033 }
9034 }
9035
9036 if (code == OMP_TASKGROUP)
9037 {
9038 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9039 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9040 gimple_seq_add_stmt (start, g);
9041 }
9042 else
9043 {
9044 tree c;
9045 if (code == OMP_FOR)
9046 c = gimple_omp_for_clauses (ctx->stmt);
9047 else if (code == OMP_SECTIONS)
9048 c = gimple_omp_sections_clauses (ctx->stmt);
9049 else
9050 c = gimple_omp_taskreg_clauses (ctx->stmt);
9051 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9052 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9053 build_fold_addr_expr (avar));
9054 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9055 }
9056
9057 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9058 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9059 size_one_node));
9060 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9061 gimple_seq_add_stmt (end, g);
9062 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9063 if (code == OMP_FOR || code == OMP_SECTIONS)
9064 {
9065 enum built_in_function bfn
9066 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9067 t = builtin_decl_explicit (bfn);
9068 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9069 tree arg;
9070 if (cancellable)
9071 {
9072 arg = create_tmp_var (c_bool_type);
9073 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9074 cancellable));
9075 }
9076 else
9077 arg = build_int_cst (c_bool_type, 0);
9078 g = gimple_build_call (t, 1, arg);
9079 }
9080 else
9081 {
9082 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9083 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9084 }
9085 gimple_seq_add_stmt (end, g);
9086 t = build_constructor (atype, NULL);
9087 TREE_THIS_VOLATILE (t) = 1;
9088 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9089 }
9090
9091 /* Expand code for an OpenMP taskgroup directive. */
9092
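/* A sketch of the lowered form in the simple, no-reduction case:

     GOMP_taskgroup_start ();
     ... lowered taskgroup body ...
     GIMPLE_OMP_RETURN	// the matching GOMP_taskgroup_end is emitted
			// later, when this region is expanded.

   With task reductions, lower_omp_task_reductions splices the
   registration code before the body and the unregistration code
   (DSEQ) after the GIMPLE_OMP_RETURN.  */
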
9093 static void
9094 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9095 {
9096 gimple *stmt = gsi_stmt (*gsi_p);
9097 gcall *x;
9098 gbind *bind;
9099 gimple_seq dseq = NULL;
9100 tree block = make_node (BLOCK);
9101
9102 bind = gimple_build_bind (NULL, NULL, block);
9103 gsi_replace (gsi_p, bind, true);
9104 gimple_bind_add_stmt (bind, stmt);
9105
9106 push_gimplify_context ();
9107
9108 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9109 0);
9110 gimple_bind_add_stmt (bind, x);
9111
9112 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9113 gimple_omp_taskgroup_clauses (stmt),
9114 gimple_bind_body_ptr (bind), &dseq);
9115
9116 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9117 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9118 gimple_omp_set_body (stmt, NULL);
9119
9120 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9121 gimple_bind_add_seq (bind, dseq);
9122
9123 pop_gimplify_context (bind);
9124
9125 gimple_bind_append_vars (bind, ctx->block_vars);
9126 BLOCK_VARS (block) = ctx->block_vars;
9127 }
9128
9129
9130 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9131
9132 static void
9133 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9134 omp_context *ctx)
9135 {
9136 struct omp_for_data fd;
9137 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9138 return;
9139
9140 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9141 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9142 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9143 if (!fd.ordered)
9144 return;
9145
9146 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9147 tree c = gimple_omp_ordered_clauses (ord_stmt);
9148 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9149 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9150 {
9151 /* Merge depend clauses from multiple adjacent
9152 #pragma omp ordered depend(sink:...) constructs
9153 into one #pragma omp ordered depend(sink:...), so that
9154 we can optimize them together. */
9155 gimple_stmt_iterator gsi = *gsi_p;
9156 gsi_next (&gsi);
9157 while (!gsi_end_p (gsi))
9158 {
9159 gimple *stmt = gsi_stmt (gsi);
9160 if (is_gimple_debug (stmt)
9161 || gimple_code (stmt) == GIMPLE_NOP)
9162 {
9163 gsi_next (&gsi);
9164 continue;
9165 }
9166 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9167 break;
9168 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9169 c = gimple_omp_ordered_clauses (ord_stmt2);
9170 if (c == NULL_TREE
9171 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9172 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9173 break;
9174 while (*list_p)
9175 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9176 *list_p = c;
9177 gsi_remove (&gsi, true);
9178 }
9179 }
9180
9181 /* Canonicalize sink dependence clauses into one folded clause if
9182 possible.
9183
9184 The basic algorithm is to create a sink vector whose first
9185 element is the GCD of all the first elements, and whose remaining
9186 elements are the minimum of the subsequent columns.
9187
9188 We ignore dependence vectors whose first element is zero because
9189 such dependencies are known to be executed by the same thread.
9190
9191 We take into account the direction of the loop, so a minimum
9192 becomes a maximum if the loop is iterating forwards. We also
9193 ignore sink clauses where the loop direction is unknown, or where
9194 the offsets are clearly invalid because they are not a multiple
9195 of the loop increment.
9196
9197 For example:
9198
9199 #pragma omp for ordered(2)
9200 for (i=0; i < N; ++i)
9201 for (j=0; j < M; ++j)
9202 {
9203 #pragma omp ordered \
9204 depend(sink:i-8,j-2) \
9205 depend(sink:i,j-1) \ // Completely ignored because i+0.
9206 depend(sink:i-4,j-3) \
9207 depend(sink:i-6,j-4)
9208 #pragma omp ordered depend(source)
9209 }
9210
9211 Folded clause is:
9212
9213 depend(sink:-gcd(8,4,6),-min(2,3,4))
9214 -or-
9215 depend(sink:-2,-2)
9216 */
9217
9218 /* FIXME: Computing GCD's where the first element is zero is
9219 non-trivial in the presence of collapsed loops. Do this later. */
9220 if (fd.collapse > 1)
9221 return;
9222
9223 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9224
9225 /* wide_int is not a POD so it must be default-constructed. */
9226 for (unsigned i = 0; i != 2 * len - 1; ++i)
9227 new (static_cast<void*>(folded_deps + i)) wide_int ();
9228
9229 tree folded_dep = NULL_TREE;
9230 /* TRUE if the first dimension's offset is negative. */
9231 bool neg_offset_p = false;
9232
9233 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9234 unsigned int i;
9235 while ((c = *list_p) != NULL)
9236 {
9237 bool remove = false;
9238
9239 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9240 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9241 goto next_ordered_clause;
9242
9243 tree vec;
9244 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9245 vec && TREE_CODE (vec) == TREE_LIST;
9246 vec = TREE_CHAIN (vec), ++i)
9247 {
9248 gcc_assert (i < len);
9249
9250 /* omp_extract_for_data has canonicalized the condition. */
9251 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9252 || fd.loops[i].cond_code == GT_EXPR);
9253 bool forward = fd.loops[i].cond_code == LT_EXPR;
9254 bool maybe_lexically_later = true;
9255
9256 /* While the committee makes up its mind, bail if we have any
9257 non-constant steps. */
9258 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9259 goto lower_omp_ordered_ret;
9260
9261 tree itype = TREE_TYPE (TREE_VALUE (vec));
9262 if (POINTER_TYPE_P (itype))
9263 itype = sizetype;
9264 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9265 TYPE_PRECISION (itype),
9266 TYPE_SIGN (itype));
9267
9268 /* Ignore invalid offsets that are not multiples of the step. */
9269 if (!wi::multiple_of_p (wi::abs (offset),
9270 wi::abs (wi::to_wide (fd.loops[i].step)),
9271 UNSIGNED))
9272 {
9273 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9274 "ignoring sink clause with offset that is not "
9275 "a multiple of the loop step");
9276 remove = true;
9277 goto next_ordered_clause;
9278 }
9279
9280 /* Calculate the first dimension. The first dimension of
9281 the folded dependency vector is the GCD of the first
9282 elements, while ignoring any first elements whose offset
9283 is 0. */
9284 if (i == 0)
9285 {
9286 /* Ignore dependence vectors whose first dimension is 0. */
9287 if (offset == 0)
9288 {
9289 remove = true;
9290 goto next_ordered_clause;
9291 }
9292 else
9293 {
9294 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9295 {
9296 error_at (OMP_CLAUSE_LOCATION (c),
9297 "first offset must be in opposite direction "
9298 "of loop iterations");
9299 goto lower_omp_ordered_ret;
9300 }
9301 if (forward)
9302 offset = -offset;
9303 neg_offset_p = forward;
9304 /* Initialize the first time around. */
9305 if (folded_dep == NULL_TREE)
9306 {
9307 folded_dep = c;
9308 folded_deps[0] = offset;
9309 }
9310 else
9311 folded_deps[0] = wi::gcd (folded_deps[0],
9312 offset, UNSIGNED);
9313 }
9314 }
9315 /* Calculate minimum for the remaining dimensions. */
9316 else
9317 {
9318 folded_deps[len + i - 1] = offset;
9319 if (folded_dep == c)
9320 folded_deps[i] = offset;
9321 else if (maybe_lexically_later
9322 && !wi::eq_p (folded_deps[i], offset))
9323 {
9324 if (forward ^ wi::gts_p (folded_deps[i], offset))
9325 {
9326 unsigned int j;
9327 folded_dep = c;
9328 for (j = 1; j <= i; j++)
9329 folded_deps[j] = folded_deps[len + j - 1];
9330 }
9331 else
9332 maybe_lexically_later = false;
9333 }
9334 }
9335 }
9336 gcc_assert (i == len);
9337
9338 remove = true;
9339
9340 next_ordered_clause:
9341 if (remove)
9342 *list_p = OMP_CLAUSE_CHAIN (c);
9343 else
9344 list_p = &OMP_CLAUSE_CHAIN (c);
9345 }
9346
9347 if (folded_dep)
9348 {
9349 if (neg_offset_p)
9350 folded_deps[0] = -folded_deps[0];
9351
9352 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9353 if (POINTER_TYPE_P (itype))
9354 itype = sizetype;
9355
9356 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9357 = wide_int_to_tree (itype, folded_deps[0]);
9358 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9359 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9360 }
9361
9362 lower_omp_ordered_ret:
9363
9364 /* Ordered without clauses is equivalent to #pragma omp ordered threads,
9365 while we want a nop instead if we remove all clauses. */
9366 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9367 gsi_replace (gsi_p, gimple_build_nop (), true);
9368 }
9369
9370
9371 /* Expand code for an OpenMP ordered directive. */
9372
9373 static void
9374 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9375 {
9376 tree block;
9377 gimple *stmt = gsi_stmt (*gsi_p), *g;
9378 gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9379 gcall *x;
9380 gbind *bind;
9381 bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9382 OMP_CLAUSE_SIMD);
9383 /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9384 loop. */
9385 bool maybe_simt
9386 = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9387 bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9388 OMP_CLAUSE_THREADS);
9389
9390 if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9391 OMP_CLAUSE_DEPEND))
9392 {
9393 /* FIXME: This needs to be moved to the expansion to verify various
9394 conditions only testable on a CFG with dominators computed; also,
9395 all the depend clauses to be merged might still need to be available
9396 for the runtime checks. */
9397 if (0)
9398 lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9399 return;
9400 }
9401
9402 push_gimplify_context ();
9403
9404 block = make_node (BLOCK);
9405 bind = gimple_build_bind (NULL, NULL, block);
9406 gsi_replace (gsi_p, bind, true);
9407 gimple_bind_add_stmt (bind, stmt);
9408
9409 if (simd)
9410 {
9411 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9412 build_int_cst (NULL_TREE, threads));
9413 cfun->has_simduid_loops = true;
9414 }
9415 else
9416 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9417 0);
9418 gimple_bind_add_stmt (bind, x);
9419
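 /* For the maybe_simt case, the shape built below is roughly (a sketch
    with illustrative label names, not verbatim compiler output):

      counter = IFN_GOMP_SIMT_LANE ();
    body:
      pred = IFN_GOMP_SIMT_ORDERED_PRED (counter);
      if (pred == 0) goto run; else goto test;
    run:
      ... lowered ordered body ...
    test:
      counter = counter - 1;
      nonneg = IFN_GOMP_SIMT_VOTE_ANY (counter >= 0);
      if (nonneg != 0) goto body; else goto end;
    end:

    so the SIMT lanes take turns through the ordered body.  */
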
9420 tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9421 if (maybe_simt)
9422 {
9423 counter = create_tmp_var (integer_type_node);
9424 g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9425 gimple_call_set_lhs (g, counter);
9426 gimple_bind_add_stmt (bind, g);
9427
9428 body = create_artificial_label (UNKNOWN_LOCATION);
9429 test = create_artificial_label (UNKNOWN_LOCATION);
9430 gimple_bind_add_stmt (bind, gimple_build_label (body));
9431
9432 tree simt_pred = create_tmp_var (integer_type_node);
9433 g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9434 gimple_call_set_lhs (g, simt_pred);
9435 gimple_bind_add_stmt (bind, g);
9436
9437 tree t = create_artificial_label (UNKNOWN_LOCATION);
9438 g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9439 gimple_bind_add_stmt (bind, g);
9440
9441 gimple_bind_add_stmt (bind, gimple_build_label (t));
9442 }
9443 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9444 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9445 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9446 gimple_omp_set_body (stmt, NULL);
9447
9448 if (maybe_simt)
9449 {
9450 gimple_bind_add_stmt (bind, gimple_build_label (test));
9451 g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9452 gimple_bind_add_stmt (bind, g);
9453
9454 tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9455 tree nonneg = create_tmp_var (integer_type_node);
9456 gimple_seq tseq = NULL;
9457 gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9458 gimple_bind_add_seq (bind, tseq);
9459
9460 g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9461 gimple_call_set_lhs (g, nonneg);
9462 gimple_bind_add_stmt (bind, g);
9463
9464 tree end = create_artificial_label (UNKNOWN_LOCATION);
9465 g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9466 gimple_bind_add_stmt (bind, g);
9467
9468 gimple_bind_add_stmt (bind, gimple_build_label (end));
9469 }
9470 if (simd)
9471 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9472 build_int_cst (NULL_TREE, threads));
9473 else
9474 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9475 0);
9476 gimple_bind_add_stmt (bind, x);
9477
9478 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9479
9480 pop_gimplify_context (bind);
9481
9482 gimple_bind_append_vars (bind, ctx->block_vars);
9483 BLOCK_VARS (block) = gimple_bind_vars (bind);
9484 }
9485
9486
9487 /* Expand code for an OpenMP scan directive and the structured block
9488 before the scan directive. */
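
/* For reference, a typical inclusive-scan source form handled here
   (illustrative names):

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];		// input phase
	 #pragma omp scan inclusive (r)
	 use (r);		// scan phase
       }  */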
9489
9490 static void
9491 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9492 {
9493 gimple *stmt = gsi_stmt (*gsi_p);
9494 bool has_clauses
9495 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9496 tree lane = NULL_TREE;
9497 gimple_seq before = NULL;
9498 omp_context *octx = ctx->outer;
9499 gcc_assert (octx);
9500 if (octx->scan_exclusive && !has_clauses)
9501 {
9502 gimple_stmt_iterator gsi2 = *gsi_p;
9503 gsi_next (&gsi2);
9504 gimple *stmt2 = gsi_stmt (gsi2);
9505 /* For exclusive scan, swap the GIMPLE_OMP_SCAN without clauses
9506 with the following GIMPLE_OMP_SCAN with clauses, so that the input
9507 phase, the one with the exclusive clause(s), comes first. */
9508 if (stmt2
9509 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9510 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9511 {
9512 gsi_remove (gsi_p, false);
9513 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9514 ctx = maybe_lookup_ctx (stmt2);
9515 gcc_assert (ctx);
9516 lower_omp_scan (gsi_p, ctx);
9517 return;
9518 }
9519 }
9520
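 /* Informally: each half of the loop body is wrapped in a
 GIMPLE_OMP_SCAN; for inclusive(r) the clause-less one wraps the input
 phase, while for exclusive(r), after the swap above, the
 clause-bearing one does. The XOR below selects the input phase
 accordingly. */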
9521 bool input_phase = has_clauses ^ octx->scan_inclusive;
9522 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9523 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9524 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9525 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9526 && !gimple_omp_for_combined_p (octx->stmt));
9527 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9528 if (is_for_simd && octx->for_simd_scan_phase)
9529 is_simd = false;
9530 if (is_simd)
9531 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9532 OMP_CLAUSE__SIMDUID_))
9533 {
9534 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9535 lane = create_tmp_var (unsigned_type_node);
9536 tree t = build_int_cst (integer_type_node,
9537 input_phase ? 1
9538 : octx->scan_inclusive ? 2 : 3);
9539 gimple *g
9540 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9541 gimple_call_set_lhs (g, lane);
9542 gimple_seq_add_stmt (&before, g);
9543 }
9544
9545 if (is_simd || is_for)
9546 {
9547 for (tree c = gimple_omp_for_clauses (octx->stmt);
9548 c; c = OMP_CLAUSE_CHAIN (c))
9549 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9550 && OMP_CLAUSE_REDUCTION_INSCAN (c))
9551 {
9552 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9553 tree var = OMP_CLAUSE_DECL (c);
9554 tree new_var = lookup_decl (var, octx);
9555 tree val = new_var;
9556 tree var2 = NULL_TREE;
9557 tree var3 = NULL_TREE;
9558 tree var4 = NULL_TREE;
9559 tree lane0 = NULL_TREE;
9560 tree new_vard = new_var;
9561 if (omp_is_reference (var))
9562 {
9563 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9564 val = new_var;
9565 }
9566 if (DECL_HAS_VALUE_EXPR_P (new_vard))
9567 {
9568 val = DECL_VALUE_EXPR (new_vard);
9569 if (new_vard != new_var)
9570 {
9571 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9572 val = TREE_OPERAND (val, 0);
9573 }
9574 if (TREE_CODE (val) == ARRAY_REF
9575 && VAR_P (TREE_OPERAND (val, 0)))
9576 {
9577 tree v = TREE_OPERAND (val, 0);
9578 if (lookup_attribute ("omp simd array",
9579 DECL_ATTRIBUTES (v)))
9580 {
9581 val = unshare_expr (val);
9582 lane0 = TREE_OPERAND (val, 1);
9583 TREE_OPERAND (val, 1) = lane;
9584 var2 = lookup_decl (v, octx);
9585 if (octx->scan_exclusive)
9586 var4 = lookup_decl (var2, octx);
9587 if (input_phase
9588 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9589 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9590 if (!input_phase)
9591 {
9592 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9593 var2, lane, NULL_TREE, NULL_TREE);
9594 TREE_THIS_NOTRAP (var2) = 1;
9595 if (octx->scan_exclusive)
9596 {
9597 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9598 var4, lane, NULL_TREE,
9599 NULL_TREE);
9600 TREE_THIS_NOTRAP (var4) = 1;
9601 }
9602 }
9603 else
9604 var2 = val;
9605 }
9606 }
9607 gcc_assert (var2);
9608 }
9609 else
9610 {
9611 var2 = build_outer_var_ref (var, octx);
9612 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9613 {
9614 var3 = maybe_lookup_decl (new_vard, octx);
9615 if (var3 == new_vard || var3 == NULL_TREE)
9616 var3 = NULL_TREE;
9617 else if (is_simd && octx->scan_exclusive && !input_phase)
9618 {
9619 var4 = maybe_lookup_decl (var3, octx);
9620 if (var4 == var3 || var4 == NULL_TREE)
9621 {
9622 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9623 {
9624 var4 = var3;
9625 var3 = NULL_TREE;
9626 }
9627 else
9628 var4 = NULL_TREE;
9629 }
9630 }
9631 }
9632 if (is_simd
9633 && octx->scan_exclusive
9634 && !input_phase
9635 && var4 == NULL_TREE)
9636 var4 = create_tmp_var (TREE_TYPE (val));
9637 }
9638 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9639 {
9640 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9641 if (input_phase)
9642 {
9643 if (var3)
9644 {
9645 /* If we've added a separate identity element
9646 variable, copy it over into val. */
9647 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9648 var3);
9649 gimplify_and_add (x, &before);
9650 }
9651 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9652 {
9653 /* Otherwise, assign to it the identity element. */
9654 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9655 if (is_for)
9656 tseq = copy_gimple_seq_and_replace_locals (tseq);
9657 tree ref = build_outer_var_ref (var, octx);
9658 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9659 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9660 if (x)
9661 {
9662 if (new_vard != new_var)
9663 val = build_fold_addr_expr_loc (clause_loc, val);
9664 SET_DECL_VALUE_EXPR (new_vard, val);
9665 }
9666 SET_DECL_VALUE_EXPR (placeholder, ref);
9667 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9668 lower_omp (&tseq, octx);
9669 if (x)
9670 SET_DECL_VALUE_EXPR (new_vard, x);
9671 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9672 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9673 gimple_seq_add_seq (&before, tseq);
9674 if (is_simd)
9675 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9676 }
9677 }
9678 else if (is_simd)
9679 {
9680 tree x;
9681 if (octx->scan_exclusive)
9682 {
9683 tree v4 = unshare_expr (var4);
9684 tree v2 = unshare_expr (var2);
9685 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9686 gimplify_and_add (x, &before);
9687 }
9688 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9689 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9690 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9691 tree vexpr = val;
9692 if (x && new_vard != new_var)
9693 vexpr = build_fold_addr_expr_loc (clause_loc, val);
9694 if (x)
9695 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9696 SET_DECL_VALUE_EXPR (placeholder, var2);
9697 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9698 lower_omp (&tseq, octx);
9699 gimple_seq_add_seq (&before, tseq);
9700 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9701 if (x)
9702 SET_DECL_VALUE_EXPR (new_vard, x);
9703 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9704 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9705 if (octx->scan_inclusive)
9706 {
9707 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9708 var2);
9709 gimplify_and_add (x, &before);
9710 }
9711 else if (lane0 == NULL_TREE)
9712 {
9713 x = lang_hooks.decls.omp_clause_assign_op (c, val,
9714 var4);
9715 gimplify_and_add (x, &before);
9716 }
9717 }
9718 }
9719 else
9720 {
9721 if (input_phase)
9722 {
9723 /* Input phase. Set val to the initializer before
9724 the body. */
9725 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9726 gimplify_assign (val, x, &before);
9727 }
9728 else if (is_simd)
9729 {
9730 /* Scan phase. */
9731 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9732 if (code == MINUS_EXPR)
9733 code = PLUS_EXPR;
9734
9735 tree x = build2 (code, TREE_TYPE (var2),
9736 unshare_expr (var2), unshare_expr (val));
9737 if (octx->scan_inclusive)
9738 {
9739 gimplify_assign (unshare_expr (var2), x, &before);
9740 gimplify_assign (val, var2, &before);
9741 }
9742 else
9743 {
9744 gimplify_assign (unshare_expr (var4),
9745 unshare_expr (var2), &before);
9746 gimplify_assign (var2, x, &before);
9747 if (lane0 == NULL_TREE)
9748 gimplify_assign (val, var4, &before);
9749 }
9750 }
9751 }
9752 if (octx->scan_exclusive && !input_phase && lane0)
9753 {
9754 tree vexpr = unshare_expr (var4);
9755 TREE_OPERAND (vexpr, 1) = lane0;
9756 if (new_vard != new_var)
9757 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9758 SET_DECL_VALUE_EXPR (new_vard, vexpr);
9759 }
9760 }
9761 }
9762 if (is_simd && !is_for_simd)
9763 {
9764 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9765 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9766 gsi_replace (gsi_p, gimple_build_nop (), true);
9767 return;
9768 }
9769 lower_omp (gimple_omp_body_ptr (stmt), octx);
9770 if (before)
9771 {
9772 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9773 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9774 }
9775 }
9776
9777
9778 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
9779 substitution of a couple of function calls. But the NAMED case
9780 requires that languages coordinate on a symbol name, so it is
9781 best handled here in common code. */
9782
9783 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9784
9785 static void
9786 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9787 {
9788 tree block;
9789 tree name, lock, unlock;
9790 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9791 gbind *bind;
9792 location_t loc = gimple_location (stmt);
9793 gimple_seq tbody;
9794
9795 name = gimple_omp_critical_name (stmt);
9796 if (name)
9797 {
9798 tree decl;
9799
9800 if (!critical_name_mutexes)
9801 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9802
9803 tree *n = critical_name_mutexes->get (name);
9804 if (n == NULL)
9805 {
9806 char *new_str;
9807
9808 decl = create_tmp_var_raw (ptr_type_node);
9809
9810 new_str = ACONCAT ((".gomp_critical_user_",
9811 IDENTIFIER_POINTER (name), NULL));
9812 DECL_NAME (decl) = get_identifier (new_str);
9813 TREE_PUBLIC (decl) = 1;
9814 TREE_STATIC (decl) = 1;
9815 DECL_COMMON (decl) = 1;
9816 DECL_ARTIFICIAL (decl) = 1;
9817 DECL_IGNORED_P (decl) = 1;
9818
9819 varpool_node::finalize_decl (decl);
9820
9821 critical_name_mutexes->put (name, decl);
9822 }
9823 else
9824 decl = *n;
9825
9826 /* If '#pragma omp critical' is inside offloaded region or
9827 inside function marked as offloadable, the symbol must be
9828 marked as offloadable too. */
9829 omp_context *octx;
9830 if (cgraph_node::get (current_function_decl)->offloadable)
9831 varpool_node::get_create (decl)->offloadable = 1;
9832 else
9833 for (octx = ctx->outer; octx; octx = octx->outer)
9834 if (is_gimple_omp_offloaded (octx->stmt))
9835 {
9836 varpool_node::get_create (decl)->offloadable = 1;
9837 break;
9838 }
9839
9840 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9841 lock = build_call_expr_loc (loc, lock, 1,
9842 build_fold_addr_expr_loc (loc, decl));
9843
9844 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9845 unlock = build_call_expr_loc (loc, unlock, 1,
9846 build_fold_addr_expr_loc (loc, decl));
9847 }
9848 else
9849 {
9850 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9851 lock = build_call_expr_loc (loc, lock, 0);
9852
9853 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9854 unlock = build_call_expr_loc (loc, unlock, 0);
9855 }
9856
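 /* A sketch of the lowered form for "#pragma omp critical (foo)":

      GOMP_critical_name_start (&.gomp_critical_user_foo);
      ... lowered body ...
      GOMP_critical_name_end (&.gomp_critical_user_foo);

    where .gomp_critical_user_foo is the common symbol created above,
    so every translation unit using the same name shares one lock;
    the unnamed form calls GOMP_critical_start/GOMP_critical_end with
    no argument instead.  */
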
9857 push_gimplify_context ();
9858
9859 block = make_node (BLOCK);
9860 bind = gimple_build_bind (NULL, NULL, block);
9861 gsi_replace (gsi_p, bind, true);
9862 gimple_bind_add_stmt (bind, stmt);
9863
9864 tbody = gimple_bind_body (bind);
9865 gimplify_and_add (lock, &tbody);
9866 gimple_bind_set_body (bind, tbody);
9867
9868 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9869 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9870 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9871 gimple_omp_set_body (stmt, NULL);
9872
9873 tbody = gimple_bind_body (bind);
9874 gimplify_and_add (unlock, &tbody);
9875 gimple_bind_set_body (bind, tbody);
9876
9877 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9878
9879 pop_gimplify_context (bind);
9880 gimple_bind_append_vars (bind, ctx->block_vars);
9881 BLOCK_VARS (block) = gimple_bind_vars (bind);
9882 }
9883
9884 /* A subroutine of lower_omp_for. Generate code to emit the predicate
9885 for a lastprivate clause. Given a loop control predicate of (V
9886 cond N2), we gate the clause on (!(V cond N2)). The lowered form
9887 is appended to *DLIST, iterator initialization is appended to
9888 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
9889 to be emitted in a critical section. */
9890
9891 static void
9892 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
9893 gimple_seq *dlist, gimple_seq *clist,
9894 struct omp_context *ctx)
9895 {
9896 tree clauses, cond, vinit;
9897 enum tree_code cond_code;
9898 gimple_seq stmts;
9899
9900 cond_code = fd->loop.cond_code;
9901 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
9902
9903 /* When possible, use a strict equality expression. This can let VRP-type
9904 optimizations deduce the value and remove a copy. */
9905 if (tree_fits_shwi_p (fd->loop.step))
9906 {
9907 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
9908 if (step == 1 || step == -1)
9909 cond_code = EQ_EXPR;
9910 }
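 /* A sketch of why this is sound for, say, "for (i = 0; i < n; i++)"
    with lastprivate: the gate is !(i < n), i.e. i >= n, and the
    thread that executed the sequentially last iteration ends with
    i == n exactly (unit step), while threads whose chunks end
    earlier finish below n and threads that run no iterations keep
    the VINIT value assigned at the end of this function, chosen to
    differ from N2.  So "i == n" is an equivalent, cheaper guard.  */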
9911
9912 tree n2 = fd->loop.n2;
9913 if (fd->collapse > 1
9914 && TREE_CODE (n2) != INTEGER_CST
9915 && gimple_omp_for_combined_into_p (fd->for_stmt))
9916 {
9917 struct omp_context *taskreg_ctx = NULL;
9918 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
9919 {
9920 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
9921 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
9922 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
9923 {
9924 if (gimple_omp_for_combined_into_p (gfor))
9925 {
9926 gcc_assert (ctx->outer->outer
9927 && is_parallel_ctx (ctx->outer->outer));
9928 taskreg_ctx = ctx->outer->outer;
9929 }
9930 else
9931 {
9932 struct omp_for_data outer_fd;
9933 omp_extract_for_data (gfor, &outer_fd, NULL);
9934 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
9935 }
9936 }
9937 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
9938 taskreg_ctx = ctx->outer->outer;
9939 }
9940 else if (is_taskreg_ctx (ctx->outer))
9941 taskreg_ctx = ctx->outer;
9942 if (taskreg_ctx)
9943 {
9944 int i;
9945 tree taskreg_clauses
9946 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
9947 tree innerc = omp_find_clause (taskreg_clauses,
9948 OMP_CLAUSE__LOOPTEMP_);
9949 gcc_assert (innerc);
9950 int count = fd->collapse;
9951 if (fd->non_rect
9952 && fd->last_nonrect == fd->first_nonrect + 1)
9953 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
9954 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
9955 count += 4;
9956 for (i = 0; i < count; i++)
9957 {
9958 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9959 OMP_CLAUSE__LOOPTEMP_);
9960 gcc_assert (innerc);
9961 }
9962 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
9963 OMP_CLAUSE__LOOPTEMP_);
9964 if (innerc)
9965 n2 = fold_convert (TREE_TYPE (n2),
9966 lookup_decl (OMP_CLAUSE_DECL (innerc),
9967 taskreg_ctx));
9968 }
9969 }
9970 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
9971
9972 clauses = gimple_omp_for_clauses (fd->for_stmt);
9973 stmts = NULL;
9974 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
9975 if (!gimple_seq_empty_p (stmts))
9976 {
9977 gimple_seq_add_seq (&stmts, *dlist);
9978 *dlist = stmts;
9979
9980 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
9981 vinit = fd->loop.n1;
9982 if (cond_code == EQ_EXPR
9983 && tree_fits_shwi_p (fd->loop.n2)
9984 && ! integer_zerop (fd->loop.n2))
9985 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9986 else
9987 vinit = unshare_expr (vinit);
9988
9989 /* Initialize the iterator variable, so that threads that don't execute
9990 any iterations don't execute the lastprivate clauses by accident. */
9991 gimplify_assign (fd->loop.v, vinit, body_p);
9992 }
9993 }
9994
9995 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
9996
9997 static tree
9998 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9999 struct walk_stmt_info *wi)
10000 {
10001 gimple *stmt = gsi_stmt (*gsi_p);
10002
10003 *handled_ops_p = true;
10004 switch (gimple_code (stmt))
10005 {
10006 WALK_SUBSTMTS;
10007
10008 case GIMPLE_OMP_FOR:
10009 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10010 && gimple_omp_for_combined_into_p (stmt))
10011 *handled_ops_p = false;
10012 break;
10013
10014 case GIMPLE_OMP_SCAN:
10015 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10016 return integer_zero_node;
10017 default:
10018 break;
10019 }
10020 return NULL;
10021 }
10022
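/* Typical use of omp_find_scan (a sketch mirroring the calls below):

     gimple_stmt_iterator scan_gsi = gsi_none ();
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.val_only = true;
     wi.info = (void *) &scan_gsi;
     walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
     // On success, scan_gsi now addresses the GIMPLE_OMP_SCAN.

   The non-NULL return value (integer_zero_node) merely stops the
   walk early.  */
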
10023 /* Helper function for lower_omp_for, add transformations for a worksharing
10024 loop with scan directives inside it.
10025 For a worksharing loop not combined with simd, transform:
10026 #pragma omp for reduction(inscan,+:r) private(i)
10027 for (i = 0; i < n; i = i + 1)
10028 {
10029 {
10030 update (r);
10031 }
10032 #pragma omp scan inclusive(r)
10033 {
10034 use (r);
10035 }
10036 }
10037
10038 into two worksharing loops + code to merge results:
10039
10040 num_threads = omp_get_num_threads ();
10041 thread_num = omp_get_thread_num ();
10042 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10043 <D.2099>:
10044 var2 = r;
10045 goto <D.2101>;
10046 <D.2100>:
10047 // For UDRs this is UDR init, or if ctors are needed, copy from
10048 // var3 that has been constructed to contain the neutral element.
10049 var2 = 0;
10050 <D.2101>:
10051 ivar = 0;
10052 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10053 // a shared array with num_threads elements and rprivb to a local array
10054 // with a number of elements equal to the number of (contiguous) iterations
10055 // the current thread will perform. The controlb and controlp variables are
10056 // temporaries used to handle deallocation of rprivb at the end of the
10057 // second GOMP_FOR.
10058 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10059 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10060 for (i = 0; i < n; i = i + 1)
10061 {
10062 {
10063 // For UDRs this is UDR init or copy from var3.
10064 r = 0;
10065 // This is the input phase from user code.
10066 update (r);
10067 }
10068 {
10069 // For UDRs this is UDR merge.
10070 var2 = var2 + r;
10071 // Rather than handing it over to the user, save it to the thread's
10072 // local array.
10073 rprivb[ivar] = var2;
10074 // For exclusive scan, the above two statements are swapped.
10075 ivar = ivar + 1;
10076 }
10077 }
10078 // And remember this thread's final value in the shared
10079 // rpriva array.
10080 rpriva[(sizetype) thread_num] = var2;
10081 // If there is more than one thread, compute the inclusive parallel
10082 // scan of the rpriva array using a work-efficient prefix sum.
10083 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10084 <D.2102>:
10085 GOMP_barrier ();
10086 down = 0;
10087 k = 1;
10088 num_threadsu = (unsigned int) num_threads;
10089 thread_numup1 = (unsigned int) thread_num + 1;
10090 <D.2108>:
10091 twok = k << 1;
10092 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10093 <D.2110>:
10094 down = 4294967295;
10095 k = k >> 1;
10096 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10097 <D.2112>:
10098 k = k >> 1;
10099 <D.2111>:
10100 twok = k << 1;
10101 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10102 mul = REALPART_EXPR <cplx>;
10103 ovf = IMAGPART_EXPR <cplx>;
10104 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10105 <D.2116>:
10106 andv = k & down;
10107 andvm1 = andv + 4294967295;
10108 l = mul + andvm1;
10109 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10110 <D.2120>:
10111 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10112 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10113 rpriva[l] = rpriva[l - k] + rpriva[l];
10114 <D.2117>:
10115 if (down == 0) goto <D.2121>; else goto <D.2122>;
10116 <D.2121>:
10117 k = k << 1;
10118 goto <D.2123>;
10119 <D.2122>:
10120 k = k >> 1;
10121 <D.2123>:
10122 GOMP_barrier ();
10123 if (k != 0) goto <D.2108>; else goto <D.2103>;
10124 <D.2103>:
10125 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10126 <D.2124>:
10127 // For UDRs this is UDR init or copy from var3.
10128 var2 = 0;
10129 goto <D.2126>;
10130 <D.2125>:
10131 var2 = rpriva[thread_num - 1];
10132 <D.2126>:
10133 ivar = 0;
10134 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10135 reduction(inscan,+:r) private(i)
10136 for (i = 0; i < n; i = i + 1)
10137 {
10138 {
10139 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10140 r = var2 + rprivb[ivar];
10141 }
10142 {
10143 // This is the scan phase from user code.
10144 use (r);
10145 // Plus a bump of the iterator.
10146 ivar = ivar + 1;
10147 }
10148 } */
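
/* A concrete illustration of the merge step above (assuming 4 threads
   with per-thread partial results s0..s3 and reduction +):

     rpriva before the scan:  [s0, s1, s2, s3]
     rpriva after the scan:   [s0, s0+s1, s0+s1+s2, s0+s1+s2+s3]

   so in the second loop thread 0 starts from the identity and thread
   t > 0 starts from rpriva[t-1], the sum of all earlier threads'
   contributions.  */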
10149
10150 static void
10151 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10152 struct omp_for_data *fd, omp_context *ctx)
10153 {
10154 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10155 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10156
10157 gimple_seq body = gimple_omp_body (stmt);
10158 gimple_stmt_iterator input1_gsi = gsi_none ();
10159 struct walk_stmt_info wi;
10160 memset (&wi, 0, sizeof (wi));
10161 wi.val_only = true;
10162 wi.info = (void *) &input1_gsi;
10163 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10164 gcc_assert (!gsi_end_p (input1_gsi));
10165
10166 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10167 gimple_stmt_iterator gsi = input1_gsi;
10168 gsi_next (&gsi);
10169 gimple_stmt_iterator scan1_gsi = gsi;
10170 gimple *scan_stmt1 = gsi_stmt (gsi);
10171 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10172
10173 gimple_seq input_body = gimple_omp_body (input_stmt1);
10174 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10175 gimple_omp_set_body (input_stmt1, NULL);
10176 gimple_omp_set_body (scan_stmt1, NULL);
10177 gimple_omp_set_body (stmt, NULL);
10178
10179 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10180 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10181 gimple_omp_set_body (stmt, body);
10182 gimple_omp_set_body (input_stmt1, input_body);
10183
10184 gimple_stmt_iterator input2_gsi = gsi_none ();
10185 memset (&wi, 0, sizeof (wi));
10186 wi.val_only = true;
10187 wi.info = (void *) &input2_gsi;
10188 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10189 gcc_assert (!gsi_end_p (input2_gsi));
10190
10191 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10192 gsi = input2_gsi;
10193 gsi_next (&gsi);
10194 gimple_stmt_iterator scan2_gsi = gsi;
10195 gimple *scan_stmt2 = gsi_stmt (gsi);
10196 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10197 gimple_omp_set_body (scan_stmt2, scan_body);
10198
10199 gimple_stmt_iterator input3_gsi = gsi_none ();
10200 gimple_stmt_iterator scan3_gsi = gsi_none ();
10201 gimple_stmt_iterator input4_gsi = gsi_none ();
10202 gimple_stmt_iterator scan4_gsi = gsi_none ();
10203 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10204 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10205 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10206 if (is_for_simd)
10207 {
10208 memset (&wi, 0, sizeof (wi));
10209 wi.val_only = true;
10210 wi.info = (void *) &input3_gsi;
10211 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10212 gcc_assert (!gsi_end_p (input3_gsi));
10213
10214 input_stmt3 = gsi_stmt (input3_gsi);
10215 gsi = input3_gsi;
10216 gsi_next (&gsi);
10217 scan3_gsi = gsi;
10218 scan_stmt3 = gsi_stmt (gsi);
10219 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10220
10221 memset (&wi, 0, sizeof (wi));
10222 wi.val_only = true;
10223 wi.info = (void *) &input4_gsi;
10224 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10225 gcc_assert (!gsi_end_p (input4_gsi));
10226
10227 input_stmt4 = gsi_stmt (input4_gsi);
10228 gsi = input4_gsi;
10229 gsi_next (&gsi);
10230 scan4_gsi = gsi;
10231 scan_stmt4 = gsi_stmt (gsi);
10232 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10233
10234 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10235 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10236 }
10237
10238 tree num_threads = create_tmp_var (integer_type_node);
10239 tree thread_num = create_tmp_var (integer_type_node);
10240 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10241 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10242 gimple *g = gimple_build_call (nthreads_decl, 0);
10243 gimple_call_set_lhs (g, num_threads);
10244 gimple_seq_add_stmt (body_p, g);
10245 g = gimple_build_call (threadnum_decl, 0);
10246 gimple_call_set_lhs (g, thread_num);
10247 gimple_seq_add_stmt (body_p, g);
10248
10249 tree ivar = create_tmp_var (sizetype);
10250 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10251 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10252 tree k = create_tmp_var (unsigned_type_node);
10253 tree l = create_tmp_var (unsigned_type_node);
10254
10255 gimple_seq clist = NULL, mdlist = NULL;
10256 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10257 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10258 gimple_seq scan1_list = NULL, input2_list = NULL;
10259 gimple_seq last_list = NULL, reduc_list = NULL;
10260 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10261 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10262 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10263 {
10264 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10265 tree var = OMP_CLAUSE_DECL (c);
10266 tree new_var = lookup_decl (var, ctx);
10267 tree var3 = NULL_TREE;
10268 tree new_vard = new_var;
10269 if (omp_is_reference (var))
10270 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10271 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10272 {
10273 var3 = maybe_lookup_decl (new_vard, ctx);
10274 if (var3 == new_vard)
10275 var3 = NULL_TREE;
10276 }
10277
10278 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10279 tree rpriva = create_tmp_var (ptype);
10280 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10281 OMP_CLAUSE_DECL (nc) = rpriva;
10282 *cp1 = nc;
10283 cp1 = &OMP_CLAUSE_CHAIN (nc);
10284
10285 tree rprivb = create_tmp_var (ptype);
10286 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10287 OMP_CLAUSE_DECL (nc) = rprivb;
10288 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10289 *cp1 = nc;
10290 cp1 = &OMP_CLAUSE_CHAIN (nc);
10291
10292 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10293 if (new_vard != new_var)
10294 TREE_ADDRESSABLE (var2) = 1;
10295 gimple_add_tmp_var (var2);
10296
10297 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10298 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10299 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10300 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10301 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10302
10303 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10304 thread_num, integer_minus_one_node);
10305 x = fold_convert_loc (clause_loc, sizetype, x);
10306 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10307 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10308 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10309 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10310
10311 x = fold_convert_loc (clause_loc, sizetype, l);
10312 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10313 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10314 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10315 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10316
10317 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10318 x = fold_convert_loc (clause_loc, sizetype, x);
10319 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10320 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10321 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10322 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10323
10324 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10325 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10326 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10327 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10328
10329 tree var4 = is_for_simd ? new_var : var2;
10330 tree var5 = NULL_TREE, var6 = NULL_TREE;
10331 if (is_for_simd)
10332 {
10333 var5 = lookup_decl (var, input_simd_ctx);
10334 var6 = lookup_decl (var, scan_simd_ctx);
10335 if (new_vard != new_var)
10336 {
10337 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10338 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10339 }
10340 }
10341 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10342 {
10343 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10344 tree val = var2;
10345
10346 x = lang_hooks.decls.omp_clause_default_ctor
10347 (c, var2, build_outer_var_ref (var, ctx));
10348 if (x)
10349 gimplify_and_add (x, &clist);
10350
10351 x = build_outer_var_ref (var, ctx);
10352 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10353 x);
10354 gimplify_and_add (x, &thr01_list);
10355
10356 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10357 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10358 if (var3)
10359 {
10360 x = unshare_expr (var4);
10361 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10362 gimplify_and_add (x, &thrn1_list);
10363 x = unshare_expr (var4);
10364 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10365 gimplify_and_add (x, &thr02_list);
10366 }
10367 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10368 {
10369 /* Otherwise, assign to it the identity element. */
10370 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10371 tseq = copy_gimple_seq_and_replace_locals (tseq);
10372 if (!is_for_simd)
10373 {
10374 if (new_vard != new_var)
10375 val = build_fold_addr_expr_loc (clause_loc, val);
10376 SET_DECL_VALUE_EXPR (new_vard, val);
10377 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10378 }
10379 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10380 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10381 lower_omp (&tseq, ctx);
10382 gimple_seq_add_seq (&thrn1_list, tseq);
10383 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10384 lower_omp (&tseq, ctx);
10385 gimple_seq_add_seq (&thr02_list, tseq);
10386 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10387 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10388 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10389 if (y)
10390 SET_DECL_VALUE_EXPR (new_vard, y);
10391 else
10392 {
10393 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10394 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10395 }
10396 }
10397
10398 x = unshare_expr (var4);
10399 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10400 gimplify_and_add (x, &thrn2_list);
10401
10402 if (is_for_simd)
10403 {
10404 x = unshare_expr (rprivb_ref);
10405 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10406 gimplify_and_add (x, &scan1_list);
10407 }
10408 else
10409 {
10410 if (ctx->scan_exclusive)
10411 {
10412 x = unshare_expr (rprivb_ref);
10413 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10414 gimplify_and_add (x, &scan1_list);
10415 }
10416
10417 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10418 tseq = copy_gimple_seq_and_replace_locals (tseq);
10419 SET_DECL_VALUE_EXPR (placeholder, var2);
10420 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10421 lower_omp (&tseq, ctx);
10422 gimple_seq_add_seq (&scan1_list, tseq);
10423
10424 if (ctx->scan_inclusive)
10425 {
10426 x = unshare_expr (rprivb_ref);
10427 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10428 gimplify_and_add (x, &scan1_list);
10429 }
10430 }
10431
10432 x = unshare_expr (rpriva_ref);
10433 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10434 unshare_expr (var4));
10435 gimplify_and_add (x, &mdlist);
10436
10437 x = unshare_expr (is_for_simd ? var6 : new_var);
10438 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10439 gimplify_and_add (x, &input2_list);
10440
10441 val = rprivb_ref;
10442 if (new_vard != new_var)
10443 val = build_fold_addr_expr_loc (clause_loc, val);
10444
10445 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10446 tseq = copy_gimple_seq_and_replace_locals (tseq);
10447 SET_DECL_VALUE_EXPR (new_vard, val);
10448 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10449 if (is_for_simd)
10450 {
10451 SET_DECL_VALUE_EXPR (placeholder, var6);
10452 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10453 }
10454 else
10455 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10456 lower_omp (&tseq, ctx);
10457 if (y)
10458 SET_DECL_VALUE_EXPR (new_vard, y);
10459 else
10460 {
10461 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10462 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10463 }
10464 if (!is_for_simd)
10465 {
10466 SET_DECL_VALUE_EXPR (placeholder, new_var);
10467 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10468 lower_omp (&tseq, ctx);
10469 }
10470 gimple_seq_add_seq (&input2_list, tseq);
10471
10472 x = build_outer_var_ref (var, ctx);
10473 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10474 gimplify_and_add (x, &last_list);
10475
10476 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10477 gimplify_and_add (x, &reduc_list);
10478 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10479 tseq = copy_gimple_seq_and_replace_locals (tseq);
10480 val = rprival_ref;
10481 if (new_vard != new_var)
10482 val = build_fold_addr_expr_loc (clause_loc, val);
10483 SET_DECL_VALUE_EXPR (new_vard, val);
10484 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10485 SET_DECL_VALUE_EXPR (placeholder, var2);
10486 lower_omp (&tseq, ctx);
10487 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10488 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10489 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10490 if (y)
10491 SET_DECL_VALUE_EXPR (new_vard, y);
10492 else
10493 {
10494 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10495 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10496 }
10497 gimple_seq_add_seq (&reduc_list, tseq);
10498 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10499 gimplify_and_add (x, &reduc_list);
10500
10501 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10502 if (x)
10503 gimplify_and_add (x, dlist);
10504 }
10505 else
10506 {
10507 x = build_outer_var_ref (var, ctx);
10508 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10509
10510 x = omp_reduction_init (c, TREE_TYPE (new_var));
10511 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10512 &thrn1_list);
10513 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10514
10515 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10516
10517 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10518 if (code == MINUS_EXPR)
10519 code = PLUS_EXPR;
10520
10521 if (is_for_simd)
10522 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10523 else
10524 {
10525 if (ctx->scan_exclusive)
10526 gimplify_assign (unshare_expr (rprivb_ref), var2,
10527 &scan1_list);
10528 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10529 gimplify_assign (var2, x, &scan1_list);
10530 if (ctx->scan_inclusive)
10531 gimplify_assign (unshare_expr (rprivb_ref), var2,
10532 &scan1_list);
10533 }
10534
10535 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10536 &mdlist);
10537
10538 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10539 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10540
10541 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10542 &last_list);
10543
10544 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10545 unshare_expr (rprival_ref));
10546 gimplify_assign (rprival_ref, x, &reduc_list);
10547 }
10548 }
10549
10550 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10551 gimple_seq_add_stmt (&scan1_list, g);
10552 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10553 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10554 ? scan_stmt4 : scan_stmt2), g);
10555
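/* Create two control temporaries and hand them to both halves of the
   split loop via extra _scantemp_ clauses flagged as control;
   presumably the expander uses these to coordinate allocation and
   deallocation of the scratch arrays shared by the two loops.  */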
10556 tree controlb = create_tmp_var (boolean_type_node);
10557 tree controlp = create_tmp_var (ptr_type_node);
10558 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10559 OMP_CLAUSE_DECL (nc) = controlb;
10560 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10561 *cp1 = nc;
10562 cp1 = &OMP_CLAUSE_CHAIN (nc);
10563 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10564 OMP_CLAUSE_DECL (nc) = controlp;
10565 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10566 *cp1 = nc;
10567 cp1 = &OMP_CLAUSE_CHAIN (nc);
10568 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10569 OMP_CLAUSE_DECL (nc) = controlb;
10570 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10571 *cp2 = nc;
10572 cp2 = &OMP_CLAUSE_CHAIN (nc);
10573 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10574 OMP_CLAUSE_DECL (nc) = controlp;
10575 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10576 *cp2 = nc;
10577 cp2 = &OMP_CLAUSE_CHAIN (nc);
10578
10579 *cp1 = gimple_omp_for_clauses (stmt);
10580 gimple_omp_for_set_clauses (stmt, new_clauses1);
10581 *cp2 = gimple_omp_for_clauses (new_stmt);
10582 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10583
10584 if (is_for_simd)
10585 {
10586 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10587 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10588
10589 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10590 GSI_SAME_STMT);
10591 gsi_remove (&input3_gsi, true);
10592 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10593 GSI_SAME_STMT);
10594 gsi_remove (&scan3_gsi, true);
10595 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10596 GSI_SAME_STMT);
10597 gsi_remove (&input4_gsi, true);
10598 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10599 GSI_SAME_STMT);
10600 gsi_remove (&scan4_gsi, true);
10601 }
10602 else
10603 {
10604 gimple_omp_set_body (scan_stmt1, scan1_list);
10605 gimple_omp_set_body (input_stmt2, input2_list);
10606 }
10607
10608 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10609 GSI_SAME_STMT);
10610 gsi_remove (&input1_gsi, true);
10611 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10612 GSI_SAME_STMT);
10613 gsi_remove (&scan1_gsi, true);
10614 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10615 GSI_SAME_STMT);
10616 gsi_remove (&input2_gsi, true);
10617 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10618 GSI_SAME_STMT);
10619 gsi_remove (&scan2_gsi, true);
10620
10621 gimple_seq_add_seq (body_p, clist);
10622
10623 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10624 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10625 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10626 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10627 gimple_seq_add_stmt (body_p, g);
10628 g = gimple_build_label (lab1);
10629 gimple_seq_add_stmt (body_p, g);
10630 gimple_seq_add_seq (body_p, thr01_list);
10631 g = gimple_build_goto (lab3);
10632 gimple_seq_add_stmt (body_p, g);
10633 g = gimple_build_label (lab2);
10634 gimple_seq_add_stmt (body_p, g);
10635 gimple_seq_add_seq (body_p, thrn1_list);
10636 g = gimple_build_label (lab3);
10637 gimple_seq_add_stmt (body_p, g);
10638
10639 g = gimple_build_assign (ivar, size_zero_node);
10640 gimple_seq_add_stmt (body_p, g);
10641
10642 gimple_seq_add_stmt (body_p, stmt);
10643 gimple_seq_add_seq (body_p, body);
10644 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10645 fd->loop.v));
10646
10647 g = gimple_build_omp_return (true);
10648 gimple_seq_add_stmt (body_p, g);
10649 gimple_seq_add_seq (body_p, mdlist);
10650
10651 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10652 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10653 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10654 gimple_seq_add_stmt (body_p, g);
10655 g = gimple_build_label (lab1);
10656 gimple_seq_add_stmt (body_p, g);
10657
10658 g = omp_build_barrier (NULL);
10659 gimple_seq_add_stmt (body_p, g);
10660
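/* What follows builds, inline, a cooperative parallel prefix over the
   per-thread partial results: K is the current stride, DOWN flips the
   direction once the up-sweep is exhausted, and each step ends in a
   barrier.  Informally, this is the classic work-efficient
   up-sweep/down-sweep scan, generalized to thread counts that are not
   powers of two.  */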
10661 tree down = create_tmp_var (unsigned_type_node);
10662 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10663 gimple_seq_add_stmt (body_p, g);
10664
10665 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10666 gimple_seq_add_stmt (body_p, g);
10667
10668 tree num_threadsu = create_tmp_var (unsigned_type_node);
10669 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10670 gimple_seq_add_stmt (body_p, g);
10671
10672 tree thread_numu = create_tmp_var (unsigned_type_node);
10673 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10674 gimple_seq_add_stmt (body_p, g);
10675
10676 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10677 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10678 build_int_cst (unsigned_type_node, 1));
10679 gimple_seq_add_stmt (body_p, g);
10680
10681 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10682 g = gimple_build_label (lab3);
10683 gimple_seq_add_stmt (body_p, g);
10684
10685 tree twok = create_tmp_var (unsigned_type_node);
10686 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10687 gimple_seq_add_stmt (body_p, g);
10688
10689 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10690 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10691 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10692 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10693 gimple_seq_add_stmt (body_p, g);
10694 g = gimple_build_label (lab4);
10695 gimple_seq_add_stmt (body_p, g);
10696 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10697 gimple_seq_add_stmt (body_p, g);
10698 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10699 gimple_seq_add_stmt (body_p, g);
10700
10701 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10702 gimple_seq_add_stmt (body_p, g);
10703 g = gimple_build_label (lab6);
10704 gimple_seq_add_stmt (body_p, g);
10705
10706 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10707 gimple_seq_add_stmt (body_p, g);
10708
10709 g = gimple_build_label (lab5);
10710 gimple_seq_add_stmt (body_p, g);
10711
10712 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10713 gimple_seq_add_stmt (body_p, g);
10714
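/* Compute (THREAD_NUM + 1) * 2K with an explicit overflow check; a
   thread whose slot index overflows has no element to combine at this
   stride and skips straight to the barrier.  */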
10715 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10716 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10717 gimple_call_set_lhs (g, cplx);
10718 gimple_seq_add_stmt (body_p, g);
10719 tree mul = create_tmp_var (unsigned_type_node);
10720 g = gimple_build_assign (mul, REALPART_EXPR,
10721 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10722 gimple_seq_add_stmt (body_p, g);
10723 tree ovf = create_tmp_var (unsigned_type_node);
10724 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10725 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10726 gimple_seq_add_stmt (body_p, g);
10727
10728 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10729 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10730 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10731 lab7, lab8);
10732 gimple_seq_add_stmt (body_p, g);
10733 g = gimple_build_label (lab7);
10734 gimple_seq_add_stmt (body_p, g);
10735
10736 tree andv = create_tmp_var (unsigned_type_node);
10737 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10738 gimple_seq_add_stmt (body_p, g);
10739 tree andvm1 = create_tmp_var (unsigned_type_node);
10740 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10741 build_minus_one_cst (unsigned_type_node));
10742 gimple_seq_add_stmt (body_p, g);
10743
10744 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10745 gimple_seq_add_stmt (body_p, g);
10746
10747 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10748 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10749 gimple_seq_add_stmt (body_p, g);
10750 g = gimple_build_label (lab9);
10751 gimple_seq_add_stmt (body_p, g);
10752 gimple_seq_add_seq (body_p, reduc_list);
10753 g = gimple_build_label (lab8);
10754 gimple_seq_add_stmt (body_p, g);
10755
10756 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10757 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10758 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10759 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10760 lab10, lab11);
10761 gimple_seq_add_stmt (body_p, g);
10762 g = gimple_build_label (lab10);
10763 gimple_seq_add_stmt (body_p, g);
10764 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10765 gimple_seq_add_stmt (body_p, g);
10766 g = gimple_build_goto (lab12);
10767 gimple_seq_add_stmt (body_p, g);
10768 g = gimple_build_label (lab11);
10769 gimple_seq_add_stmt (body_p, g);
10770 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10771 gimple_seq_add_stmt (body_p, g);
10772 g = gimple_build_label (lab12);
10773 gimple_seq_add_stmt (body_p, g);
10774
10775 g = omp_build_barrier (NULL);
10776 gimple_seq_add_stmt (body_p, g);
10777
10778 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10779 lab3, lab2);
10780 gimple_seq_add_stmt (body_p, g);
10781
10782 g = gimple_build_label (lab2);
10783 gimple_seq_add_stmt (body_p, g);
10784
10785 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10786 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10787 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10788 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10789 gimple_seq_add_stmt (body_p, g);
10790 g = gimple_build_label (lab1);
10791 gimple_seq_add_stmt (body_p, g);
10792 gimple_seq_add_seq (body_p, thr02_list);
10793 g = gimple_build_goto (lab3);
10794 gimple_seq_add_stmt (body_p, g);
10795 g = gimple_build_label (lab2);
10796 gimple_seq_add_stmt (body_p, g);
10797 gimple_seq_add_seq (body_p, thrn2_list);
10798 g = gimple_build_label (lab3);
10799 gimple_seq_add_stmt (body_p, g);
10800
10801 g = gimple_build_assign (ivar, size_zero_node);
10802 gimple_seq_add_stmt (body_p, g);
10803 gimple_seq_add_stmt (body_p, new_stmt);
10804 gimple_seq_add_seq (body_p, new_body);
10805
10806 gimple_seq new_dlist = NULL;
10807 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10808 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10809 tree num_threadsm1 = create_tmp_var (integer_type_node);
10810 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10811 integer_minus_one_node);
10812 gimple_seq_add_stmt (&new_dlist, g);
10813 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10814 gimple_seq_add_stmt (&new_dlist, g);
10815 g = gimple_build_label (lab1);
10816 gimple_seq_add_stmt (&new_dlist, g);
10817 gimple_seq_add_seq (&new_dlist, last_list);
10818 g = gimple_build_label (lab2);
10819 gimple_seq_add_stmt (&new_dlist, g);
10820 gimple_seq_add_seq (&new_dlist, *dlist);
10821 *dlist = new_dlist;
10822 }
10823
10824 /* Lower code for an OMP loop directive. */
10825
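/* A rough sketch of the result (ignoring OpenACC head/tail markers,
   scan handling and task reductions):

     bind {
       <input clauses: firstprivate/reduction setup, pre-body>
       GIMPLE_OMP_FOR
       <loop body>
       GIMPLE_OMP_CONTINUE (V, V)
       <reduction merges, lastprivate copy-out, destructors>
       GIMPLE_OMP_RETURN (nowait if specified)
     }  */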
10826 static void
10827 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10828 {
10829 tree *rhs_p, block;
10830 struct omp_for_data fd, *fdp = NULL;
10831 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10832 gbind *new_stmt;
10833 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10834 gimple_seq cnt_list = NULL, clist = NULL;
10835 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10836 size_t i;
10837
10838 push_gimplify_context ();
10839
10840 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10841
10842 block = make_node (BLOCK);
10843 new_stmt = gimple_build_bind (NULL, NULL, block);
10844 /* Replace at gsi right away, so that 'stmt' is no longer a member
10845 of a sequence, as we're going to add it to a different one
10846 below. */
10847 gsi_replace (gsi_p, new_stmt, true);
10848
10849 /* Move the declarations of temporaries in the loop body to the
10850 enclosing bind before we make the loop body go away. */
10851 omp_for_body = gimple_omp_body (stmt);
10852 if (!gimple_seq_empty_p (omp_for_body)
10853 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10854 {
10855 gbind *inner_bind
10856 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10857 tree vars = gimple_bind_vars (inner_bind);
10858 gimple_bind_append_vars (new_stmt, vars);
10859 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, so don't
10860 keep them on the inner_bind and its block. */
10861 gimple_bind_set_vars (inner_bind, NULL_TREE);
10862 if (gimple_bind_block (inner_bind))
10863 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10864 }
10865
10866 if (gimple_omp_for_combined_into_p (stmt))
10867 {
10868 omp_extract_for_data (stmt, &fd, NULL);
10869 fdp = &fd;
10870
10871 /* We need two temporaries with fd.loop.v type (istart/iend)
10872 and then (fd.collapse - 1) temporaries with the same
10873 type for count2 ... countN-1 vars if not constant. */
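/* E.g., a combined construct with collapse(2) and a non-constant
   bound needs three temporaries: istart, iend and one count var.  */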
10874 size_t count = 2;
10875 tree type = fd.iter_type;
10876 if (fd.collapse > 1
10877 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10878 count += fd.collapse - 1;
10879 size_t count2 = 0;
10880 tree type2 = NULL_TREE;
10881 bool taskreg_for
10882 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10883 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10884 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10885 tree simtc = NULL;
10886 tree clauses = *pc;
10887 if (fd.collapse > 1
10888 && fd.non_rect
10889 && fd.last_nonrect == fd.first_nonrect + 1
10890 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10891 if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
10892 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10893 {
10894 v = gimple_omp_for_index (stmt, fd.first_nonrect);
10895 type2 = TREE_TYPE (v);
10896 count++;
10897 count2 = 3;
10898 }
10899 if (taskreg_for)
10900 outerc
10901 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10902 OMP_CLAUSE__LOOPTEMP_);
10903 if (ctx->simt_stmt)
10904 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10905 OMP_CLAUSE__LOOPTEMP_);
10906 for (i = 0; i < count + count2; i++)
10907 {
10908 tree temp;
10909 if (taskreg_for)
10910 {
10911 gcc_assert (outerc);
10912 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10913 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10914 OMP_CLAUSE__LOOPTEMP_);
10915 }
10916 else
10917 {
10918 /* If there are 2 adjacent SIMD stmts, one with _simt_
10919 clause, another without, make sure they have the same
10920 decls in _looptemp_ clauses, because the outer stmt
10921 they are combined into will look up just one inner_stmt. */
10922 if (ctx->simt_stmt)
10923 temp = OMP_CLAUSE_DECL (simtc);
10924 else
10925 temp = create_tmp_var (i >= count ? type2 : type);
10926 insert_decl_map (&ctx->outer->cb, temp, temp);
10927 }
10928 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10929 OMP_CLAUSE_DECL (*pc) = temp;
10930 pc = &OMP_CLAUSE_CHAIN (*pc);
10931 if (ctx->simt_stmt)
10932 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10933 OMP_CLAUSE__LOOPTEMP_);
10934 }
10935 *pc = clauses;
10936 }
10937
10938 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10939 dlist = NULL;
10940 body = NULL;
10941 tree rclauses
10942 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10943 OMP_CLAUSE_REDUCTION);
10944 tree rtmp = NULL_TREE;
10945 if (rclauses)
10946 {
10947 tree type = build_pointer_type (pointer_sized_int_node);
10948 tree temp = create_tmp_var (type);
10949 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10950 OMP_CLAUSE_DECL (c) = temp;
10951 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10952 gimple_omp_for_set_clauses (stmt, c);
10953 lower_omp_task_reductions (ctx, OMP_FOR,
10954 gimple_omp_for_clauses (stmt),
10955 &tred_ilist, &tred_dlist);
10956 rclauses = c;
10957 rtmp = make_ssa_name (type);
10958 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10959 }
10960
10961 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10962 ctx);
10963
10964 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10965 fdp);
10966 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10967 gimple_omp_for_pre_body (stmt));
10968
10969 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10970
10971 /* Lower the header expressions. At this point, we can assume that
10972 the header is of the form:
10973
10974 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10975
10976 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10977 using the .omp_data_s mapping, if needed. */
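/* For non-rectangular loops a bound may instead be a TREE_VEC
   describing OUTER-VAR * A + B; only the A and B operands (elements
   1 and 2) may need lowering in that case.  */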
10978 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10979 {
10980 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10981 if (TREE_CODE (*rhs_p) == TREE_VEC)
10982 {
10983 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10984 TREE_VEC_ELT (*rhs_p, 1)
10985 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
10986 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
10987 TREE_VEC_ELT (*rhs_p, 2)
10988 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
10989 }
10990 else if (!is_gimple_min_invariant (*rhs_p))
10991 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10992 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10993 recompute_tree_invariant_for_addr_expr (*rhs_p);
10994
10995 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10996 if (TREE_CODE (*rhs_p) == TREE_VEC)
10997 {
10998 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
10999 TREE_VEC_ELT (*rhs_p, 1)
11000 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11001 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11002 TREE_VEC_ELT (*rhs_p, 2)
11003 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11004 }
11005 else if (!is_gimple_min_invariant (*rhs_p))
11006 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11007 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11008 recompute_tree_invariant_for_addr_expr (*rhs_p);
11009
11010 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11011 if (!is_gimple_min_invariant (*rhs_p))
11012 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11013 }
11014 if (rclauses)
11015 gimple_seq_add_seq (&tred_ilist, cnt_list);
11016 else
11017 gimple_seq_add_seq (&body, cnt_list);
11018
11019 /* Once lowered, extract the bounds and clauses. */
11020 omp_extract_for_data (stmt, &fd, NULL);
11021
11022 if (is_gimple_omp_oacc (ctx->stmt)
11023 && !ctx_in_oacc_kernels_region (ctx))
11024 lower_oacc_head_tail (gimple_location (stmt),
11025 gimple_omp_for_clauses (stmt),
11026 &oacc_head, &oacc_tail, ctx);
11027
11028 /* Add OpenACC partitioning and reduction markers just before the loop. */
11029 if (oacc_head)
11030 gimple_seq_add_seq (&body, oacc_head);
11031
11032 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
11033
11034 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11035 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11036 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11037 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11038 {
11039 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11040 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11041 OMP_CLAUSE_LINEAR_STEP (c)
11042 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11043 ctx);
11044 }
11045
11046 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11047 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
11048 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
11049 else
11050 {
11051 gimple_seq_add_stmt (&body, stmt);
11052 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
11053 }
11054
11055 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11056 fd.loop.v));
11057
11058 /* After the loop, add exit clauses. */
11059 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
11060
11061 if (clist)
11062 {
11063 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
11064 gcall *g = gimple_build_call (fndecl, 0);
11065 gimple_seq_add_stmt (&body, g);
11066 gimple_seq_add_seq (&body, clist);
11067 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
11068 g = gimple_build_call (fndecl, 0);
11069 gimple_seq_add_stmt (&body, g);
11070 }
11071
11072 if (ctx->cancellable)
11073 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
11074
11075 gimple_seq_add_seq (&body, dlist);
11076
11077 if (rclauses)
11078 {
11079 gimple_seq_add_seq (&tred_ilist, body);
11080 body = tred_ilist;
11081 }
11082
11083 body = maybe_catch_exception (body);
11084
11085 /* Region exit marker goes at the end of the loop body. */
11086 gimple *g = gimple_build_omp_return (fd.have_nowait);
11087 gimple_seq_add_stmt (&body, g);
11088
11089 gimple_seq_add_seq (&body, tred_dlist);
11090
11091 maybe_add_implicit_barrier_cancel (ctx, g, &body);
11092
11093 if (rclauses)
11094 OMP_CLAUSE_DECL (rclauses) = rtmp;
11095
11096 /* Add OpenACC joining and reduction markers just after the loop. */
11097 if (oacc_tail)
11098 gimple_seq_add_seq (&body, oacc_tail);
11099
11100 pop_gimplify_context (new_stmt);
11101
11102 gimple_bind_append_vars (new_stmt, ctx->block_vars);
11103 maybe_remove_omp_member_access_dummy_vars (new_stmt);
11104 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
11105 if (BLOCK_VARS (block))
11106 TREE_USED (block) = 1;
11107
11108 gimple_bind_set_body (new_stmt, body);
11109 gimple_omp_set_body (stmt, NULL);
11110 gimple_omp_for_set_pre_body (stmt, NULL);
11111 }
11112
11113 /* Callback for walk_stmts. Sets *WI->INFO to 1 if the walked body
11114 contains exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS, to -1 otherwise. */
11115
11116 static tree
11117 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11118 bool *handled_ops_p,
11119 struct walk_stmt_info *wi)
11120 {
11121 int *info = (int *) wi->info;
11122 gimple *stmt = gsi_stmt (*gsi_p);
11123
11124 *handled_ops_p = true;
11125 switch (gimple_code (stmt))
11126 {
11127 WALK_SUBSTMTS;
11128
11129 case GIMPLE_DEBUG:
11130 break;
11131 case GIMPLE_OMP_FOR:
11132 case GIMPLE_OMP_SECTIONS:
11133 *info = *info == 0 ? 1 : -1;
11134 break;
11135 default:
11136 *info = -1;
11137 break;
11138 }
11139 return NULL;
11140 }
11141
11142 struct omp_taskcopy_context
11143 {
11144 /* This field must be at the beginning, as we do "inheritance": Some
11145 callback functions for tree-inline.c (e.g., omp_copy_decl)
11146 receive a copy_body_data pointer that is up-casted to an
11147 omp_context pointer. */
11148 copy_body_data cb;
11149 omp_context *ctx;
11150 };
11151
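/* copy_decl callback for the task copy function below: variables that
   have a field in the task's send record get a fresh temporary in the
   copy function; everything else stays shared with the parent.  */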
11152 static tree
11153 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11154 {
11155 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11156
11157 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11158 return create_tmp_var (TREE_TYPE (var));
11159
11160 return var;
11161 }
11162
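/* Rebuild ORIG_TYPE (a task record type containing variably modified
   fields) with every field type and size remapped through TCCTX, so
   that the sizes refer to decls valid in the copy function.  */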
11163 static tree
11164 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11165 {
11166 tree name, new_fields = NULL, type, f;
11167
11168 type = lang_hooks.types.make_type (RECORD_TYPE);
11169 name = DECL_NAME (TYPE_NAME (orig_type));
11170 name = build_decl (gimple_location (tcctx->ctx->stmt),
11171 TYPE_DECL, name, type);
11172 TYPE_NAME (type) = name;
11173
11174 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11175 {
11176 tree new_f = copy_node (f);
11177 DECL_CONTEXT (new_f) = type;
11178 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11179 TREE_CHAIN (new_f) = new_fields;
11180 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11181 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11182 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11183 &tcctx->cb, NULL);
11184 new_fields = new_f;
11185 tcctx->cb.decl_map->put (f, new_f);
11186 }
11187 TYPE_FIELDS (type) = nreverse (new_fields);
11188 layout_type (type);
11189 return type;
11190 }
11191
11192 /* Create the task copy function, invoked by GOMP_task to copy-construct the task's data block (ARG) from the spawning thread's send block (SARG). */
11193
11194 static void
11195 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11196 {
11197 struct function *child_cfun;
11198 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11199 tree record_type, srecord_type, bind, list;
11200 bool record_needs_remap = false, srecord_needs_remap = false;
11201 splay_tree_node n;
11202 struct omp_taskcopy_context tcctx;
11203 location_t loc = gimple_location (task_stmt);
11204 size_t looptempno = 0;
11205
11206 child_fn = gimple_omp_task_copy_fn (task_stmt);
11207 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11208 gcc_assert (child_cfun->cfg == NULL);
11209 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11210
11211 /* Reset DECL_CONTEXT on function arguments. */
11212 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11213 DECL_CONTEXT (t) = child_fn;
11214
11215 /* Populate the function. */
11216 push_gimplify_context ();
11217 push_cfun (child_cfun);
11218
11219 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11220 TREE_SIDE_EFFECTS (bind) = 1;
11221 list = NULL;
11222 DECL_SAVED_TREE (child_fn) = bind;
11223 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11224
11225 /* Remap src and dst argument types if needed. */
11226 record_type = ctx->record_type;
11227 srecord_type = ctx->srecord_type;
11228 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11229 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11230 {
11231 record_needs_remap = true;
11232 break;
11233 }
11234 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11235 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11236 {
11237 srecord_needs_remap = true;
11238 break;
11239 }
11240
11241 if (record_needs_remap || srecord_needs_remap)
11242 {
11243 memset (&tcctx, '\0', sizeof (tcctx));
11244 tcctx.cb.src_fn = ctx->cb.src_fn;
11245 tcctx.cb.dst_fn = child_fn;
11246 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11247 gcc_checking_assert (tcctx.cb.src_node);
11248 tcctx.cb.dst_node = tcctx.cb.src_node;
11249 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11250 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11251 tcctx.cb.eh_lp_nr = 0;
11252 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11253 tcctx.cb.decl_map = new hash_map<tree, tree>;
11254 tcctx.ctx = ctx;
11255
11256 if (record_needs_remap)
11257 record_type = task_copyfn_remap_type (&tcctx, record_type);
11258 if (srecord_needs_remap)
11259 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11260 }
11261 else
11262 tcctx.cb.decl_map = NULL;
11263
11264 arg = DECL_ARGUMENTS (child_fn);
11265 TREE_TYPE (arg) = build_pointer_type (record_type);
11266 sarg = DECL_CHAIN (arg);
11267 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11268
11269 /* First pass: initialize temporaries used in record_type and srecord_type
11270 sizes and field offsets. */
11271 if (tcctx.cb.decl_map)
11272 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11273 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11274 {
11275 tree *p;
11276
11277 decl = OMP_CLAUSE_DECL (c);
11278 p = tcctx.cb.decl_map->get (decl);
11279 if (p == NULL)
11280 continue;
11281 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11282 sf = (tree) n->value;
11283 sf = *tcctx.cb.decl_map->get (sf);
11284 src = build_simple_mem_ref_loc (loc, sarg);
11285 src = omp_build_component_ref (src, sf);
11286 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11287 append_to_statement_list (t, &list);
11288 }
11289
11290 /* Second pass: copy shared var pointers and copy construct non-VLA
11291 firstprivate vars. */
11292 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11293 switch (OMP_CLAUSE_CODE (c))
11294 {
11295 splay_tree_key key;
11296 case OMP_CLAUSE_SHARED:
11297 decl = OMP_CLAUSE_DECL (c);
11298 key = (splay_tree_key) decl;
11299 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11300 key = (splay_tree_key) &DECL_UID (decl);
11301 n = splay_tree_lookup (ctx->field_map, key);
11302 if (n == NULL)
11303 break;
11304 f = (tree) n->value;
11305 if (tcctx.cb.decl_map)
11306 f = *tcctx.cb.decl_map->get (f);
11307 n = splay_tree_lookup (ctx->sfield_map, key);
11308 sf = (tree) n->value;
11309 if (tcctx.cb.decl_map)
11310 sf = *tcctx.cb.decl_map->get (sf);
11311 src = build_simple_mem_ref_loc (loc, sarg);
11312 src = omp_build_component_ref (src, sf);
11313 dst = build_simple_mem_ref_loc (loc, arg);
11314 dst = omp_build_component_ref (dst, f);
11315 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11316 append_to_statement_list (t, &list);
11317 break;
11318 case OMP_CLAUSE_REDUCTION:
11319 case OMP_CLAUSE_IN_REDUCTION:
11320 decl = OMP_CLAUSE_DECL (c);
11321 if (TREE_CODE (decl) == MEM_REF)
11322 {
11323 decl = TREE_OPERAND (decl, 0);
11324 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11325 decl = TREE_OPERAND (decl, 0);
11326 if (TREE_CODE (decl) == INDIRECT_REF
11327 || TREE_CODE (decl) == ADDR_EXPR)
11328 decl = TREE_OPERAND (decl, 0);
11329 }
11330 key = (splay_tree_key) decl;
11331 n = splay_tree_lookup (ctx->field_map, key);
11332 if (n == NULL)
11333 break;
11334 f = (tree) n->value;
11335 if (tcctx.cb.decl_map)
11336 f = *tcctx.cb.decl_map->get (f);
11337 n = splay_tree_lookup (ctx->sfield_map, key);
11338 sf = (tree) n->value;
11339 if (tcctx.cb.decl_map)
11340 sf = *tcctx.cb.decl_map->get (sf);
11341 src = build_simple_mem_ref_loc (loc, sarg);
11342 src = omp_build_component_ref (src, sf);
11343 if (decl != OMP_CLAUSE_DECL (c)
11344 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11345 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11346 src = build_simple_mem_ref_loc (loc, src);
11347 dst = build_simple_mem_ref_loc (loc, arg);
11348 dst = omp_build_component_ref (dst, f);
11349 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11350 append_to_statement_list (t, &list);
11351 break;
11352 case OMP_CLAUSE__LOOPTEMP_:
11353 /* Fields for first two _looptemp_ clauses are initialized by
11354 GOMP_taskloop*, the rest are handled like firstprivate. */
11355 if (looptempno < 2)
11356 {
11357 looptempno++;
11358 break;
11359 }
11360 /* FALLTHRU */
11361 case OMP_CLAUSE__REDUCTEMP_:
11362 case OMP_CLAUSE_FIRSTPRIVATE:
11363 decl = OMP_CLAUSE_DECL (c);
11364 if (is_variable_sized (decl))
11365 break;
11366 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11367 if (n == NULL)
11368 break;
11369 f = (tree) n->value;
11370 if (tcctx.cb.decl_map)
11371 f = *tcctx.cb.decl_map->get (f);
11372 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11373 if (n != NULL)
11374 {
11375 sf = (tree) n->value;
11376 if (tcctx.cb.decl_map)
11377 sf = *tcctx.cb.decl_map->get (sf);
11378 src = build_simple_mem_ref_loc (loc, sarg);
11379 src = omp_build_component_ref (src, sf);
11380 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
11381 src = build_simple_mem_ref_loc (loc, src);
11382 }
11383 else
11384 src = decl;
11385 dst = build_simple_mem_ref_loc (loc, arg);
11386 dst = omp_build_component_ref (dst, f);
11387 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
11388 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11389 else
11390 {
11391 if (ctx->allocate_map)
11392 if (tree *allocatorp = ctx->allocate_map->get (decl))
11393 {
11394 tree allocator = *allocatorp;
11395 if (TREE_CODE (allocator) != INTEGER_CST)
11396 {
11397 n = splay_tree_lookup (ctx->sfield_map,
11398 (splay_tree_key) allocator);
11399 allocator = (tree) n->value;
11400 if (tcctx.cb.decl_map)
11401 allocator = *tcctx.cb.decl_map->get (allocator);
11402 tree a = build_simple_mem_ref_loc (loc, sarg);
11403 allocator = omp_build_component_ref (a, allocator);
11404 }
11405 allocator = fold_convert (pointer_sized_int_node, allocator);
11406 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
11407 tree align = build_int_cst (size_type_node,
11408 DECL_ALIGN_UNIT (decl));
11409 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
11410 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
11411 allocator);
11412 ptr = fold_convert (TREE_TYPE (dst), ptr);
11413 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
11414 append_to_statement_list (t, &list);
11415 dst = build_simple_mem_ref_loc (loc, dst);
11416 }
11417 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11418 }
11419 append_to_statement_list (t, &list);
11420 break;
11421 case OMP_CLAUSE_PRIVATE:
11422 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
11423 break;
11424 decl = OMP_CLAUSE_DECL (c);
11425 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11426 f = (tree) n->value;
11427 if (tcctx.cb.decl_map)
11428 f = *tcctx.cb.decl_map->get (f);
11429 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11430 if (n != NULL)
11431 {
11432 sf = (tree) n->value;
11433 if (tcctx.cb.decl_map)
11434 sf = *tcctx.cb.decl_map->get (sf);
11435 src = build_simple_mem_ref_loc (loc, sarg);
11436 src = omp_build_component_ref (src, sf);
11437 if (use_pointer_for_field (decl, NULL))
11438 src = build_simple_mem_ref_loc (loc, src);
11439 }
11440 else
11441 src = decl;
11442 dst = build_simple_mem_ref_loc (loc, arg);
11443 dst = omp_build_component_ref (dst, f);
11444 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11445 append_to_statement_list (t, &list);
11446 break;
11447 default:
11448 break;
11449 }
11450
11451 /* Last pass: handle VLA firstprivates. */
11452 if (tcctx.cb.decl_map)
11453 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11454 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11455 {
11456 tree ind, ptr, df;
11457
11458 decl = OMP_CLAUSE_DECL (c);
11459 if (!is_variable_sized (decl))
11460 continue;
11461 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11462 if (n == NULL)
11463 continue;
11464 f = (tree) n->value;
11465 f = *tcctx.cb.decl_map->get (f);
11466 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11467 ind = DECL_VALUE_EXPR (decl);
11468 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11469 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11470 n = splay_tree_lookup (ctx->sfield_map,
11471 (splay_tree_key) TREE_OPERAND (ind, 0));
11472 sf = (tree) n->value;
11473 sf = *tcctx.cb.decl_map->get (sf);
11474 src = build_simple_mem_ref_loc (loc, sarg);
11475 src = omp_build_component_ref (src, sf);
11476 src = build_simple_mem_ref_loc (loc, src);
11477 dst = build_simple_mem_ref_loc (loc, arg);
11478 dst = omp_build_component_ref (dst, f);
11479 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11480 append_to_statement_list (t, &list);
11481 n = splay_tree_lookup (ctx->field_map,
11482 (splay_tree_key) TREE_OPERAND (ind, 0));
11483 df = (tree) n->value;
11484 df = *tcctx.cb.decl_map->get (df);
11485 ptr = build_simple_mem_ref_loc (loc, arg);
11486 ptr = omp_build_component_ref (ptr, df);
11487 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11488 build_fold_addr_expr_loc (loc, dst));
11489 append_to_statement_list (t, &list);
11490 }
11491
11492 t = build1 (RETURN_EXPR, void_type_node, NULL);
11493 append_to_statement_list (t, &list);
11494
11495 if (tcctx.cb.decl_map)
11496 delete tcctx.cb.decl_map;
11497 pop_gimplify_context (NULL);
11498 BIND_EXPR_BODY (bind) = list;
11499 pop_cfun ();
11500 }
11501
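/* Lower OMP_CLAUSE_DEPEND clauses in *PCLAUSES into an array of
   dependence addresses for the runtime, emitting setup code to *ISEQ
   and a clobber of the array to *OSEQ.  With only in/out/inout
   dependences the layout is { N, N-out, addr... }; if mutexinoutset
   or depobj dependences appear, a 0 sentinel is prepended and the
   per-kind counts fill slots 2-4 (layout inferred from the code
   below; libgomp is the consumer).  */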
11502 static void
11503 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
11504 {
11505 tree c, clauses;
11506 gimple *g;
11507 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
11508
11509 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
11510 gcc_assert (clauses);
11511 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11512 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
11513 switch (OMP_CLAUSE_DEPEND_KIND (c))
11514 {
11515 case OMP_CLAUSE_DEPEND_LAST:
11516 /* Lowering already done at gimplification. */
11517 return;
11518 case OMP_CLAUSE_DEPEND_IN:
11519 cnt[2]++;
11520 break;
11521 case OMP_CLAUSE_DEPEND_OUT:
11522 case OMP_CLAUSE_DEPEND_INOUT:
11523 cnt[0]++;
11524 break;
11525 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11526 cnt[1]++;
11527 break;
11528 case OMP_CLAUSE_DEPEND_DEPOBJ:
11529 cnt[3]++;
11530 break;
11531 case OMP_CLAUSE_DEPEND_SOURCE:
11532 case OMP_CLAUSE_DEPEND_SINK:
11533 /* FALLTHRU */
11534 default:
11535 gcc_unreachable ();
11536 }
11537 if (cnt[1] || cnt[3])
11538 idx = 5;
11539 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
11540 tree type = build_array_type_nelts (ptr_type_node, total + idx);
11541 tree array = create_tmp_var (type);
11542 TREE_ADDRESSABLE (array) = 1;
11543 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
11544 NULL_TREE);
11545 if (idx == 5)
11546 {
11547 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
11548 gimple_seq_add_stmt (iseq, g);
11549 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
11550 NULL_TREE);
11551 }
11552 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
11553 gimple_seq_add_stmt (iseq, g);
11554 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
11555 {
11556 r = build4 (ARRAY_REF, ptr_type_node, array,
11557 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
11558 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
11559 gimple_seq_add_stmt (iseq, g);
11560 }
11561 for (i = 0; i < 4; i++)
11562 {
11563 if (cnt[i] == 0)
11564 continue;
11565 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
11566 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
11567 continue;
11568 else
11569 {
11570 switch (OMP_CLAUSE_DEPEND_KIND (c))
11571 {
11572 case OMP_CLAUSE_DEPEND_IN:
11573 if (i != 2)
11574 continue;
11575 break;
11576 case OMP_CLAUSE_DEPEND_OUT:
11577 case OMP_CLAUSE_DEPEND_INOUT:
11578 if (i != 0)
11579 continue;
11580 break;
11581 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
11582 if (i != 1)
11583 continue;
11584 break;
11585 case OMP_CLAUSE_DEPEND_DEPOBJ:
11586 if (i != 3)
11587 continue;
11588 break;
11589 default:
11590 gcc_unreachable ();
11591 }
11592 tree t = OMP_CLAUSE_DECL (c);
11593 t = fold_convert (ptr_type_node, t);
11594 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
11595 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
11596 NULL_TREE, NULL_TREE);
11597 g = gimple_build_assign (r, t);
11598 gimple_seq_add_stmt (iseq, g);
11599 }
11600 }
11601 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
11602 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
11603 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
11604 OMP_CLAUSE_CHAIN (c) = *pclauses;
11605 *pclauses = c;
11606 tree clobber = build_clobber (type);
11607 g = gimple_build_assign (array, clobber);
11608 gimple_seq_add_stmt (oseq, g);
11609 }
11610
11611 /* Lower the OpenMP parallel or task directive in the current statement
11612 in GSI_P. CTX holds context information for the directive. */
11613
11614 static void
11615 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11616 {
11617 tree clauses;
11618 tree child_fn, t;
11619 gimple *stmt = gsi_stmt (*gsi_p);
11620 gbind *par_bind, *bind, *dep_bind = NULL;
11621 gimple_seq par_body;
11622 location_t loc = gimple_location (stmt);
11623
11624 clauses = gimple_omp_taskreg_clauses (stmt);
11625 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11626 && gimple_omp_task_taskwait_p (stmt))
11627 {
11628 par_bind = NULL;
11629 par_body = NULL;
11630 }
11631 else
11632 {
11633 par_bind
11634 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
11635 par_body = gimple_bind_body (par_bind);
11636 }
11637 child_fn = ctx->cb.dst_fn;
11638 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
11639 && !gimple_omp_parallel_combined_p (stmt))
11640 {
11641 struct walk_stmt_info wi;
11642 int ws_num = 0;
11643
11644 memset (&wi, 0, sizeof (wi));
11645 wi.info = &ws_num;
11646 wi.val_only = true;
11647 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
11648 if (ws_num == 1)
11649 gimple_omp_parallel_set_combined_p (stmt, true);
11650 }
11651 gimple_seq dep_ilist = NULL;
11652 gimple_seq dep_olist = NULL;
11653 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11654 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11655 {
11656 push_gimplify_context ();
11657 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11658 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
11659 &dep_ilist, &dep_olist);
11660 }
11661
11662 if (gimple_code (stmt) == GIMPLE_OMP_TASK
11663 && gimple_omp_task_taskwait_p (stmt))
11664 {
11665 if (dep_bind)
11666 {
11667 gsi_replace (gsi_p, dep_bind, true);
11668 gimple_bind_add_seq (dep_bind, dep_ilist);
11669 gimple_bind_add_stmt (dep_bind, stmt);
11670 gimple_bind_add_seq (dep_bind, dep_olist);
11671 pop_gimplify_context (dep_bind);
11672 }
11673 return;
11674 }
11675
11676 if (ctx->srecord_type)
11677 create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
11678
11679 gimple_seq tskred_ilist = NULL;
11680 gimple_seq tskred_olist = NULL;
11681 if ((is_task_ctx (ctx)
11682 && gimple_omp_task_taskloop_p (ctx->stmt)
11683 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
11684 OMP_CLAUSE_REDUCTION))
11685 || (is_parallel_ctx (ctx)
11686 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
11687 OMP_CLAUSE__REDUCTEMP_)))
11688 {
11689 if (dep_bind == NULL)
11690 {
11691 push_gimplify_context ();
11692 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11693 }
11694 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
11695 : OMP_PARALLEL,
11696 gimple_omp_taskreg_clauses (ctx->stmt),
11697 &tskred_ilist, &tskred_olist);
11698 }
11699
11700 push_gimplify_context ();
11701
11702 gimple_seq par_olist = NULL;
11703 gimple_seq par_ilist = NULL;
11704 gimple_seq par_rlist = NULL;
11705 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
11706 lower_omp (&par_body, ctx);
11707 if (gimple_code (stmt) != GIMPLE_OMP_TASK)
11708 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
11709
11710 /* Declare all the variables created by mapping and the variables
11711 declared in the scope of the parallel body. */
11712 record_vars_into (ctx->block_vars, child_fn);
11713 maybe_remove_omp_member_access_dummy_vars (par_bind);
11714 record_vars_into (gimple_bind_vars (par_bind), child_fn);
11715
11716 if (ctx->record_type)
11717 {
11718 ctx->sender_decl
11719 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
11720 : ctx->record_type, ".omp_data_o");
11721 DECL_NAMELESS (ctx->sender_decl) = 1;
11722 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11723 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
11724 }
11725
11726 gimple_seq olist = NULL;
11727 gimple_seq ilist = NULL;
11728 lower_send_clauses (clauses, &ilist, &olist, ctx);
11729 lower_send_shared_vars (&ilist, &olist, ctx);
11730
11731 if (ctx->record_type)
11732 {
11733 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
11734 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
11735 clobber));
11736 }
11737
11738 /* Once all the expansions are done, sequence all the different
11739 fragments inside gimple_omp_body. */
11740
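/* Roughly (the dep_* and tskred_* pieces are only present when depend
   clauses resp. task reductions are involved):

     dep_bind {
       dep_ilist; tskred_ilist;
       bind {
	 ilist;
	 GIMPLE_OMP_PARALLEL/TASK {
	   receiver_decl = &sender_decl;
	   par_ilist; par_body; par_rlist; par_olist;
	   GIMPLE_OMP_RETURN
	 }
	 olist;
       }
       tskred_olist; dep_olist;
     }  */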
11741 gimple_seq new_body = NULL;
11742
11743 if (ctx->record_type)
11744 {
11745 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
11746 /* fixup_child_record_type might have changed receiver_decl's type. */
11747 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
11748 gimple_seq_add_stmt (&new_body,
11749 gimple_build_assign (ctx->receiver_decl, t));
11750 }
11751
11752 gimple_seq_add_seq (&new_body, par_ilist);
11753 gimple_seq_add_seq (&new_body, par_body);
11754 gimple_seq_add_seq (&new_body, par_rlist);
11755 if (ctx->cancellable)
11756 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
11757 gimple_seq_add_seq (&new_body, par_olist);
11758 new_body = maybe_catch_exception (new_body);
11759 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
11760 gimple_seq_add_stmt (&new_body,
11761 gimple_build_omp_continue (integer_zero_node,
11762 integer_zero_node));
11763 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
11764 gimple_omp_set_body (stmt, new_body);
11765
11766 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
11767 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11768 else
11769 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
11770 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
11771 gimple_bind_add_seq (bind, ilist);
11772 gimple_bind_add_stmt (bind, stmt);
11773 gimple_bind_add_seq (bind, olist);
11774
11775 pop_gimplify_context (NULL);
11776
11777 if (dep_bind)
11778 {
11779 gimple_bind_add_seq (dep_bind, dep_ilist);
11780 gimple_bind_add_seq (dep_bind, tskred_ilist);
11781 gimple_bind_add_stmt (dep_bind, bind);
11782 gimple_bind_add_seq (dep_bind, tskred_olist);
11783 gimple_bind_add_seq (dep_bind, dep_olist);
11784 pop_gimplify_context (dep_bind);
11785 }
11786 }
11787
11788 /* Lower the GIMPLE_OMP_TARGET in the current statement
11789 in GSI_P. CTX holds context information for the directive. */
11790
11791 static void
11792 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11793 {
11794 tree clauses;
11795 tree child_fn, t, c;
11796 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11797 gbind *tgt_bind, *bind, *dep_bind = NULL;
11798 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11799 location_t loc = gimple_location (stmt);
11800 bool offloaded, data_region;
11801 unsigned int map_cnt = 0;
11802
11803 offloaded = is_gimple_omp_offloaded (stmt);
11804 switch (gimple_omp_target_kind (stmt))
11805 {
11806 case GF_OMP_TARGET_KIND_REGION:
11807 case GF_OMP_TARGET_KIND_UPDATE:
11808 case GF_OMP_TARGET_KIND_ENTER_DATA:
11809 case GF_OMP_TARGET_KIND_EXIT_DATA:
11810 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11811 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11812 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11813 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11814 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11815 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11816 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
11817 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
11818 data_region = false;
11819 break;
11820 case GF_OMP_TARGET_KIND_DATA:
11821 case GF_OMP_TARGET_KIND_OACC_DATA:
11822 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11823 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
11824 data_region = true;
11825 break;
11826 default:
11827 gcc_unreachable ();
11828 }
11829
11830 clauses = gimple_omp_target_clauses (stmt);
11831
11832 gimple_seq dep_ilist = NULL;
11833 gimple_seq dep_olist = NULL;
11834 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11835 {
11836 push_gimplify_context ();
11837 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11838 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11839 &dep_ilist, &dep_olist);
11840 }
11841
11842 tgt_bind = NULL;
11843 tgt_body = NULL;
11844 if (offloaded)
11845 {
11846 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11847 tgt_body = gimple_bind_body (tgt_bind);
11848 }
11849 else if (data_region)
11850 tgt_body = gimple_omp_body (stmt);
11851 child_fn = ctx->cb.dst_fn;
11852
11853 push_gimplify_context ();
11854 fplist = NULL;
11855
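/* First pass over the clauses: count the entries needed in the map
   arrays (MAP_CNT) and redirect privatized decls into the receiver
   record via DECL_VALUE_EXPR where required.  */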
11856 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11857 switch (OMP_CLAUSE_CODE (c))
11858 {
11859 tree var, x;
11860
11861 default:
11862 break;
11863 case OMP_CLAUSE_MAP:
11864 #if CHECKING_P
11865 /* First check what we're prepared to handle in the following. */
11866 switch (OMP_CLAUSE_MAP_KIND (c))
11867 {
11868 case GOMP_MAP_ALLOC:
11869 case GOMP_MAP_TO:
11870 case GOMP_MAP_FROM:
11871 case GOMP_MAP_TOFROM:
11872 case GOMP_MAP_POINTER:
11873 case GOMP_MAP_TO_PSET:
11874 case GOMP_MAP_DELETE:
11875 case GOMP_MAP_RELEASE:
11876 case GOMP_MAP_ALWAYS_TO:
11877 case GOMP_MAP_ALWAYS_FROM:
11878 case GOMP_MAP_ALWAYS_TOFROM:
11879 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11880 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11881 case GOMP_MAP_STRUCT:
11882 case GOMP_MAP_ALWAYS_POINTER:
11883 case GOMP_MAP_ATTACH:
11884 case GOMP_MAP_DETACH:
11885 break;
11886 case GOMP_MAP_IF_PRESENT:
11887 case GOMP_MAP_FORCE_ALLOC:
11888 case GOMP_MAP_FORCE_TO:
11889 case GOMP_MAP_FORCE_FROM:
11890 case GOMP_MAP_FORCE_TOFROM:
11891 case GOMP_MAP_FORCE_PRESENT:
11892 case GOMP_MAP_FORCE_DEVICEPTR:
11893 case GOMP_MAP_DEVICE_RESIDENT:
11894 case GOMP_MAP_LINK:
11895 case GOMP_MAP_FORCE_DETACH:
11896 gcc_assert (is_gimple_omp_oacc (stmt));
11897 break;
11898 default:
11899 gcc_unreachable ();
11900 }
11901 #endif
11902 /* FALLTHRU */
11903 case OMP_CLAUSE_TO:
11904 case OMP_CLAUSE_FROM:
11905 oacc_firstprivate:
11906 var = OMP_CLAUSE_DECL (c);
11907 if (!DECL_P (var))
11908 {
11909 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11910 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11911 && (OMP_CLAUSE_MAP_KIND (c)
11912 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11913 map_cnt++;
11914 continue;
11915 }
11916
11917 if (DECL_SIZE (var)
11918 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11919 {
11920 tree var2 = DECL_VALUE_EXPR (var);
11921 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11922 var2 = TREE_OPERAND (var2, 0);
11923 gcc_assert (DECL_P (var2));
11924 var = var2;
11925 }
11926
11927 if (offloaded
11928 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11929 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11930 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11931 {
11932 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11933 {
11934 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11935 && varpool_node::get_create (var)->offloadable)
11936 continue;
11937
11938 tree type = build_pointer_type (TREE_TYPE (var));
11939 tree new_var = lookup_decl (var, ctx);
11940 x = create_tmp_var_raw (type, get_name (new_var));
11941 gimple_add_tmp_var (x);
11942 x = build_simple_mem_ref (x);
11943 SET_DECL_VALUE_EXPR (new_var, x);
11944 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11945 }
11946 continue;
11947 }
11948
11949 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11950 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
11951 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
11952 && is_omp_target (stmt))
11953 {
11954 gcc_assert (maybe_lookup_field (c, ctx));
11955 map_cnt++;
11956 continue;
11957 }
11958
11959 if (!maybe_lookup_field (var, ctx))
11960 continue;
11961
11962 /* Don't remap compute constructs' reduction variables, because the
11963 intermediate result must be local to each gang. */
11964 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11965 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11966 {
11967 x = build_receiver_ref (var, true, ctx);
11968 tree new_var = lookup_decl (var, ctx);
11969
11970 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11971 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11972 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11973 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11974 x = build_simple_mem_ref (x);
11975 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11976 {
11977 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11978 if (omp_is_reference (new_var)
11979 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11980 || DECL_BY_REFERENCE (var)))
11981 {
11982 /* Create a local object to hold the instance
11983 value. */
11984 tree type = TREE_TYPE (TREE_TYPE (new_var));
11985 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11986 tree inst = create_tmp_var (type, id);
11987 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11988 x = build_fold_addr_expr (inst);
11989 }
11990 gimplify_assign (new_var, x, &fplist);
11991 }
11992 else if (DECL_P (new_var))
11993 {
11994 SET_DECL_VALUE_EXPR (new_var, x);
11995 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11996 }
11997 else
11998 gcc_unreachable ();
11999 }
12000 map_cnt++;
12001 break;
12002
12003 case OMP_CLAUSE_FIRSTPRIVATE:
12004 gcc_checking_assert (offloaded);
12005 if (is_gimple_omp_oacc (ctx->stmt))
12006 {
12007 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12008 gcc_checking_assert (!is_oacc_kernels (ctx));
12009 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12010 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12011
12012 goto oacc_firstprivate;
12013 }
12014 map_cnt++;
12015 var = OMP_CLAUSE_DECL (c);
12016 if (!omp_is_reference (var)
12017 && !is_gimple_reg_type (TREE_TYPE (var)))
12018 {
12019 tree new_var = lookup_decl (var, ctx);
12020 if (is_variable_sized (var))
12021 {
12022 tree pvar = DECL_VALUE_EXPR (var);
12023 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12024 pvar = TREE_OPERAND (pvar, 0);
12025 gcc_assert (DECL_P (pvar));
12026 tree new_pvar = lookup_decl (pvar, ctx);
12027 x = build_fold_indirect_ref (new_pvar);
12028 TREE_THIS_NOTRAP (x) = 1;
12029 }
12030 else
12031 x = build_receiver_ref (var, true, ctx);
12032 SET_DECL_VALUE_EXPR (new_var, x);
12033 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12034 }
12035 break;
12036
12037 case OMP_CLAUSE_PRIVATE:
12038 gcc_checking_assert (offloaded);
12039 if (is_gimple_omp_oacc (ctx->stmt))
12040 {
12041 /* No 'private' clauses on OpenACC 'kernels'. */
12042 gcc_checking_assert (!is_oacc_kernels (ctx));
12043 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12044 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12045
12046 break;
12047 }
12048 var = OMP_CLAUSE_DECL (c);
12049 if (is_variable_sized (var))
12050 {
12051 tree new_var = lookup_decl (var, ctx);
12052 tree pvar = DECL_VALUE_EXPR (var);
12053 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12054 pvar = TREE_OPERAND (pvar, 0);
12055 gcc_assert (DECL_P (pvar));
12056 tree new_pvar = lookup_decl (pvar, ctx);
12057 x = build_fold_indirect_ref (new_pvar);
12058 TREE_THIS_NOTRAP (x) = 1;
12059 SET_DECL_VALUE_EXPR (new_var, x);
12060 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12061 }
12062 break;
12063
12064 case OMP_CLAUSE_USE_DEVICE_PTR:
12065 case OMP_CLAUSE_USE_DEVICE_ADDR:
12066 case OMP_CLAUSE_IS_DEVICE_PTR:
12067 var = OMP_CLAUSE_DECL (c);
12068 map_cnt++;
12069 if (is_variable_sized (var))
12070 {
12071 tree new_var = lookup_decl (var, ctx);
12072 tree pvar = DECL_VALUE_EXPR (var);
12073 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12074 pvar = TREE_OPERAND (pvar, 0);
12075 gcc_assert (DECL_P (pvar));
12076 tree new_pvar = lookup_decl (pvar, ctx);
12077 x = build_fold_indirect_ref (new_pvar);
12078 TREE_THIS_NOTRAP (x) = 1;
12079 SET_DECL_VALUE_EXPR (new_var, x);
12080 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12081 }
12082 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12083 && !omp_is_reference (var)
12084 && !omp_is_allocatable_or_ptr (var)
12085 && !lang_hooks.decls.omp_array_data (var, true))
12086 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12087 {
12088 tree new_var = lookup_decl (var, ctx);
12089 tree type = build_pointer_type (TREE_TYPE (var));
12090 x = create_tmp_var_raw (type, get_name (new_var));
12091 gimple_add_tmp_var (x);
12092 x = build_simple_mem_ref (x);
12093 SET_DECL_VALUE_EXPR (new_var, x);
12094 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12095 }
12096 else
12097 {
12098 tree new_var = lookup_decl (var, ctx);
12099 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12100 gimple_add_tmp_var (x);
12101 SET_DECL_VALUE_EXPR (new_var, x);
12102 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12103 }
12104 break;
12105 }
12106
12107 if (offloaded)
12108 {
12109 target_nesting_level++;
12110 lower_omp (&tgt_body, ctx);
12111 target_nesting_level--;
12112 }
12113 else if (data_region)
12114 lower_omp (&tgt_body, ctx);
12115
12116 if (offloaded)
12117 {
12118 /* Declare all the variables created by mapping and the variables
12119 declared in the scope of the target body. */
12120 record_vars_into (ctx->block_vars, child_fn);
12121 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12122 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12123 }
12124
12125 olist = NULL;
12126 ilist = NULL;
12127 if (ctx->record_type)
12128 {
12129 ctx->sender_decl
12130 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12131 DECL_NAMELESS (ctx->sender_decl) = 1;
12132 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12133 t = make_tree_vec (3);
12134 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12135 TREE_VEC_ELT (t, 1)
12136 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12137 ".omp_data_sizes");
12138 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12139 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12140 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12141 tree tkind_type = short_unsigned_type_node;
12142 int talign_shift = 8;
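/* (Editorial note: each .omp_data_kinds element packs the map kind in
   the low TALIGN_SHIFT bits and ceil_log2 of the alignment above them,
   as the asserts below check; e.g. GOMP_MAP_TO (1) of a 16-byte-aligned
   object would be encoded as (4 << 8) | 1 == 0x401.)  */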
12143 TREE_VEC_ELT (t, 2)
12144 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12145 ".omp_data_kinds");
12146 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12147 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12148 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12149 gimple_omp_target_set_data_arg (stmt, t);
12150
12151 vec<constructor_elt, va_gc> *vsize;
12152 vec<constructor_elt, va_gc> *vkind;
12153 vec_alloc (vsize, map_cnt);
12154 vec_alloc (vkind, map_cnt);
12155 unsigned int map_idx = 0;
12156
12157 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12158 switch (OMP_CLAUSE_CODE (c))
12159 {
12160 tree ovar, nc, s, purpose, var, x, type;
12161 unsigned int talign;
12162
12163 default:
12164 break;
12165
12166 case OMP_CLAUSE_MAP:
12167 case OMP_CLAUSE_TO:
12168 case OMP_CLAUSE_FROM:
12169 oacc_firstprivate_map:
12170 nc = c;
12171 ovar = OMP_CLAUSE_DECL (c);
12172 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12173 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12174 || (OMP_CLAUSE_MAP_KIND (c)
12175 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12176 break;
12177 if (!DECL_P (ovar))
12178 {
12179 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12180 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12181 {
12182 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
12183 == get_base_address (ovar));
12184 nc = OMP_CLAUSE_CHAIN (c);
12185 ovar = OMP_CLAUSE_DECL (nc);
12186 }
12187 else
12188 {
12189 tree x = build_sender_ref (ovar, ctx);
12190 tree v
12191 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
12192 gimplify_assign (x, v, &ilist);
12193 nc = NULL_TREE;
12194 }
12195 }
12196 else
12197 {
12198 if (DECL_SIZE (ovar)
12199 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12200 {
12201 tree ovar2 = DECL_VALUE_EXPR (ovar);
12202 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12203 ovar2 = TREE_OPERAND (ovar2, 0);
12204 gcc_assert (DECL_P (ovar2));
12205 ovar = ovar2;
12206 }
12207 if (!maybe_lookup_field (ovar, ctx)
12208 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12209 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12210 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12211 continue;
12212 }
12213
12214 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12215 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12216 talign = DECL_ALIGN_UNIT (ovar);
12217
12218 if (nc
12219 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12220 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12221 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12222 && is_omp_target (stmt))
12223 {
12224 var = lookup_decl_in_outer_ctx (ovar, ctx);
12225 x = build_sender_ref (c, ctx);
12226 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
12227 }
12228 else if (nc)
12229 {
12230 var = lookup_decl_in_outer_ctx (ovar, ctx);
12231 x = build_sender_ref (ovar, ctx);
12232
12233 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12234 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12235 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12236 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
12237 {
12238 gcc_assert (offloaded);
12239 tree avar
12240 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
12241 mark_addressable (avar);
12242 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
12243 talign = DECL_ALIGN_UNIT (avar);
12244 avar = build_fold_addr_expr (avar);
12245 gimplify_assign (x, avar, &ilist);
12246 }
12247 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12248 {
12249 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12250 if (!omp_is_reference (var))
12251 {
12252 if (is_gimple_reg (var)
12253 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12254 TREE_NO_WARNING (var) = 1;
12255 var = build_fold_addr_expr (var);
12256 }
12257 else
12258 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12259 gimplify_assign (x, var, &ilist);
12260 }
12261 else if (is_gimple_reg (var))
12262 {
12263 gcc_assert (offloaded);
12264 tree avar = create_tmp_var (TREE_TYPE (var));
12265 mark_addressable (avar);
12266 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12267 if (GOMP_MAP_COPY_TO_P (map_kind)
12268 || map_kind == GOMP_MAP_POINTER
12269 || map_kind == GOMP_MAP_TO_PSET
12270 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12271 {
12272 /* If we need to initialize a temporary
12273 with VAR because it is not addressable, and
12274 the variable hasn't been initialized yet, then
12275 we'll get a warning for the store to avar.
12276 Don't warn in that case, the mapping might
12277 be implicit. */
12278 TREE_NO_WARNING (var) = 1;
12279 gimplify_assign (avar, var, &ilist);
12280 }
12281 avar = build_fold_addr_expr (avar);
12282 gimplify_assign (x, avar, &ilist);
12283 if ((GOMP_MAP_COPY_FROM_P (map_kind)
12284 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12285 && !TYPE_READONLY (TREE_TYPE (var)))
12286 {
12287 x = unshare_expr (x);
12288 x = build_simple_mem_ref (x);
12289 gimplify_assign (var, x, &olist);
12290 }
12291 }
12292 else
12293 {
12294 /* While MAP is handled explicitly by the FE,
12295 for 'target update', only the identifier is passed. */
12296 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
12297 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
12298 && (omp_is_allocatable_or_ptr (var)
12299 && omp_check_optional_argument (var, false)))
12300 var = build_fold_indirect_ref (var);
12301 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
12302 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
12303 || (!omp_is_allocatable_or_ptr (var)
12304 && !omp_check_optional_argument (var, false)))
12305 var = build_fold_addr_expr (var);
12306 gimplify_assign (x, var, &ilist);
12307 }
12308 }
12309 s = NULL_TREE;
12310 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12311 {
12312 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12313 s = TREE_TYPE (ovar);
12314 if (TREE_CODE (s) == REFERENCE_TYPE
12315 || omp_check_optional_argument (ovar, false))
12316 s = TREE_TYPE (s);
12317 s = TYPE_SIZE_UNIT (s);
12318 }
12319 else
12320 s = OMP_CLAUSE_SIZE (c);
12321 if (s == NULL_TREE)
12322 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12323 s = fold_convert (size_type_node, s);
12324 purpose = size_int (map_idx++);
12325 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12326 if (TREE_CODE (s) != INTEGER_CST)
12327 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12328
12329 unsigned HOST_WIDE_INT tkind, tkind_zero;
12330 switch (OMP_CLAUSE_CODE (c))
12331 {
12332 case OMP_CLAUSE_MAP:
12333 tkind = OMP_CLAUSE_MAP_KIND (c);
12334 tkind_zero = tkind;
12335 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
12336 switch (tkind)
12337 {
12338 case GOMP_MAP_ALLOC:
12339 case GOMP_MAP_IF_PRESENT:
12340 case GOMP_MAP_TO:
12341 case GOMP_MAP_FROM:
12342 case GOMP_MAP_TOFROM:
12343 case GOMP_MAP_ALWAYS_TO:
12344 case GOMP_MAP_ALWAYS_FROM:
12345 case GOMP_MAP_ALWAYS_TOFROM:
12346 case GOMP_MAP_RELEASE:
12347 case GOMP_MAP_FORCE_TO:
12348 case GOMP_MAP_FORCE_FROM:
12349 case GOMP_MAP_FORCE_TOFROM:
12350 case GOMP_MAP_FORCE_PRESENT:
12351 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12352 break;
12353 case GOMP_MAP_DELETE:
12354 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
12355 default:
12356 break;
12357 }
12358 if (tkind_zero != tkind)
12359 {
12360 if (integer_zerop (s))
12361 tkind = tkind_zero;
12362 else if (integer_nonzerop (s))
12363 tkind_zero = tkind;
12364 }
12365 break;
12366 case OMP_CLAUSE_FIRSTPRIVATE:
12367 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12368 tkind = GOMP_MAP_TO;
12369 tkind_zero = tkind;
12370 break;
12371 case OMP_CLAUSE_TO:
12372 tkind = GOMP_MAP_TO;
12373 tkind_zero = tkind;
12374 break;
12375 case OMP_CLAUSE_FROM:
12376 tkind = GOMP_MAP_FROM;
12377 tkind_zero = tkind;
12378 break;
12379 default:
12380 gcc_unreachable ();
12381 }
12382 gcc_checking_assert (tkind
12383 < (HOST_WIDE_INT_C (1U) << talign_shift));
12384 gcc_checking_assert (tkind_zero
12385 < (HOST_WIDE_INT_C (1U) << talign_shift));
12386 talign = ceil_log2 (talign);
12387 tkind |= talign << talign_shift;
12388 tkind_zero |= talign << talign_shift;
12389 gcc_checking_assert (tkind
12390 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12391 gcc_checking_assert (tkind_zero
12392 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12393 if (tkind == tkind_zero)
12394 x = build_int_cstu (tkind_type, tkind);
12395 else
12396 {
12397 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12398 x = build3 (COND_EXPR, tkind_type,
12399 fold_build2 (EQ_EXPR, boolean_type_node,
12400 unshare_expr (s), size_zero_node),
12401 build_int_cstu (tkind_type, tkind_zero),
12402 build_int_cstu (tkind_type, tkind));
12403 }
12404 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12405 if (nc && nc != c)
12406 c = nc;
12407 break;
12408
12409 case OMP_CLAUSE_FIRSTPRIVATE:
12410 if (is_gimple_omp_oacc (ctx->stmt))
12411 goto oacc_firstprivate_map;
12412 ovar = OMP_CLAUSE_DECL (c);
12413 if (omp_is_reference (ovar))
12414 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12415 else
12416 talign = DECL_ALIGN_UNIT (ovar);
12417 var = lookup_decl_in_outer_ctx (ovar, ctx);
12418 x = build_sender_ref (ovar, ctx);
12419 tkind = GOMP_MAP_FIRSTPRIVATE;
12420 type = TREE_TYPE (ovar);
12421 if (omp_is_reference (ovar))
12422 type = TREE_TYPE (type);
12423 if ((INTEGRAL_TYPE_P (type)
12424 && TYPE_PRECISION (type) <= POINTER_SIZE)
12425 || TREE_CODE (type) == POINTER_TYPE)
12426 {
12427 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
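/* (Editorial note: such small scalars travel by value in the pointer
   slot itself; e.g. 'firstprivate (i)' for an int sends the value of
   I converted to a pointer-sized integer, with size 0 recorded in
   .omp_data_sizes below, and the receiving side converts it back.)  */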
12428 tree t = var;
12429 if (omp_is_reference (var))
12430 t = build_simple_mem_ref (var);
12431 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12432 TREE_NO_WARNING (var) = 1;
12433 if (TREE_CODE (type) != POINTER_TYPE)
12434 t = fold_convert (pointer_sized_int_node, t);
12435 t = fold_convert (TREE_TYPE (x), t);
12436 gimplify_assign (x, t, &ilist);
12437 }
12438 else if (omp_is_reference (var))
12439 gimplify_assign (x, var, &ilist);
12440 else if (is_gimple_reg (var))
12441 {
12442 tree avar = create_tmp_var (TREE_TYPE (var));
12443 mark_addressable (avar);
12444 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12445 TREE_NO_WARNING (var) = 1;
12446 gimplify_assign (avar, var, &ilist);
12447 avar = build_fold_addr_expr (avar);
12448 gimplify_assign (x, avar, &ilist);
12449 }
12450 else
12451 {
12452 var = build_fold_addr_expr (var);
12453 gimplify_assign (x, var, &ilist);
12454 }
12455 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12456 s = size_int (0);
12457 else if (omp_is_reference (ovar))
12458 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12459 else
12460 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12461 s = fold_convert (size_type_node, s);
12462 purpose = size_int (map_idx++);
12463 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12464 if (TREE_CODE (s) != INTEGER_CST)
12465 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12466
12467 gcc_checking_assert (tkind
12468 < (HOST_WIDE_INT_C (1U) << talign_shift));
12469 talign = ceil_log2 (talign);
12470 tkind |= talign << talign_shift;
12471 gcc_checking_assert (tkind
12472 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12473 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12474 build_int_cstu (tkind_type, tkind));
12475 break;
12476
12477 case OMP_CLAUSE_USE_DEVICE_PTR:
12478 case OMP_CLAUSE_USE_DEVICE_ADDR:
12479 case OMP_CLAUSE_IS_DEVICE_PTR:
12480 ovar = OMP_CLAUSE_DECL (c);
12481 var = lookup_decl_in_outer_ctx (ovar, ctx);
12482
12483 if (lang_hooks.decls.omp_array_data (ovar, true))
12484 {
12485 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12486 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12487 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12488 }
12489 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12490 {
12491 tkind = GOMP_MAP_USE_DEVICE_PTR;
12492 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12493 }
12494 else
12495 {
12496 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12497 x = build_sender_ref (ovar, ctx);
12498 }
12499
12500 if (is_gimple_omp_oacc (ctx->stmt))
12501 {
12502 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12503
12504 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12505 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12506 }
12507
12508 type = TREE_TYPE (ovar);
12509 if (lang_hooks.decls.omp_array_data (ovar, true))
12510 var = lang_hooks.decls.omp_array_data (ovar, false);
12511 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12512 && !omp_is_reference (ovar)
12513 && !omp_is_allocatable_or_ptr (ovar))
12514 || TREE_CODE (type) == ARRAY_TYPE)
12515 var = build_fold_addr_expr (var);
12516 else
12517 {
12518 if (omp_is_reference (ovar)
12519 || omp_check_optional_argument (ovar, false)
12520 || omp_is_allocatable_or_ptr (ovar))
12521 {
12522 type = TREE_TYPE (type);
12523 if (POINTER_TYPE_P (type)
12524 && TREE_CODE (type) != ARRAY_TYPE
12525 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12526 && !omp_is_allocatable_or_ptr (ovar))
12527 || (omp_is_reference (ovar)
12528 && omp_is_allocatable_or_ptr (ovar))))
12529 var = build_simple_mem_ref (var);
12530 var = fold_convert (TREE_TYPE (x), var);
12531 }
12532 }
12533 tree present;
12534 present = omp_check_optional_argument (ovar, true);
12535 if (present)
12536 {
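/* (Editorial sketch: for a Fortran OPTIONAL dummy argument the
   transfer is guarded, roughly
       if (present (arg)) field = arg; else field = NULL;
   using the three labels built below.)  */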
12537 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12538 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12539 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12540 tree new_x = unshare_expr (x);
12541 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12542 fb_rvalue);
12543 gcond *cond = gimple_build_cond_from_tree (present,
12544 notnull_label,
12545 null_label);
12546 gimple_seq_add_stmt (&ilist, cond);
12547 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12548 gimplify_assign (new_x, null_pointer_node, &ilist);
12549 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12550 gimple_seq_add_stmt (&ilist,
12551 gimple_build_label (notnull_label));
12552 gimplify_assign (x, var, &ilist);
12553 gimple_seq_add_stmt (&ilist,
12554 gimple_build_label (opt_arg_label));
12555 }
12556 else
12557 gimplify_assign (x, var, &ilist);
12558 s = size_int (0);
12559 purpose = size_int (map_idx++);
12560 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12561 gcc_checking_assert (tkind
12562 < (HOST_WIDE_INT_C (1U) << talign_shift));
12563 gcc_checking_assert (tkind
12564 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12565 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12566 build_int_cstu (tkind_type, tkind));
12567 break;
12568 }
12569
12570 gcc_assert (map_idx == map_cnt);
12571
12572 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12573 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12574 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12575 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12576 for (int i = 1; i <= 2; i++)
12577 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12578 {
12579 gimple_seq initlist = NULL;
12580 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12581 TREE_VEC_ELT (t, i)),
12582 &initlist, true, NULL_TREE);
12583 gimple_seq_add_seq (&ilist, initlist);
12584
12585 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12586 gimple_seq_add_stmt (&olist,
12587 gimple_build_assign (TREE_VEC_ELT (t, i),
12588 clobber));
12589 }
12590
12591 tree clobber = build_clobber (ctx->record_type);
12592 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12593 clobber));
12594 }
12595
12596 /* Once all the expansions are done, sequence all the different
12597 fragments inside gimple_omp_body. */
12598
12599 new_body = NULL;
12600
12601 if (offloaded
12602 && ctx->record_type)
12603 {
12604 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12605 /* fixup_child_record_type might have changed receiver_decl's type. */
12606 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12607 gimple_seq_add_stmt (&new_body,
12608 gimple_build_assign (ctx->receiver_decl, t));
12609 }
12610 gimple_seq_add_seq (&new_body, fplist);
12611
12612 if (offloaded || data_region)
12613 {
12614 tree prev = NULL_TREE;
12615 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12616 switch (OMP_CLAUSE_CODE (c))
12617 {
12618 tree var, x;
12619 default:
12620 break;
12621 case OMP_CLAUSE_FIRSTPRIVATE:
12622 if (is_gimple_omp_oacc (ctx->stmt))
12623 break;
12624 var = OMP_CLAUSE_DECL (c);
12625 if (omp_is_reference (var)
12626 || is_gimple_reg_type (TREE_TYPE (var)))
12627 {
12628 tree new_var = lookup_decl (var, ctx);
12629 tree type;
12630 type = TREE_TYPE (var);
12631 if (omp_is_reference (var))
12632 type = TREE_TYPE (type);
12633 if ((INTEGRAL_TYPE_P (type)
12634 && TYPE_PRECISION (type) <= POINTER_SIZE)
12635 || TREE_CODE (type) == POINTER_TYPE)
12636 {
12637 x = build_receiver_ref (var, false, ctx);
12638 if (TREE_CODE (type) != POINTER_TYPE)
12639 x = fold_convert (pointer_sized_int_node, x);
12640 x = fold_convert (type, x);
12641 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12642 fb_rvalue);
12643 if (omp_is_reference (var))
12644 {
12645 tree v = create_tmp_var_raw (type, get_name (var));
12646 gimple_add_tmp_var (v);
12647 TREE_ADDRESSABLE (v) = 1;
12648 gimple_seq_add_stmt (&new_body,
12649 gimple_build_assign (v, x));
12650 x = build_fold_addr_expr (v);
12651 }
12652 gimple_seq_add_stmt (&new_body,
12653 gimple_build_assign (new_var, x));
12654 }
12655 else
12656 {
12657 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12658 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12659 fb_rvalue);
12660 gimple_seq_add_stmt (&new_body,
12661 gimple_build_assign (new_var, x));
12662 }
12663 }
12664 else if (is_variable_sized (var))
12665 {
12666 tree pvar = DECL_VALUE_EXPR (var);
12667 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12668 pvar = TREE_OPERAND (pvar, 0);
12669 gcc_assert (DECL_P (pvar));
12670 tree new_var = lookup_decl (pvar, ctx);
12671 x = build_receiver_ref (var, false, ctx);
12672 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12673 gimple_seq_add_stmt (&new_body,
12674 gimple_build_assign (new_var, x));
12675 }
12676 break;
12677 case OMP_CLAUSE_PRIVATE:
12678 if (is_gimple_omp_oacc (ctx->stmt))
12679 break;
12680 var = OMP_CLAUSE_DECL (c);
12681 if (omp_is_reference (var))
12682 {
12683 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12684 tree new_var = lookup_decl (var, ctx);
12685 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12686 if (TREE_CONSTANT (x))
12687 {
12688 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12689 get_name (var));
12690 gimple_add_tmp_var (x);
12691 TREE_ADDRESSABLE (x) = 1;
12692 x = build_fold_addr_expr_loc (clause_loc, x);
12693 }
12694 else
12695 break;
12696
12697 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12698 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12699 gimple_seq_add_stmt (&new_body,
12700 gimple_build_assign (new_var, x));
12701 }
12702 break;
12703 case OMP_CLAUSE_USE_DEVICE_PTR:
12704 case OMP_CLAUSE_USE_DEVICE_ADDR:
12705 case OMP_CLAUSE_IS_DEVICE_PTR:
12706 tree new_var;
12707 gimple_seq assign_body;
12708 bool is_array_data;
12709 bool do_optional_check;
12710 assign_body = NULL;
12711 do_optional_check = false;
12712 var = OMP_CLAUSE_DECL (c);
12713 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12714
12715 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12716 x = build_sender_ref (is_array_data
12717 ? (splay_tree_key) &DECL_NAME (var)
12718 : (splay_tree_key) &DECL_UID (var), ctx);
12719 else
12720 x = build_receiver_ref (var, false, ctx);
12721
12722 if (is_array_data)
12723 {
12724 bool is_ref = omp_is_reference (var);
12725 do_optional_check = true;
12726 /* First, we copy the descriptor data from the host; then
12727 we update its data to point to the target address. */
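/* (Editorial note: for a Fortran array handled via the omp_array_data
   lang hook, NEW_VAR below is the local copy of the array descriptor
   and V2 its data component, which is overwritten with the device
   address X provided by the runtime.)  */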
12728 new_var = lookup_decl (var, ctx);
12729 new_var = DECL_VALUE_EXPR (new_var);
12730 tree v = new_var;
12731
12732 if (is_ref)
12733 {
12734 var = build_fold_indirect_ref (var);
12735 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12736 fb_rvalue);
12737 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12738 gimple_add_tmp_var (v);
12739 TREE_ADDRESSABLE (v) = 1;
12740 gimple_seq_add_stmt (&assign_body,
12741 gimple_build_assign (v, var));
12742 tree rhs = build_fold_addr_expr (v);
12743 gimple_seq_add_stmt (&assign_body,
12744 gimple_build_assign (new_var, rhs));
12745 }
12746 else
12747 gimple_seq_add_stmt (&assign_body,
12748 gimple_build_assign (new_var, var));
12749
12750 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12751 gcc_assert (v2);
12752 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12753 gimple_seq_add_stmt (&assign_body,
12754 gimple_build_assign (v2, x));
12755 }
12756 else if (is_variable_sized (var))
12757 {
12758 tree pvar = DECL_VALUE_EXPR (var);
12759 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12760 pvar = TREE_OPERAND (pvar, 0);
12761 gcc_assert (DECL_P (pvar));
12762 new_var = lookup_decl (pvar, ctx);
12763 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12764 gimple_seq_add_stmt (&assign_body,
12765 gimple_build_assign (new_var, x));
12766 }
12767 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12768 && !omp_is_reference (var)
12769 && !omp_is_allocatable_or_ptr (var))
12770 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12771 {
12772 new_var = lookup_decl (var, ctx);
12773 new_var = DECL_VALUE_EXPR (new_var);
12774 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12775 new_var = TREE_OPERAND (new_var, 0);
12776 gcc_assert (DECL_P (new_var));
12777 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12778 gimple_seq_add_stmt (&assign_body,
12779 gimple_build_assign (new_var, x));
12780 }
12781 else
12782 {
12783 tree type = TREE_TYPE (var);
12784 new_var = lookup_decl (var, ctx);
12785 if (omp_is_reference (var))
12786 {
12787 type = TREE_TYPE (type);
12788 if (POINTER_TYPE_P (type)
12789 && TREE_CODE (type) != ARRAY_TYPE
12790 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12791 || (omp_is_reference (var)
12792 && omp_is_allocatable_or_ptr (var))))
12793 {
12794 tree v = create_tmp_var_raw (type, get_name (var));
12795 gimple_add_tmp_var (v);
12796 TREE_ADDRESSABLE (v) = 1;
12797 x = fold_convert (type, x);
12798 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12799 fb_rvalue);
12800 gimple_seq_add_stmt (&assign_body,
12801 gimple_build_assign (v, x));
12802 x = build_fold_addr_expr (v);
12803 do_optional_check = true;
12804 }
12805 }
12806 new_var = DECL_VALUE_EXPR (new_var);
12807 x = fold_convert (TREE_TYPE (new_var), x);
12808 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12809 gimple_seq_add_stmt (&assign_body,
12810 gimple_build_assign (new_var, x));
12811 }
12812 tree present;
12813 present = (do_optional_check
12814 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12815 : NULL_TREE);
12816 if (present)
12817 {
12818 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12819 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12820 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12821 glabel *null_glabel = gimple_build_label (null_label);
12822 glabel *notnull_glabel = gimple_build_label (notnull_label);
12823 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12824 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12825 fb_rvalue);
12826 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12827 fb_rvalue);
12828 gcond *cond = gimple_build_cond_from_tree (present,
12829 notnull_label,
12830 null_label);
12831 gimple_seq_add_stmt (&new_body, cond);
12832 gimple_seq_add_stmt (&new_body, null_glabel);
12833 gimplify_assign (new_var, null_pointer_node, &new_body);
12834 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12835 gimple_seq_add_stmt (&new_body, notnull_glabel);
12836 gimple_seq_add_seq (&new_body, assign_body);
12837 gimple_seq_add_stmt (&new_body,
12838 gimple_build_label (opt_arg_label));
12839 }
12840 else
12841 gimple_seq_add_seq (&new_body, assign_body);
12842 break;
12843 }
12844 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
12845 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE
12846 are already handled. Similarly for OMP_CLAUSE_PRIVATE on VLAs
12847 or references to VLAs. */
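/* (Editorial example: for 'map(tofrom:p[0:n])' on a pointer P, the FE
   emits a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P right after the
   array-section map; below, the private P is initialized from the
   receiver field of that preceding clause (PREV), minus the bias
   recorded in OMP_CLAUSE_SIZE.)  */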
12848 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12849 switch (OMP_CLAUSE_CODE (c))
12850 {
12851 tree var;
12852 default:
12853 break;
12854 case OMP_CLAUSE_MAP:
12855 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12856 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12857 {
12858 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12859 poly_int64 offset = 0;
12860 gcc_assert (prev);
12861 var = OMP_CLAUSE_DECL (c);
12862 if (DECL_P (var)
12863 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12864 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12865 ctx))
12866 && varpool_node::get_create (var)->offloadable)
12867 break;
12868 if (TREE_CODE (var) == INDIRECT_REF
12869 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12870 var = TREE_OPERAND (var, 0);
12871 if (TREE_CODE (var) == COMPONENT_REF)
12872 {
12873 var = get_addr_base_and_unit_offset (var, &offset);
12874 gcc_assert (var != NULL_TREE && DECL_P (var));
12875 }
12876 else if (DECL_SIZE (var)
12877 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12878 {
12879 tree var2 = DECL_VALUE_EXPR (var);
12880 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12881 var2 = TREE_OPERAND (var2, 0);
12882 gcc_assert (DECL_P (var2));
12883 var = var2;
12884 }
12885 tree new_var = lookup_decl (var, ctx), x;
12886 tree type = TREE_TYPE (new_var);
12887 bool is_ref;
12888 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12889 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12890 == COMPONENT_REF))
12891 {
12892 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12893 is_ref = true;
12894 new_var = build2 (MEM_REF, type,
12895 build_fold_addr_expr (new_var),
12896 build_int_cst (build_pointer_type (type),
12897 offset));
12898 }
12899 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12900 {
12901 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12902 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12903 new_var = build2 (MEM_REF, type,
12904 build_fold_addr_expr (new_var),
12905 build_int_cst (build_pointer_type (type),
12906 offset));
12907 }
12908 else
12909 is_ref = omp_is_reference (var);
12910 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12911 is_ref = false;
12912 bool ref_to_array = false;
12913 if (is_ref)
12914 {
12915 type = TREE_TYPE (type);
12916 if (TREE_CODE (type) == ARRAY_TYPE)
12917 {
12918 type = build_pointer_type (type);
12919 ref_to_array = true;
12920 }
12921 }
12922 else if (TREE_CODE (type) == ARRAY_TYPE)
12923 {
12924 tree decl2 = DECL_VALUE_EXPR (new_var);
12925 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12926 decl2 = TREE_OPERAND (decl2, 0);
12927 gcc_assert (DECL_P (decl2));
12928 new_var = decl2;
12929 type = TREE_TYPE (new_var);
12930 }
12931 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12932 x = fold_convert_loc (clause_loc, type, x);
12933 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12934 {
12935 tree bias = OMP_CLAUSE_SIZE (c);
12936 if (DECL_P (bias))
12937 bias = lookup_decl (bias, ctx);
12938 bias = fold_convert_loc (clause_loc, sizetype, bias);
12939 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12940 bias);
12941 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12942 TREE_TYPE (x), x, bias);
12943 }
12944 if (ref_to_array)
12945 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12946 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12947 if (is_ref && !ref_to_array)
12948 {
12949 tree t = create_tmp_var_raw (type, get_name (var));
12950 gimple_add_tmp_var (t);
12951 TREE_ADDRESSABLE (t) = 1;
12952 gimple_seq_add_stmt (&new_body,
12953 gimple_build_assign (t, x));
12954 x = build_fold_addr_expr_loc (clause_loc, t);
12955 }
12956 gimple_seq_add_stmt (&new_body,
12957 gimple_build_assign (new_var, x));
12958 prev = NULL_TREE;
12959 }
12960 else if (OMP_CLAUSE_CHAIN (c)
12961 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12962 == OMP_CLAUSE_MAP
12963 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12964 == GOMP_MAP_FIRSTPRIVATE_POINTER
12965 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12966 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12967 prev = c;
12968 break;
12969 case OMP_CLAUSE_PRIVATE:
12970 var = OMP_CLAUSE_DECL (c);
12971 if (is_variable_sized (var))
12972 {
12973 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12974 tree new_var = lookup_decl (var, ctx);
12975 tree pvar = DECL_VALUE_EXPR (var);
12976 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12977 pvar = TREE_OPERAND (pvar, 0);
12978 gcc_assert (DECL_P (pvar));
12979 tree new_pvar = lookup_decl (pvar, ctx);
12980 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12981 tree al = size_int (DECL_ALIGN (var));
12982 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12983 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12984 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12985 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12986 gimple_seq_add_stmt (&new_body,
12987 gimple_build_assign (new_pvar, x));
12988 }
12989 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12990 {
12991 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12992 tree new_var = lookup_decl (var, ctx);
12993 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12994 if (TREE_CONSTANT (x))
12995 break;
12996 else
12997 {
12998 tree atmp
12999 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13000 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13001 tree al = size_int (TYPE_ALIGN (rtype));
13002 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13003 }
13004
13005 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13006 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13007 gimple_seq_add_stmt (&new_body,
13008 gimple_build_assign (new_var, x));
13009 }
13010 break;
13011 }
13012
13013 gimple_seq fork_seq = NULL;
13014 gimple_seq join_seq = NULL;
13015
13016 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13017 {
13018 /* If there are reductions on the offloaded region itself, treat
13019 them as a dummy GANG loop. */
13020 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13021
13022 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13023 false, NULL, NULL, &fork_seq, &join_seq, ctx);
13024 }
13025
13026 gimple_seq_add_seq (&new_body, fork_seq);
13027 gimple_seq_add_seq (&new_body, tgt_body);
13028 gimple_seq_add_seq (&new_body, join_seq);
13029
13030 if (offloaded)
13031 {
13032 new_body = maybe_catch_exception (new_body);
13033 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13034 }
13035 gimple_omp_set_body (stmt, new_body);
13036 }
13037
13038 bind = gimple_build_bind (NULL, NULL,
13039 tgt_bind ? gimple_bind_block (tgt_bind)
13040 : NULL_TREE);
13041 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13042 gimple_bind_add_seq (bind, ilist);
13043 gimple_bind_add_stmt (bind, stmt);
13044 gimple_bind_add_seq (bind, olist);
13045
13046 pop_gimplify_context (NULL);
13047
13048 if (dep_bind)
13049 {
13050 gimple_bind_add_seq (dep_bind, dep_ilist);
13051 gimple_bind_add_stmt (dep_bind, bind);
13052 gimple_bind_add_seq (dep_bind, dep_olist);
13053 pop_gimplify_context (dep_bind);
13054 }
13055 }
13056
13057 /* Expand code for an OpenMP teams directive. */
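/* A rough sketch of the result (editorial; assuming no data-sharing
   clauses, and gimplification may add temporaries):

       #pragma omp teams num_teams (N) thread_limit (M)
         BODY

   becomes approximately

       __builtin_GOMP_teams (N, M);
       BODY
       #pragma omp return(nowait)  */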
13058
13059 static void
13060 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13061 {
13062 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13063 push_gimplify_context ();
13064
13065 tree block = make_node (BLOCK);
13066 gbind *bind = gimple_build_bind (NULL, NULL, block);
13067 gsi_replace (gsi_p, bind, true);
13068 gimple_seq bind_body = NULL;
13069 gimple_seq dlist = NULL;
13070 gimple_seq olist = NULL;
13071
13072 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13073 OMP_CLAUSE_NUM_TEAMS);
13074 if (num_teams == NULL_TREE)
13075 num_teams = build_int_cst (unsigned_type_node, 0);
13076 else
13077 {
13078 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13079 num_teams = fold_convert (unsigned_type_node, num_teams);
13080 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13081 }
13082 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13083 OMP_CLAUSE_THREAD_LIMIT);
13084 if (thread_limit == NULL_TREE)
13085 thread_limit = build_int_cst (unsigned_type_node, 0);
13086 else
13087 {
13088 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13089 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13090 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13091 fb_rvalue);
13092 }
13093
13094 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13095 &bind_body, &dlist, ctx, NULL);
13096 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13097 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13098 NULL, ctx);
13099 gimple_seq_add_stmt (&bind_body, teams_stmt);
13100
13101 location_t loc = gimple_location (teams_stmt);
13102 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13103 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13104 gimple_set_location (call, loc);
13105 gimple_seq_add_stmt (&bind_body, call);
13106
13107 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13108 gimple_omp_set_body (teams_stmt, NULL);
13109 gimple_seq_add_seq (&bind_body, olist);
13110 gimple_seq_add_seq (&bind_body, dlist);
13111 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13112 gimple_bind_set_body (bind, bind_body);
13113
13114 pop_gimplify_context (bind);
13115
13116 gimple_bind_append_vars (bind, ctx->block_vars);
13117 BLOCK_VARS (block) = ctx->block_vars;
13118 if (BLOCK_VARS (block))
13119 TREE_USED (block) = 1;
13120 }
13121
13122 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13123 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13124 of OMP context, but with task_shared_vars set. */
13125
13126 static tree
13127 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13128 void *data)
13129 {
13130 tree t = *tp;
13131
13132 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13133 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
13134 return t;
13135
13136 if (task_shared_vars
13137 && DECL_P (t)
13138 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13139 return t;
13140
13141 /* If a global variable has been privatized, TREE_CONSTANT on
13142 ADDR_EXPR might be wrong. */
13143 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13144 recompute_tree_invariant_for_addr_expr (t);
13145
13146 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13147 return NULL_TREE;
13148 }
13149
13150 /* Data to be communicated between lower_omp_regimplify_operands and
13151 lower_omp_regimplify_operands_p. */
13152
13153 struct lower_omp_regimplify_operands_data
13154 {
13155 omp_context *ctx;
13156 vec<tree> *decls;
13157 };
13158
13159 /* Helper function for lower_omp_regimplify_operands. Find
13160 omp_member_access_dummy_var vars and adjust temporarily their
13161 DECL_VALUE_EXPRs if needed. */
13162
13163 static tree
13164 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13165 void *data)
13166 {
13167 tree t = omp_member_access_dummy_var (*tp);
13168 if (t)
13169 {
13170 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13171 lower_omp_regimplify_operands_data *ldata
13172 = (lower_omp_regimplify_operands_data *) wi->info;
13173 tree o = maybe_lookup_decl (t, ldata->ctx);
13174 if (o != t)
13175 {
13176 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13177 ldata->decls->safe_push (*tp);
13178 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13179 SET_DECL_VALUE_EXPR (*tp, v);
13180 }
13181 }
13182 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13183 return NULL_TREE;
13184 }
13185
13186 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13187 of omp_member_access_dummy_var vars during regimplification. */
13188
13189 static void
13190 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13191 gimple_stmt_iterator *gsi_p)
13192 {
13193 auto_vec<tree, 10> decls;
13194 if (ctx)
13195 {
13196 struct walk_stmt_info wi;
13197 memset (&wi, '\0', sizeof (wi));
13198 struct lower_omp_regimplify_operands_data data;
13199 data.ctx = ctx;
13200 data.decls = &decls;
13201 wi.info = &data;
13202 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
13203 }
13204 gimple_regimplify_operands (stmt, gsi_p);
13205 while (!decls.is_empty ())
13206 {
13207 tree t = decls.pop ();
13208 tree v = decls.pop ();
13209 SET_DECL_VALUE_EXPR (t, v);
13210 }
13211 }
13212
13213 static void
13214 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13215 {
13216 gimple *stmt = gsi_stmt (*gsi_p);
13217 struct walk_stmt_info wi;
13218 gcall *call_stmt;
13219
13220 if (gimple_has_location (stmt))
13221 input_location = gimple_location (stmt);
13222
13223 if (task_shared_vars)
13224 memset (&wi, '\0', sizeof (wi));
13225
13226 /* If we have issued syntax errors, avoid doing any heavy lifting.
13227 Just replace the OMP directives with a NOP to avoid
13228 confusing RTL expansion. */
13229 if (seen_error () && is_gimple_omp (stmt))
13230 {
13231 gsi_replace (gsi_p, gimple_build_nop (), true);
13232 return;
13233 }
13234
13235 switch (gimple_code (stmt))
13236 {
13237 case GIMPLE_COND:
13238 {
13239 gcond *cond_stmt = as_a <gcond *> (stmt);
13240 if ((ctx || task_shared_vars)
13241 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
13242 lower_omp_regimplify_p,
13243 ctx ? NULL : &wi, NULL)
13244 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
13245 lower_omp_regimplify_p,
13246 ctx ? NULL : &wi, NULL)))
13247 lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
13248 }
13249 break;
13250 case GIMPLE_CATCH:
13251 lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
13252 break;
13253 case GIMPLE_EH_FILTER:
13254 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
13255 break;
13256 case GIMPLE_TRY:
13257 lower_omp (gimple_try_eval_ptr (stmt), ctx);
13258 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
13259 break;
13260 case GIMPLE_TRANSACTION:
13261 lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
13262 ctx);
13263 break;
13264 case GIMPLE_BIND:
13265 lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
13266 maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
13267 break;
13268 case GIMPLE_OMP_PARALLEL:
13269 case GIMPLE_OMP_TASK:
13270 ctx = maybe_lookup_ctx (stmt);
13271 gcc_assert (ctx);
13272 if (ctx->cancellable)
13273 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13274 lower_omp_taskreg (gsi_p, ctx);
13275 break;
13276 case GIMPLE_OMP_FOR:
13277 ctx = maybe_lookup_ctx (stmt);
13278 gcc_assert (ctx);
13279 if (ctx->cancellable)
13280 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13281 lower_omp_for (gsi_p, ctx);
13282 break;
13283 case GIMPLE_OMP_SECTIONS:
13284 ctx = maybe_lookup_ctx (stmt);
13285 gcc_assert (ctx);
13286 if (ctx->cancellable)
13287 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
13288 lower_omp_sections (gsi_p, ctx);
13289 break;
13290 case GIMPLE_OMP_SINGLE:
13291 ctx = maybe_lookup_ctx (stmt);
13292 gcc_assert (ctx);
13293 lower_omp_single (gsi_p, ctx);
13294 break;
13295 case GIMPLE_OMP_MASTER:
13296 ctx = maybe_lookup_ctx (stmt);
13297 gcc_assert (ctx);
13298 lower_omp_master (gsi_p, ctx);
13299 break;
13300 case GIMPLE_OMP_TASKGROUP:
13301 ctx = maybe_lookup_ctx (stmt);
13302 gcc_assert (ctx);
13303 lower_omp_taskgroup (gsi_p, ctx);
13304 break;
13305 case GIMPLE_OMP_ORDERED:
13306 ctx = maybe_lookup_ctx (stmt);
13307 gcc_assert (ctx);
13308 lower_omp_ordered (gsi_p, ctx);
13309 break;
13310 case GIMPLE_OMP_SCAN:
13311 ctx = maybe_lookup_ctx (stmt);
13312 gcc_assert (ctx);
13313 lower_omp_scan (gsi_p, ctx);
13314 break;
13315 case GIMPLE_OMP_CRITICAL:
13316 ctx = maybe_lookup_ctx (stmt);
13317 gcc_assert (ctx);
13318 lower_omp_critical (gsi_p, ctx);
13319 break;
13320 case GIMPLE_OMP_ATOMIC_LOAD:
13321 if ((ctx || task_shared_vars)
13322 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
13323 as_a <gomp_atomic_load *> (stmt)),
13324 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
13325 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13326 break;
13327 case GIMPLE_OMP_TARGET:
13328 ctx = maybe_lookup_ctx (stmt);
13329 gcc_assert (ctx);
13330 lower_omp_target (gsi_p, ctx);
13331 break;
13332 case GIMPLE_OMP_TEAMS:
13333 ctx = maybe_lookup_ctx (stmt);
13334 gcc_assert (ctx);
13335 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
13336 lower_omp_taskreg (gsi_p, ctx);
13337 else
13338 lower_omp_teams (gsi_p, ctx);
13339 break;
13340 case GIMPLE_CALL:
13341 tree fndecl;
13342 call_stmt = as_a <gcall *> (stmt);
13343 fndecl = gimple_call_fndecl (call_stmt);
13344 if (fndecl
13345 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13346 switch (DECL_FUNCTION_CODE (fndecl))
13347 {
13348 case BUILT_IN_GOMP_BARRIER:
13349 if (ctx == NULL)
13350 break;
13351 /* FALLTHRU */
13352 case BUILT_IN_GOMP_CANCEL:
13353 case BUILT_IN_GOMP_CANCELLATION_POINT:
13354 omp_context *cctx;
13355 cctx = ctx;
13356 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
13357 cctx = cctx->outer;
13358 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
13359 if (!cctx->cancellable)
13360 {
13361 if (DECL_FUNCTION_CODE (fndecl)
13362 == BUILT_IN_GOMP_CANCELLATION_POINT)
13363 {
13364 stmt = gimple_build_nop ();
13365 gsi_replace (gsi_p, stmt, false);
13366 }
13367 break;
13368 }
13369 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
13370 {
13371 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
13372 gimple_call_set_fndecl (call_stmt, fndecl);
13373 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
13374 }
13375 tree lhs;
13376 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
13377 gimple_call_set_lhs (call_stmt, lhs);
13378 tree fallthru_label;
13379 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
13380 gimple *g;
13381 g = gimple_build_label (fallthru_label);
13382 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13383 g = gimple_build_cond (NE_EXPR, lhs,
13384 fold_convert (TREE_TYPE (lhs),
13385 boolean_false_node),
13386 cctx->cancel_label, fallthru_label);
13387 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13388 break;
13389 default:
13390 break;
13391 }
13392 goto regimplify;
13393
13394 case GIMPLE_ASSIGN:
13395 for (omp_context *up = ctx; up; up = up->outer)
13396 {
13397 if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
13398 || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
13399 || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
13400 || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
13401 || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
13402 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
13403 && (gimple_omp_target_kind (up->stmt)
13404 == GF_OMP_TARGET_KIND_DATA)))
13405 continue;
13406 else if (!up->lastprivate_conditional_map)
13407 break;
13408 tree lhs = get_base_address (gimple_assign_lhs (stmt));
13409 if (TREE_CODE (lhs) == MEM_REF
13410 && DECL_P (TREE_OPERAND (lhs, 0))
13411 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
13412 0))) == REFERENCE_TYPE)
13413 lhs = TREE_OPERAND (lhs, 0);
13414 if (DECL_P (lhs))
13415 if (tree *v = up->lastprivate_conditional_map->get (lhs))
13416 {
13417 tree clauses;
13418 if (up->combined_into_simd_safelen1)
13419 {
13420 up = up->outer;
13421 if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
13422 up = up->outer;
13423 }
13424 if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
13425 clauses = gimple_omp_for_clauses (up->stmt);
13426 else
13427 clauses = gimple_omp_sections_clauses (up->stmt);
13428 tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
13429 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
13430 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
13431 OMP_CLAUSE__CONDTEMP_);
13432 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
13433 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
13434 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
13435 }
13436 }
13437 /* FALLTHRU */
13438
13439 default:
13440 regimplify:
13441 if ((ctx || task_shared_vars)
13442 && walk_gimple_op (stmt, lower_omp_regimplify_p,
13443 ctx ? NULL : &wi))
13444 {
13445 /* Just remove clobbers; this should happen only if we have
13446 "privatized" local addressable variables in SIMD regions.
13447 The clobber isn't needed in that case, and gimplifying the
13448 address of the ARRAY_REF into a pointer and creating a MEM_REF
13449 based clobber would create worse code than we get with the
13450 clobber dropped. */
13451 if (gimple_clobber_p (stmt))
13452 {
13453 gsi_replace (gsi_p, gimple_build_nop (), true);
13454 break;
13455 }
13456 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
13457 }
13458 break;
13459 }
13460 }
13461
13462 static void
13463 lower_omp (gimple_seq *body, omp_context *ctx)
13464 {
13465 location_t saved_location = input_location;
13466 gimple_stmt_iterator gsi;
13467 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13468 lower_omp_1 (&gsi, ctx);
13469 /* During gimplification, we haven't folded statements inside offloading
13470 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13471 if (target_nesting_level || taskreg_nesting_level)
13472 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13473 fold_stmt (&gsi);
13474 input_location = saved_location;
13475 }
13476
13477 /* Main entry point. */
13478
13479 static unsigned int
13480 execute_lower_omp (void)
13481 {
13482 gimple_seq body;
13483 int i;
13484 omp_context *ctx;
13485
13486 /* This pass always runs, to provide PROP_gimple_lomp.
13487 But often, there is nothing to do. */
13488 if (flag_openacc == 0 && flag_openmp == 0
13489 && flag_openmp_simd == 0)
13490 return 0;
13491
13492 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
13493 delete_omp_context);
13494
13495 body = gimple_body (current_function_decl);
13496
13497 scan_omp (&body, NULL);
13498 gcc_assert (taskreg_nesting_level == 0);
13499 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
13500 finish_taskreg_scan (ctx);
13501 taskreg_contexts.release ();
13502
13503 if (all_contexts->root)
13504 {
13505 if (task_shared_vars)
13506 push_gimplify_context ();
13507 lower_omp (&body, NULL);
13508 if (task_shared_vars)
13509 pop_gimplify_context (NULL);
13510 }
13511
13512 if (all_contexts)
13513 {
13514 splay_tree_delete (all_contexts);
13515 all_contexts = NULL;
13516 }
13517 BITMAP_FREE (task_shared_vars);
13518 BITMAP_FREE (global_nonaddressable_vars);
13519
13520 /* If the current function is a method, remove the artificial dummy
13521 VAR_DECLs created for non-static data member privatization; they aren't
13522 needed for debug info or anything else, have already been replaced
13523 everywhere in the IL, and cause problems with LTO. */
13524 if (DECL_ARGUMENTS (current_function_decl)
13525 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
13526 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
13527 == POINTER_TYPE))
13528 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
13529 return 0;
13530 }
13531
13532 namespace {
13533
13534 const pass_data pass_data_lower_omp =
13535 {
13536 GIMPLE_PASS, /* type */
13537 "omplower", /* name */
13538 OPTGROUP_OMP, /* optinfo_flags */
13539 TV_NONE, /* tv_id */
13540 PROP_gimple_any, /* properties_required */
13541 PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
13542 0, /* properties_destroyed */
13543 0, /* todo_flags_start */
13544 0, /* todo_flags_finish */
13545 };
13546
13547 class pass_lower_omp : public gimple_opt_pass
13548 {
13549 public:
13550 pass_lower_omp (gcc::context *ctxt)
13551 : gimple_opt_pass (pass_data_lower_omp, ctxt)
13552 {}
13553
13554 /* opt_pass methods: */
13555 virtual unsigned int execute (function *) { return execute_lower_omp (); }
13556
13557 }; // class pass_lower_omp
13558
13559 } // anon namespace
13560
13561 gimple_opt_pass *
13562 make_pass_lower_omp (gcc::context *ctxt)
13563 {
13564 return new pass_lower_omp (ctxt);
13565 }
13566 \f
13567 /* The following is a utility to diagnose structured block violations.
13568 It is not part of the "omplower" pass, as that's invoked too late. It
13569 should be invoked by the respective front ends after gimplification. */
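/* For instance (editorial illustration), with -fopenmp:

       #pragma omp parallel
       {
         if (cond)
           goto out;
       }
     out:;

   The goto leaves the parallel's structured block, so diagnose_sb_2
   reports "invalid branch to/from OpenMP structured block" via
   diagnose_sb_0 below.  */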
13570
13571 static splay_tree all_labels;
13572
13573 /* Check for mismatched contexts and generate an error if needed. Return
13574 true if an error is detected. */
13575
13576 static bool
13577 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
13578 gimple *branch_ctx, gimple *label_ctx)
13579 {
13580 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
13581 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
13582
13583 if (label_ctx == branch_ctx)
13584 return false;
13585
13586 const char* kind = NULL;
13587
13588 if (flag_openacc)
13589 {
13590 if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
13591 || (label_ctx && is_gimple_omp_oacc (label_ctx)))
13592 {
13593 gcc_checking_assert (kind == NULL);
13594 kind = "OpenACC";
13595 }
13596 }
13597 if (kind == NULL)
13598 {
13599 gcc_checking_assert (flag_openmp || flag_openmp_simd);
13600 kind = "OpenMP";
13601 }
13602
13603 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
13604 so we could traverse it and issue a correct "exit" or "enter" error
13605 message upon a structured block violation.
13606
13607 We built the context by tree_cons'ing up a list, but there is
13608 no easy counterpart in gimple tuples. It seems like far too much work
13609 for issuing exit/enter error messages. If someone really misses the
13610 distinct error message... patches welcome. */
13611
13612 #if 0
13613 /* Try to avoid confusing the user by producing an error message
13614 with correct "exit" or "enter" verbiage. We prefer "exit"
13615 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
13616 if (branch_ctx == NULL)
13617 exit_p = false;
13618 else
13619 {
13620 while (label_ctx)
13621 {
13622 if (TREE_VALUE (label_ctx) == branch_ctx)
13623 {
13624 exit_p = false;
13625 break;
13626 }
13627 label_ctx = TREE_CHAIN (label_ctx);
13628 }
13629 }
13630
13631 if (exit_p)
13632 error ("invalid exit from %s structured block", kind);
13633 else
13634 error ("invalid entry to %s structured block", kind);
13635 #endif
13636
13637 /* If it's obvious we have an invalid entry, be specific about the error. */
13638 if (branch_ctx == NULL)
13639 error ("invalid entry to %s structured block", kind);
13640 else
13641 {
13642 /* Otherwise, be vague and lazy, but efficient. */
13643 error ("invalid branch to/from %s structured block", kind);
13644 }
13645
13646 gsi_replace (gsi_p, gimple_build_nop (), false);
13647 return true;
13648 }
13649
13650 /* Pass 1: Create a minimal tree of structured blocks, and record
13651 where each label is found. */
13652
13653 static tree
13654 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13655 struct walk_stmt_info *wi)
13656 {
13657 gimple *context = (gimple *) wi->info;
13658 gimple *inner_context;
13659 gimple *stmt = gsi_stmt (*gsi_p);
13660
13661 *handled_ops_p = true;
13662
13663 switch (gimple_code (stmt))
13664 {
13665 WALK_SUBSTMTS;
13666
13667 case GIMPLE_OMP_PARALLEL:
13668 case GIMPLE_OMP_TASK:
13669 case GIMPLE_OMP_SECTIONS:
13670 case GIMPLE_OMP_SINGLE:
13671 case GIMPLE_OMP_SECTION:
13672 case GIMPLE_OMP_MASTER:
13673 case GIMPLE_OMP_ORDERED:
13674 case GIMPLE_OMP_SCAN:
13675 case GIMPLE_OMP_CRITICAL:
13676 case GIMPLE_OMP_TARGET:
13677 case GIMPLE_OMP_TEAMS:
13678 case GIMPLE_OMP_TASKGROUP:
13679 /* The minimal context here is just the current OMP construct. */
13680 inner_context = stmt;
13681 wi->info = inner_context;
13682 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13683 wi->info = context;
13684 break;
13685
13686 case GIMPLE_OMP_FOR:
13687 inner_context = stmt;
13688 wi->info = inner_context;
13689 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13690 walk them. */
13691 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13692 diagnose_sb_1, NULL, wi);
13693 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13694 wi->info = context;
13695 break;
13696
13697 case GIMPLE_LABEL:
13698 splay_tree_insert (all_labels,
13699 (splay_tree_key) gimple_label_label (
13700 as_a <glabel *> (stmt)),
13701 (splay_tree_value) context);
13702 break;
13703
13704 default:
13705 break;
13706 }
13707
13708 return NULL_TREE;
13709 }
13710
13711 /* Pass 2: Check each branch and see if its context differs from that of
13712 the destination label's context. */
13713
13714 static tree
13715 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13716 struct walk_stmt_info *wi)
13717 {
13718 gimple *context = (gimple *) wi->info;
13719 splay_tree_node n;
13720 gimple *stmt = gsi_stmt (*gsi_p);
13721
13722 *handled_ops_p = true;
13723
13724 switch (gimple_code (stmt))
13725 {
13726 WALK_SUBSTMTS;
13727
13728 case GIMPLE_OMP_PARALLEL:
13729 case GIMPLE_OMP_TASK:
13730 case GIMPLE_OMP_SECTIONS:
13731 case GIMPLE_OMP_SINGLE:
13732 case GIMPLE_OMP_SECTION:
13733 case GIMPLE_OMP_MASTER:
13734 case GIMPLE_OMP_ORDERED:
13735 case GIMPLE_OMP_SCAN:
13736 case GIMPLE_OMP_CRITICAL:
13737 case GIMPLE_OMP_TARGET:
13738 case GIMPLE_OMP_TEAMS:
13739 case GIMPLE_OMP_TASKGROUP:
13740 wi->info = stmt;
13741 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13742 wi->info = context;
13743 break;
13744
13745 case GIMPLE_OMP_FOR:
13746 wi->info = stmt;
13747 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13748 walk them. */
13749 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
13750 diagnose_sb_2, NULL, wi);
13751 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
13752 wi->info = context;
13753 break;
13754
13755 case GIMPLE_COND:
13756 {
13757 gcond *cond_stmt = as_a <gcond *> (stmt);
13758 tree lab = gimple_cond_true_label (cond_stmt);
13759 if (lab)
13760 {
13761 n = splay_tree_lookup (all_labels,
13762 (splay_tree_key) lab);
13763 diagnose_sb_0 (gsi_p, context,
13764 n ? (gimple *) n->value : NULL);
13765 }
13766 lab = gimple_cond_false_label (cond_stmt);
13767 if (lab)
13768 {
13769 n = splay_tree_lookup (all_labels,
13770 (splay_tree_key) lab);
13771 diagnose_sb_0 (gsi_p, context,
13772 n ? (gimple *) n->value : NULL);
13773 }
13774 }
13775 break;
13776
13777 case GIMPLE_GOTO:
13778 {
13779 tree lab = gimple_goto_dest (stmt);
13780 if (TREE_CODE (lab) != LABEL_DECL)
13781 break;
13782
13783 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13784 diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
13785 }
13786 break;
13787
13788 case GIMPLE_SWITCH:
13789 {
13790 gswitch *switch_stmt = as_a <gswitch *> (stmt);
13791 unsigned int i;
13792 for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
13793 {
13794 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
13795 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
13796 if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
13797 break;
13798 }
13799 }
13800 break;
13801
13802 case GIMPLE_RETURN:
13803 diagnose_sb_0 (gsi_p, context, NULL);
13804 break;
13805
13806 default:
13807 break;
13808 }
13809
13810 return NULL_TREE;
13811 }
13812
13813 static unsigned int
13814 diagnose_omp_structured_block_errors (void)
13815 {
13816 struct walk_stmt_info wi;
13817 gimple_seq body = gimple_body (current_function_decl);
13818
13819 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13820
13821 memset (&wi, 0, sizeof (wi));
13822 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13823
13824 memset (&wi, 0, sizeof (wi));
13825 wi.want_locations = true;
13826 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13827
13828 gimple_set_body (current_function_decl, body);
13829
13830 splay_tree_delete (all_labels);
13831 all_labels = NULL;
13832
13833 return 0;
13834 }
13835
13836 namespace {
13837
13838 const pass_data pass_data_diagnose_omp_blocks =
13839 {
13840 GIMPLE_PASS, /* type */
13841 "*diagnose_omp_blocks", /* name */
13842 OPTGROUP_OMP, /* optinfo_flags */
13843 TV_NONE, /* tv_id */
13844 PROP_gimple_any, /* properties_required */
13845 0, /* properties_provided */
13846 0, /* properties_destroyed */
13847 0, /* todo_flags_start */
13848 0, /* todo_flags_finish */
13849 };
13850
13851 class pass_diagnose_omp_blocks : public gimple_opt_pass
13852 {
13853 public:
13854 pass_diagnose_omp_blocks (gcc::context *ctxt)
13855 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
13856 {}
13857
13858 /* opt_pass methods: */
13859 virtual bool gate (function *)
13860 {
13861 return flag_openacc || flag_openmp || flag_openmp_simd;
13862 }
13863 virtual unsigned int execute (function *)
13864 {
13865 return diagnose_omp_structured_block_errors ();
13866 }
13867
13868 }; // class pass_diagnose_omp_blocks
13869
13870 } // anon namespace
13871
13872 gimple_opt_pass *
13873 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13874 {
13875 return new pass_diagnose_omp_blocks (ctxt);
13876 }
13877 \f
13878
13879 #include "gt-omp-low.h"