/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2021 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x00080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
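
/* A variable's entry in an OpenMP/OpenACC context typically combines one
   data-sharing kind with modifier bits, and GOVD_DATA_SHARE_CLASS is the
   mask that extracts the data-sharing kind.  A sketch of the intended use
   (the exact flags recorded for each clause are decided in
   omp_add_variable and the clause-scanning code below):

     unsigned int flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT | GOVD_SEEN;
     if (flags & GOVD_DATA_SHARE_CLASS)
       ... the variable already has a data-sharing kind ...  */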


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				/* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	/* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	/* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	/* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	/* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,	/* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
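
/* Typical usage, as seen throughout this file (a sketch; BODY stands for
   the GIMPLE_BIND that should receive the temporaries, or NULL to fall
   back to record_vars):

     push_gimplify_context ();
     ... gimplify trees, creating temporaries ...
     pop_gimplify_context (BODY);

   ctx_alloc and ctx_free keep the context structs on a free list, so
   nested push/pop cycles do not call malloc each time; the list itself
   is released by free_gimplify_stack above.  */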

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
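
/* A sketch of the reuse that formal temporaries enable: gimplifying two
   occurrences of the same expression 'a + b' as formal temporaries
   yields assignments to one shared temporary (D.1234 is an invented
   name):

     D.1234 = a + b;  ... use D.1234 ...
     D.1234 = a + b;  ... use D.1234 ...

   lookup_tmp_var keys temp_htab on the value tree, so both requests get
   the same variable; this is only safe under conditions #1 and #2
   above.  */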

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


\f
/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
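
/* For instance, decls and constants are allowed to be shared, so a front
   end may legitimately reuse one VAR_DECL node for every reference to 'a'
   in 'a + a'.  But if it reused a single PLUS_EXPR node in two statement
   lists, the walk below would find TREE_VISITED already set on the second
   encounter and replace that reference with a copy.  */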

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

\f
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */
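
/* For instance, for the GNU C statement expression

     x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapper has type 'int'.  Voidification gives the wrapper
   (and each container on the way to its last statement) void type,
   rewrites the value computation into an assignment 'retval = i + 1',
   and returns the new temporary so the caller can use it in place of
   the wrapper (a sketch; the temporary really is created below with
   the name "retval").  */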

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of a DECL
   variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t));
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */
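
/* A sketch of the lowering (label names are invented):

     LOOP_EXPR                 start_label:
       <body>           ==>      <body>
                                 goto start_label;
                               exit_label:

   where exit_label is emitted only if gimplifying the body installed
   gimplify_ctxp->exit_label, i.e. the body contained an EXIT_EXPR.  */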
1836
1837 static enum gimplify_status
1838 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1839 {
1840 tree saved_label = gimplify_ctxp->exit_label;
1841 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1842
1843 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1844
1845 gimplify_ctxp->exit_label = NULL_TREE;
1846
1847 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1848
1849 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1850
1851 if (gimplify_ctxp->exit_label)
1852 gimplify_seq_add_stmt (pre_p,
1853 gimple_build_label (gimplify_ctxp->exit_label));
1854
1855 gimplify_ctxp->exit_label = saved_label;
1856
1857 *expr_p = NULL;
1858 return GS_ALL_DONE;
1859 }
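
/* Illustrative sketch: a GENERIC loop

     LOOP_EXPR <{ EXIT_EXPR <cond>; body; }>

   is lowered by the function above (with gimplify_exit_expr handling
   the EXIT_EXPR) to approximately

     start:
       if (cond) goto exit;
       body
       goto start;
     exit:	(label emitted only when an EXIT_EXPR was seen)  */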
1860
1861 /* Gimplify a statement list onto a sequence. These may be created either
1862 by an enlightened front-end, or by shortcut_cond_expr. */
1863
1864 static enum gimplify_status
1865 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1866 {
1867 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1868
1869 tree_stmt_iterator i = tsi_start (*expr_p);
1870
1871 while (!tsi_end_p (i))
1872 {
1873 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1874 tsi_delink (&i);
1875 }
1876
1877 if (temp)
1878 {
1879 *expr_p = temp;
1880 return GS_OK;
1881 }
1882
1883 return GS_ALL_DONE;
1884 }
1885
1886 /* Callback for walk_gimple_seq. */
1887
1888 static tree
1889 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1890 struct walk_stmt_info *wi)
1891 {
1892 gimple *stmt = gsi_stmt (*gsi_p);
1893
1894 *handled_ops_p = true;
1895 switch (gimple_code (stmt))
1896 {
1897 case GIMPLE_TRY:
1898 /* A compiler-generated cleanup or a user-written try block.
1899 If it's empty, don't dive into it--that would result in
1900 worse location info. */
1901 if (gimple_try_eval (stmt) == NULL)
1902 {
1903 wi->info = stmt;
1904 return integer_zero_node;
1905 }
1906 /* Fall through. */
1907 case GIMPLE_BIND:
1908 case GIMPLE_CATCH:
1909 case GIMPLE_EH_FILTER:
1910 case GIMPLE_TRANSACTION:
1911 /* Walk the sub-statements. */
1912 *handled_ops_p = false;
1913 break;
1914
1915 case GIMPLE_DEBUG:
1916 /* Ignore these. We may generate them before declarations that
1917 are never executed. If there's something to warn about,
1918 there will be non-debug stmts too, and we'll catch those. */
1919 break;
1920
1921 case GIMPLE_CALL:
1922 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1923 {
1924 *handled_ops_p = false;
1925 break;
1926 }
1927 /* Fall through. */
1928 default:
1929 /* Save the first "real" statement (not a decl/lexical scope/...). */
1930 wi->info = stmt;
1931 return integer_zero_node;
1932 }
1933 return NULL_TREE;
1934 }
1935
1936 /* Possibly warn about unreachable statements between the switch's
1937 controlling expression and the first case. SEQ is the switch body. */
1938
1939 static void
1940 maybe_warn_switch_unreachable (gimple_seq seq)
1941 {
1942 if (!warn_switch_unreachable
1943 /* This warning doesn't play well with Fortran when optimizations
1944 are on. */
1945 || lang_GNU_Fortran ()
1946 || seq == NULL)
1947 return;
1948
1949 struct walk_stmt_info wi;
1950 memset (&wi, 0, sizeof (wi));
1951 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1952 gimple *stmt = (gimple *) wi.info;
1953
1954 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1955 {
1956 if (gimple_code (stmt) == GIMPLE_GOTO
1957 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1958 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1959 /* Don't warn for compiler-generated gotos. These occur
1960 in Duff's devices, for example. */;
1961 else
1962 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1963 "statement will never be executed");
1964 }
1965 }
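
/* Illustrative example of code the function above warns for:

     switch (x)
       {
	 foo ();	<-- "statement will never be executed"
       case 1:
	 break;
       }

   The call sits between the controlling expression and the first case
   label, so it is unreachable no matter what X is.  */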
1966
1967
1968 /* A label entry that pairs label and a location. */
1969 struct label_entry
1970 {
1971 tree label;
1972 location_t loc;
1973 };
1974
1975 /* Find LABEL in vector of label entries VEC. */
1976
1977 static struct label_entry *
1978 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1979 {
1980 unsigned int i;
1981 struct label_entry *l;
1982
1983 FOR_EACH_VEC_ELT (*vec, i, l)
1984 if (l->label == label)
1985 return l;
1986 return NULL;
1987 }
1988
1989 /* Return true if LABEL, a LABEL_DECL, represents a case label
1990 in a vector of labels CASES. */
1991
1992 static bool
1993 case_label_p (const vec<tree> *cases, tree label)
1994 {
1995 unsigned int i;
1996 tree l;
1997
1998 FOR_EACH_VEC_ELT (*cases, i, l)
1999 if (CASE_LABEL (l) == label)
2000 return true;
2001 return false;
2002 }
2003
2004 /* Find the last nondebug statement in a scope STMT. */
2005
2006 static gimple *
2007 last_stmt_in_scope (gimple *stmt)
2008 {
2009 if (!stmt)
2010 return NULL;
2011
2012 switch (gimple_code (stmt))
2013 {
2014 case GIMPLE_BIND:
2015 {
2016 gbind *bind = as_a <gbind *> (stmt);
2017 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2018 return last_stmt_in_scope (stmt);
2019 }
2020
2021 case GIMPLE_TRY:
2022 {
2023 gtry *try_stmt = as_a <gtry *> (stmt);
2024 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2025 gimple *last_eval = last_stmt_in_scope (stmt);
2026 if (gimple_stmt_may_fallthru (last_eval)
2027 && (last_eval == NULL
2028 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2029 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2030 {
2031 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2032 return last_stmt_in_scope (stmt);
2033 }
2034 else
2035 return last_eval;
2036 }
2037
2038 case GIMPLE_DEBUG:
2039 gcc_unreachable ();
2040
2041 default:
2042 return stmt;
2043 }
2044 }
2045
2046 /* Collect interesting labels in LABELS and return the statement preceding
2047 another case label, or a user-defined label. Store a location useful
2048 to give warnings at *PREVLOC (usually the location of the returned
2049 statement or of its surrounding scope). */
2050
2051 static gimple *
2052 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2053 auto_vec <struct label_entry> *labels,
2054 location_t *prevloc)
2055 {
2056 gimple *prev = NULL;
2057
2058 *prevloc = UNKNOWN_LOCATION;
2059 do
2060 {
2061 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2062 {
2063 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2064 which starts on a GIMPLE_SWITCH and ends with a break label.
2065 Handle that as a single statement that can fall through. */
2066 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2067 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2068 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2069 if (last
2070 && gimple_code (first) == GIMPLE_SWITCH
2071 && gimple_code (last) == GIMPLE_LABEL)
2072 {
2073 tree label = gimple_label_label (as_a <glabel *> (last));
2074 if (SWITCH_BREAK_LABEL_P (label))
2075 {
2076 prev = bind;
2077 gsi_next (gsi_p);
2078 continue;
2079 }
2080 }
2081 }
2082 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2083 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2084 {
2085 /* Nested scope. Only look at the last statement of
2086 the innermost scope. */
2087 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2088 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2089 if (last)
2090 {
2091 prev = last;
2092 /* It might be a label without a location. Use the
2093 location of the scope then. */
2094 if (!gimple_has_location (prev))
2095 *prevloc = bind_loc;
2096 }
2097 gsi_next (gsi_p);
2098 continue;
2099 }
2100
2101 /* Ifs are tricky. */
2102 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2103 {
2104 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2105 tree false_lab = gimple_cond_false_label (cond_stmt);
2106 location_t if_loc = gimple_location (cond_stmt);
2107
2108 /* If we have e.g.
2109 if (i > 1) goto <D.2259>; else goto D;
2110 we can't do much with the else-branch. */
2111 if (!DECL_ARTIFICIAL (false_lab))
2112 break;
2113
2114 /* Go on until the false label, then one step back. */
2115 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2116 {
2117 gimple *stmt = gsi_stmt (*gsi_p);
2118 if (gimple_code (stmt) == GIMPLE_LABEL
2119 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2120 break;
2121 }
2122
2123 /* Not found? Oops. */
2124 if (gsi_end_p (*gsi_p))
2125 break;
2126
2127 struct label_entry l = { false_lab, if_loc };
2128 labels->safe_push (l);
2129
2130 /* Go to the last statement of the then branch. */
2131 gsi_prev (gsi_p);
2132
2133 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2134 <D.1759>:
2135 <stmt>;
2136 goto <D.1761>;
2137 <D.1760>:
2138 */
2139 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2140 && !gimple_has_location (gsi_stmt (*gsi_p)))
2141 {
2142 /* Look at the statement before, it might be
2143 attribute fallthrough, in which case don't warn. */
2144 gsi_prev (gsi_p);
2145 bool fallthru_before_dest
2146 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2147 gsi_next (gsi_p);
2148 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2149 if (!fallthru_before_dest)
2150 {
2151 struct label_entry l = { goto_dest, if_loc };
2152 labels->safe_push (l);
2153 }
2154 }
2155 /* And move back. */
2156 gsi_next (gsi_p);
2157 }
2158
2159 /* Remember the last statement. Skip labels that are of no interest
2160 to us. */
2161 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2162 {
2163 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2164 if (find_label_entry (labels, label))
2165 prev = gsi_stmt (*gsi_p);
2166 }
2167 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2168 ;
2169 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2170 ;
2171 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2172 prev = gsi_stmt (*gsi_p);
2173 gsi_next (gsi_p);
2174 }
2175 while (!gsi_end_p (*gsi_p)
2176 /* Stop if we find a case or a user-defined label. */
2177 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2178 || !gimple_has_location (gsi_stmt (*gsi_p))));
2179
2180 if (prev && gimple_has_location (prev))
2181 *prevloc = gimple_location (prev);
2182 return prev;
2183 }
2184
2185 /* Return true if the switch fallthrough warning should occur. LABEL is
2186 the label we're falling through to. */
2187
2188 static bool
2189 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2190 {
2191 gimple_stmt_iterator gsi = *gsi_p;
2192
2193 /* Don't warn if the label is marked with a "falls through" comment. */
2194 if (FALLTHROUGH_LABEL_P (label))
2195 return false;
2196
2197 /* Don't warn for non-case labels followed by a statement:
2198 case 0:
2199 foo ();
2200 label:
2201 bar ();
2202 as these are likely intentional. */
2203 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2204 {
2205 tree l;
2206 while (!gsi_end_p (gsi)
2207 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2208 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2209 && !case_label_p (&gimplify_ctxp->case_labels, l))
2210 gsi_next_nondebug (&gsi);
2211 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2212 return false;
2213 }
2214
2215 /* Don't warn for terminated branches, i.e. when the code after the
2216 subsequent case label immediately breaks, jumps, or returns. */
2217 gsi = *gsi_p;
2218
2219 /* Skip all immediately following labels. */
2220 while (!gsi_end_p (gsi)
2221 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2222 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2223 gsi_next_nondebug (&gsi);
2224
2225 /* { ... something; default:; } */
2226 if (gsi_end_p (gsi)
2227 /* { ... something; default: break; } or
2228 { ... something; default: goto L; } */
2229 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2230 /* { ... something; default: return; } */
2231 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2232 return false;
2233
2234 return true;
2235 }
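
/* Illustrative examples of the suppressions above:

     case 0:
       foo ();
     label:		<-- non-case label followed by a statement:
       bar ();		    likely intentional, no warning
       break;
     case 1:
       foo ();		<-- no warning either: the case fallen into
     case 2:		    terminates immediately
       break;
*/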
2236
2237 /* Callback for walk_gimple_seq. */
2238
2239 static tree
2240 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2241 struct walk_stmt_info *)
2242 {
2243 gimple *stmt = gsi_stmt (*gsi_p);
2244
2245 *handled_ops_p = true;
2246 switch (gimple_code (stmt))
2247 {
2248 case GIMPLE_TRY:
2249 case GIMPLE_BIND:
2250 case GIMPLE_CATCH:
2251 case GIMPLE_EH_FILTER:
2252 case GIMPLE_TRANSACTION:
2253 /* Walk the sub-statements. */
2254 *handled_ops_p = false;
2255 break;
2256
2257 /* Find a sequence of form:
2258
2259 GIMPLE_LABEL
2260 [...]
2261 <may fallthru stmt>
2262 GIMPLE_LABEL
2263
2264 and possibly warn. */
2265 case GIMPLE_LABEL:
2266 {
2267 /* Found a label. Skip all immediately following labels. */
2268 while (!gsi_end_p (*gsi_p)
2269 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2270 gsi_next_nondebug (gsi_p);
2271
2272 /* There might be no more statements. */
2273 if (gsi_end_p (*gsi_p))
2274 return integer_zero_node;
2275
2276 /* Vector of labels that fall through. */
2277 auto_vec <struct label_entry> labels;
2278 location_t prevloc;
2279 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2280
2281 /* There might be no more statements. */
2282 if (gsi_end_p (*gsi_p))
2283 return integer_zero_node;
2284
2285 gimple *next = gsi_stmt (*gsi_p);
2286 tree label;
2287 /* If what follows is a label, then we may have a fallthrough. */
2288 if (gimple_code (next) == GIMPLE_LABEL
2289 && gimple_has_location (next)
2290 && (label = gimple_label_label (as_a <glabel *> (next)))
2291 && prev != NULL)
2292 {
2293 struct label_entry *l;
2294 bool warned_p = false;
2295 auto_diagnostic_group d;
2296 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2297 /* Quiet. */;
2298 else if (gimple_code (prev) == GIMPLE_LABEL
2299 && (label = gimple_label_label (as_a <glabel *> (prev)))
2300 && (l = find_label_entry (&labels, label)))
2301 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2302 "this statement may fall through");
2303 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2304 /* Try to be clever and don't warn when the statement
2305 can't actually fall through. */
2306 && gimple_stmt_may_fallthru (prev)
2307 && prevloc != UNKNOWN_LOCATION)
2308 warned_p = warning_at (prevloc,
2309 OPT_Wimplicit_fallthrough_,
2310 "this statement may fall through");
2311 if (warned_p)
2312 inform (gimple_location (next), "here");
2313
2314 /* Mark this label as processed so as to prevent multiple
2315 warnings in nested switches. */
2316 FALLTHROUGH_LABEL_P (label) = true;
2317
2318 /* So that next warn_implicit_fallthrough_r will start looking for
2319 a new sequence starting with this label. */
2320 gsi_prev (gsi_p);
2321 }
2322 }
2323 break;
2324 default:
2325 break;
2326 }
2327 return NULL_TREE;
2328 }
2329
2330 /* Warn when a switch case falls through. */
2331
2332 static void
2333 maybe_warn_implicit_fallthrough (gimple_seq seq)
2334 {
2335 if (!warn_implicit_fallthrough)
2336 return;
2337
2338 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2339 if (!(lang_GNU_C ()
2340 || lang_GNU_CXX ()
2341 || lang_GNU_OBJC ()))
2342 return;
2343
2344 struct walk_stmt_info wi;
2345 memset (&wi, 0, sizeof (wi));
2346 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2347 }
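
/* Illustrative example:

     switch (x)
       {
       case 0:
	 foo ();	<-- warning: this statement may fall through
       case 1:		<-- inform: here
	 bar ();
       }

   Spelling the fall through out, e.g. with __attribute__((fallthrough))
   or C++17 [[fallthrough]], emits IFN_FALLTHROUGH and silences it.  */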
2348
2349 /* Callback for walk_gimple_seq. */
2350
2351 static tree
2352 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2353 struct walk_stmt_info *wi)
2354 {
2355 gimple *stmt = gsi_stmt (*gsi_p);
2356
2357 *handled_ops_p = true;
2358 switch (gimple_code (stmt))
2359 {
2360 case GIMPLE_TRY:
2361 case GIMPLE_BIND:
2362 case GIMPLE_CATCH:
2363 case GIMPLE_EH_FILTER:
2364 case GIMPLE_TRANSACTION:
2365 /* Walk the sub-statements. */
2366 *handled_ops_p = false;
2367 break;
2368 case GIMPLE_CALL:
2369 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2370 {
2371 gsi_remove (gsi_p, true);
2372 if (gsi_end_p (*gsi_p))
2373 {
2374 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2375 return integer_zero_node;
2376 }
2377
2378 bool found = false;
2379 location_t loc = gimple_location (stmt);
2380
2381 gimple_stmt_iterator gsi2 = *gsi_p;
2382 stmt = gsi_stmt (gsi2);
2383 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2384 {
2385 /* Go on until the artificial label. */
2386 tree goto_dest = gimple_goto_dest (stmt);
2387 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2388 {
2389 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2390 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2391 == goto_dest)
2392 break;
2393 }
2394
2395 /* Not found? Stop. */
2396 if (gsi_end_p (gsi2))
2397 break;
2398
2399 /* Look one past it. */
2400 gsi_next (&gsi2);
2401 }
2402
2403 /* We're looking for a case label or default label here. */
2404 while (!gsi_end_p (gsi2))
2405 {
2406 stmt = gsi_stmt (gsi2);
2407 if (gimple_code (stmt) == GIMPLE_LABEL)
2408 {
2409 tree label = gimple_label_label (as_a <glabel *> (stmt));
2410 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2411 {
2412 found = true;
2413 break;
2414 }
2415 }
2416 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2417 ;
2418 else if (!is_gimple_debug (stmt))
2419 /* Anything else is not expected. */
2420 break;
2421 gsi_next (&gsi2);
2422 }
2423 if (!found)
2424 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2425 "a case label or default label");
2426 }
2427 break;
2428 default:
2429 break;
2430 }
2431 return NULL_TREE;
2432 }
2433
2434 /* Expand all FALLTHROUGH () calls in SEQ. */
2435
2436 static void
2437 expand_FALLTHROUGH (gimple_seq *seq_p)
2438 {
2439 struct walk_stmt_info wi;
2440 location_t loc;
2441 memset (&wi, 0, sizeof (wi));
2442 wi.info = (void *) &loc;
2443 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2444 if (wi.callback_result == integer_zero_node)
2445 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2446 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2447 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2448 "a case label or default label");
2449 }
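
/* Illustrative example: the attribute must directly precede a case or
   default label:

     case 0:
       foo ();
       __attribute__((fallthrough));	OK, "case 1:" follows
     case 1:
       bar ();
       __attribute__((fallthrough));	pedwarn: nothing follows
     }
*/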
2450
2451 \f
2452 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2453 branch to. */
2454
2455 static enum gimplify_status
2456 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2457 {
2458 tree switch_expr = *expr_p;
2459 gimple_seq switch_body_seq = NULL;
2460 enum gimplify_status ret;
2461 tree index_type = TREE_TYPE (switch_expr);
2462 if (index_type == NULL_TREE)
2463 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2464
2465 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2466 fb_rvalue);
2467 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2468 return ret;
2469
2470 if (SWITCH_BODY (switch_expr))
2471 {
2472 vec<tree> labels;
2473 vec<tree> saved_labels;
2474 hash_set<tree> *saved_live_switch_vars = NULL;
2475 tree default_case = NULL_TREE;
2476 gswitch *switch_stmt;
2477
2478 /* Save old labels, get new ones from body, then restore the old
2479 labels. Save all the things from the switch body to append after. */
2480 saved_labels = gimplify_ctxp->case_labels;
2481 gimplify_ctxp->case_labels.create (8);
2482
2483 /* Do not create live_switch_vars unless SWITCH_BODY is a BIND_EXPR or STATEMENT_LIST. */
2484 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2485 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2486 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2487 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2488 else
2489 gimplify_ctxp->live_switch_vars = NULL;
2490
2491 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2492 gimplify_ctxp->in_switch_expr = true;
2493
2494 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2495
2496 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2497 maybe_warn_switch_unreachable (switch_body_seq);
2498 maybe_warn_implicit_fallthrough (switch_body_seq);
2499 /* Only do this for the outermost GIMPLE_SWITCH. */
2500 if (!gimplify_ctxp->in_switch_expr)
2501 expand_FALLTHROUGH (&switch_body_seq);
2502
2503 labels = gimplify_ctxp->case_labels;
2504 gimplify_ctxp->case_labels = saved_labels;
2505
2506 if (gimplify_ctxp->live_switch_vars)
2507 {
2508 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2509 delete gimplify_ctxp->live_switch_vars;
2510 }
2511 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2512
2513 preprocess_case_label_vec_for_gimple (labels, index_type,
2514 &default_case);
2515
2516 bool add_bind = false;
2517 if (!default_case)
2518 {
2519 glabel *new_default;
2520
2521 default_case
2522 = build_case_label (NULL_TREE, NULL_TREE,
2523 create_artificial_label (UNKNOWN_LOCATION));
2524 if (old_in_switch_expr)
2525 {
2526 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2527 add_bind = true;
2528 }
2529 new_default = gimple_build_label (CASE_LABEL (default_case));
2530 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2531 }
2532 else if (old_in_switch_expr)
2533 {
2534 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2535 if (last && gimple_code (last) == GIMPLE_LABEL)
2536 {
2537 tree label = gimple_label_label (as_a <glabel *> (last));
2538 if (SWITCH_BREAK_LABEL_P (label))
2539 add_bind = true;
2540 }
2541 }
2542
2543 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2544 default_case, labels);
2545 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2546 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2547 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2548 so that we can easily find the start and end of the switch
2549 statement. */
2550 if (add_bind)
2551 {
2552 gimple_seq bind_body = NULL;
2553 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2554 gimple_seq_add_seq (&bind_body, switch_body_seq);
2555 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2556 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2557 gimplify_seq_add_stmt (pre_p, bind);
2558 }
2559 else
2560 {
2561 gimplify_seq_add_stmt (pre_p, switch_stmt);
2562 gimplify_seq_add_seq (pre_p, switch_body_seq);
2563 }
2564 labels.release ();
2565 }
2566 else
2567 gcc_unreachable ();
2568
2569 return GS_ALL_DONE;
2570 }
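
/* Illustrative sketch: "switch (x) { case 1: f (); break; }" gimplifies
   to roughly

     switch (x) <default: <D.1>, case 1: <D.2>>
     <D.2>:
     f ();
     goto <D.1>;	(the break)
     <D.1>:

   with the default label synthesized above because the source lacks
   one.  */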
2571
2572 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2573
2574 static enum gimplify_status
2575 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2576 {
2577 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2578 == current_function_decl);
2579
2580 tree label = LABEL_EXPR_LABEL (*expr_p);
2581 glabel *label_stmt = gimple_build_label (label);
2582 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2583 gimplify_seq_add_stmt (pre_p, label_stmt);
2584
2585 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2586 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2587 NOT_TAKEN));
2588 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2589 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2590 TAKEN));
2591
2592 return GS_ALL_DONE;
2593 }
2594
2595 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2596
2597 static enum gimplify_status
2598 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2599 {
2600 struct gimplify_ctx *ctxp;
2601 glabel *label_stmt;
2602
2603 /* Invalid programs can play Duff's Device type games with, for example,
2604 #pragma omp parallel. At least in the C front end, we don't
2605 detect such invalid branches until after gimplification, in the
2606 diagnose_omp_blocks pass. */
2607 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2608 if (ctxp->case_labels.exists ())
2609 break;
2610
2611 tree label = CASE_LABEL (*expr_p);
2612 label_stmt = gimple_build_label (label);
2613 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2614 ctxp->case_labels.safe_push (*expr_p);
2615 gimplify_seq_add_stmt (pre_p, label_stmt);
2616
2617 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2618 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2619 NOT_TAKEN));
2620 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2621 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2622 TAKEN));
2623
2624 return GS_ALL_DONE;
2625 }
2626
2627 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2628 if necessary. */
2629
2630 tree
2631 build_and_jump (tree *label_p)
2632 {
2633 if (label_p == NULL)
2634 /* If there's nowhere to jump, just fall through. */
2635 return NULL_TREE;
2636
2637 if (*label_p == NULL_TREE)
2638 {
2639 tree label = create_artificial_label (UNKNOWN_LOCATION);
2640 *label_p = label;
2641 }
2642
2643 return build1 (GOTO_EXPR, void_type_node, *label_p);
2644 }
2645
2646 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2647 This also involves building a label to jump to and communicating it to
2648 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2649
2650 static enum gimplify_status
2651 gimplify_exit_expr (tree *expr_p)
2652 {
2653 tree cond = TREE_OPERAND (*expr_p, 0);
2654 tree expr;
2655
2656 expr = build_and_jump (&gimplify_ctxp->exit_label);
2657 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2658 *expr_p = expr;
2659
2660 return GS_OK;
2661 }
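
/* Illustrative sketch: EXIT_EXPR <cond> becomes

     if (cond) goto exit_label;

   with exit_label created on demand; gimplify_loop_expr emits it after
   the loop's back edge.  */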
2662
2663 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2664 different from its canonical type, wrap the whole thing inside a
2665 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2666 type.
2667
2668 The canonical type of a COMPONENT_REF is the type of the field being
2669 referenced--unless the field is a bit-field which can be read directly
2670 in a smaller mode, in which case the canonical type is the
2671 sign-appropriate type corresponding to that mode. */
2672
2673 static void
2674 canonicalize_component_ref (tree *expr_p)
2675 {
2676 tree expr = *expr_p;
2677 tree type;
2678
2679 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2680
2681 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2682 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2683 else
2684 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2685
2686 /* One could argue that all the stuff below is unnecessary for the
2687 non-bitfield case, and that a needed type adjustment there should
2688 instead be declared a front-end error. */
2689 if (TREE_TYPE (expr) != type)
2690 {
2691 #ifdef ENABLE_TYPES_CHECKING
2692 tree old_type = TREE_TYPE (expr);
2693 #endif
2694 int type_quals;
2695
2696 /* We need to preserve qualifiers and propagate them from
2697 operand 0. */
2698 type_quals = TYPE_QUALS (type)
2699 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2700 if (TYPE_QUALS (type) != type_quals)
2701 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2702
2703 /* Set the type of the COMPONENT_REF to the underlying type. */
2704 TREE_TYPE (expr) = type;
2705
2706 #ifdef ENABLE_TYPES_CHECKING
2707 /* It is now a FE error, if the conversion from the canonical
2708 type to the original expression type is not useless. */
2709 gcc_assert (useless_type_conversion_p (old_type, type));
2710 #endif
2711 }
2712 }
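
/* Illustrative example (details depend on the target): given

     struct S { int b : 3; } s;

   an rvalue read of s.b might be performed in a smaller mode such as
   QImode, in which case the function above retypes the COMPONENT_REF
   to the corresponding sign-appropriate 8-bit type instead of int.  */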
2713
2714 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2715 to foo, embed that change in the ADDR_EXPR by converting
2716 T array[U];
2717 (T *)&array
2718 ==>
2719 &array[L]
2720 where L is the lower bound. For simplicity, only do this for constant
2721 lower bound.
2722 The constraint is that the type of &array[L] is trivially convertible
2723 to T *. */
2724
2725 static void
2726 canonicalize_addr_expr (tree *expr_p)
2727 {
2728 tree expr = *expr_p;
2729 tree addr_expr = TREE_OPERAND (expr, 0);
2730 tree datype, ddatype, pddatype;
2731
2732 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2733 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2734 || TREE_CODE (addr_expr) != ADDR_EXPR)
2735 return;
2736
2737 /* The addr_expr type should be a pointer to an array. */
2738 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2739 if (TREE_CODE (datype) != ARRAY_TYPE)
2740 return;
2741
2742 /* The pointer to element type shall be trivially convertible to
2743 the expression pointer type. */
2744 ddatype = TREE_TYPE (datype);
2745 pddatype = build_pointer_type (ddatype);
2746 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2747 pddatype))
2748 return;
2749
2750 /* The lower bound and element sizes must be constant. */
2751 if (!TYPE_SIZE_UNIT (ddatype)
2752 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2753 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2754 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2755 return;
2756
2757 /* All checks succeeded. Build a new node to merge the cast. */
2758 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2759 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2760 NULL_TREE, NULL_TREE);
2761 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2762
2763 /* We can have stripped a required restrict qualifier above. */
2764 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2765 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2766 }
2767
2768 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2769 underneath as appropriate. */
2770
2771 static enum gimplify_status
2772 gimplify_conversion (tree *expr_p)
2773 {
2774 location_t loc = EXPR_LOCATION (*expr_p);
2775 gcc_assert (CONVERT_EXPR_P (*expr_p));
2776
2777 /* Then strip away all but the outermost conversion. */
2778 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2779
2780 /* And remove the outermost conversion if it's useless. */
2781 if (tree_ssa_useless_type_conversion (*expr_p))
2782 *expr_p = TREE_OPERAND (*expr_p, 0);
2783
2784 /* If we still have a conversion at the toplevel,
2785 then canonicalize some constructs. */
2786 if (CONVERT_EXPR_P (*expr_p))
2787 {
2788 tree sub = TREE_OPERAND (*expr_p, 0);
2789
2790 /* If a NOP conversion is changing the type of a COMPONENT_REF
2791 expression, then canonicalize its type now in order to expose more
2792 redundant conversions. */
2793 if (TREE_CODE (sub) == COMPONENT_REF)
2794 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2795
2796 /* If a NOP conversion is changing a pointer to array of foo
2797 to a pointer to foo, embed that change in the ADDR_EXPR. */
2798 else if (TREE_CODE (sub) == ADDR_EXPR)
2799 canonicalize_addr_expr (expr_p);
2800 }
2801
2802 /* If we have a conversion to a non-register type force the
2803 use of a VIEW_CONVERT_EXPR instead. */
2804 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2805 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2806 TREE_OPERAND (*expr_p, 0));
2807
2808 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2809 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2810 TREE_SET_CODE (*expr_p, NOP_EXPR);
2811
2812 return GS_OK;
2813 }
2814
2815 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2816 DECL_VALUE_EXPR, and it's worth re-examining things. */
2817
2818 static enum gimplify_status
2819 gimplify_var_or_parm_decl (tree *expr_p)
2820 {
2821 tree decl = *expr_p;
2822
2823 /* ??? If this is a local variable, and it has not been seen in any
2824 outer BIND_EXPR, then it's probably the result of a duplicate
2825 declaration, for which we've already issued an error. It would
2826 be really nice if the front end wouldn't leak these at all.
2827 Currently the only known culprit is C++ destructors, as seen
2828 in g++.old-deja/g++.jason/binding.C. */
2829 if (VAR_P (decl)
2830 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
2831 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2832 && decl_function_context (decl) == current_function_decl)
2833 {
2834 gcc_assert (seen_error ());
2835 return GS_ERROR;
2836 }
2837
2838 /* When within an OMP context, notice uses of variables. */
2839 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
2840 return GS_ALL_DONE;
2841
2842 /* If the decl is an alias for another expression, substitute it now. */
2843 if (DECL_HAS_VALUE_EXPR_P (decl))
2844 {
2845 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
2846 return GS_OK;
2847 }
2848
2849 return GS_ALL_DONE;
2850 }
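
/* Illustrative sketch: for a VLA "char buf[n]", gimplify_vla_decl sets
   DECL_VALUE_EXPR (buf) to something like "*buf.0" (the name here is
   purely illustrative), a dereference of the pointer holding the alloca
   result, so the substitution above rewrites every later use of buf
   into that dereference.  */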
2851
2852 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2853
2854 static void
2855 recalculate_side_effects (tree t)
2856 {
2857 enum tree_code code = TREE_CODE (t);
2858 int len = TREE_OPERAND_LENGTH (t);
2859 int i;
2860
2861 switch (TREE_CODE_CLASS (code))
2862 {
2863 case tcc_expression:
2864 switch (code)
2865 {
2866 case INIT_EXPR:
2867 case MODIFY_EXPR:
2868 case VA_ARG_EXPR:
2869 case PREDECREMENT_EXPR:
2870 case PREINCREMENT_EXPR:
2871 case POSTDECREMENT_EXPR:
2872 case POSTINCREMENT_EXPR:
2873 /* All of these have side-effects, no matter what their
2874 operands are. */
2875 return;
2876
2877 default:
2878 break;
2879 }
2880 /* Fall through. */
2881
2882 case tcc_comparison: /* a comparison expression */
2883 case tcc_unary: /* a unary arithmetic expression */
2884 case tcc_binary: /* a binary arithmetic expression */
2885 case tcc_reference: /* a reference */
2886 case tcc_vl_exp: /* a function call */
2887 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2888 for (i = 0; i < len; ++i)
2889 {
2890 tree op = TREE_OPERAND (t, i);
2891 if (op && TREE_SIDE_EFFECTS (op))
2892 TREE_SIDE_EFFECTS (t) = 1;
2893 }
2894 break;
2895
2896 case tcc_constant:
2897 /* No side-effects. */
2898 return;
2899
2900 default:
2901 gcc_unreachable ();
2902 }
2903 }
2904
2905 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2906 node *EXPR_P.
2907
2908 compound_lval
2909 : min_lval '[' val ']'
2910 | min_lval '.' ID
2911 | compound_lval '[' val ']'
2912 | compound_lval '.' ID
2913
2914 This is not part of the original SIMPLE definition, which separates
2915 array and member references, but it seems reasonable to handle them
2916 together. Also, this way we don't run into problems with union
2917 aliasing; gcc requires that for accesses through a union to alias, the
2918 union reference must be explicit, which was not always the case when we
2919 were splitting up array and member refs.
2920
2921 PRE_P points to the sequence where side effects that must happen before
2922 *EXPR_P should be stored.
2923
2924 POST_P points to the sequence where side effects that must happen after
2925 *EXPR_P should be stored. */
2926
2927 static enum gimplify_status
2928 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2929 fallback_t fallback)
2930 {
2931 tree *p;
2932 enum gimplify_status ret = GS_ALL_DONE, tret;
2933 int i;
2934 location_t loc = EXPR_LOCATION (*expr_p);
2935 tree expr = *expr_p;
2936
2937 /* Create a stack of the subexpressions so later we can walk them in
2938 order from inner to outer. */
2939 auto_vec<tree, 10> expr_stack;
2940
2941 /* We can handle anything that get_inner_reference can deal with. */
2942 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2943 {
2944 restart:
2945 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2946 if (TREE_CODE (*p) == INDIRECT_REF)
2947 *p = fold_indirect_ref_loc (loc, *p);
2948
2949 if (handled_component_p (*p))
2950 ;
2951 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2952 additional COMPONENT_REFs. */
2953 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2954 && gimplify_var_or_parm_decl (p) == GS_OK)
2955 goto restart;
2956 else
2957 break;
2958
2959 expr_stack.safe_push (*p);
2960 }
2961
2962 gcc_assert (expr_stack.length ());
2963
2964 /* Now EXPR_STACK is a stack of pointers to all the refs we've
2965 walked through and P points to the innermost expression.
2966
2967 Java requires that we elaborate nodes in source order. That
2968 means we must gimplify the inner expression followed by each of
2969 the indices, in order. But we can't gimplify the inner
2970 expression until we deal with any variable bounds, sizes, or
2971 positions in order to deal with PLACEHOLDER_EXPRs.
2972
2973 So we do this in three steps. First we deal with the annotations
2974 for any variables in the components, then we gimplify the base,
2975 then we gimplify any indices, from left to right. */
2976 for (i = expr_stack.length () - 1; i >= 0; i--)
2977 {
2978 tree t = expr_stack[i];
2979
2980 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2981 {
2982 /* Gimplify the low bound and element type size and put them into
2983 the ARRAY_REF. If these values are set, they have already been
2984 gimplified. */
2985 if (TREE_OPERAND (t, 2) == NULL_TREE)
2986 {
2987 tree low = unshare_expr (array_ref_low_bound (t));
2988 if (!is_gimple_min_invariant (low))
2989 {
2990 TREE_OPERAND (t, 2) = low;
2991 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2992 post_p, is_gimple_reg,
2993 fb_rvalue);
2994 ret = MIN (ret, tret);
2995 }
2996 }
2997 else
2998 {
2999 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3000 is_gimple_reg, fb_rvalue);
3001 ret = MIN (ret, tret);
3002 }
3003
3004 if (TREE_OPERAND (t, 3) == NULL_TREE)
3005 {
3006 tree elmt_size = array_ref_element_size (t);
3007 if (!is_gimple_min_invariant (elmt_size))
3008 {
3009 elmt_size = unshare_expr (elmt_size);
3010 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3011 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3012
3013 /* Divide the element size by the alignment of the element
3014 type (above). */
3015 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3016 elmt_size, factor);
3017
3018 TREE_OPERAND (t, 3) = elmt_size;
3019 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
3020 post_p, is_gimple_reg,
3021 fb_rvalue);
3022 ret = MIN (ret, tret);
3023 }
3024 }
3025 else
3026 {
3027 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3028 is_gimple_reg, fb_rvalue);
3029 ret = MIN (ret, tret);
3030 }
3031 }
3032 else if (TREE_CODE (t) == COMPONENT_REF)
3033 {
3034 /* Set the field offset into T and gimplify it. */
3035 if (TREE_OPERAND (t, 2) == NULL_TREE)
3036 {
3037 tree offset = component_ref_field_offset (t);
3038 if (!is_gimple_min_invariant (offset))
3039 {
3040 offset = unshare_expr (offset);
3041 tree field = TREE_OPERAND (t, 1);
3042 tree factor
3043 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3044
3045 /* Divide the offset by its alignment. */
3046 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3047 offset, factor);
3048
3049 TREE_OPERAND (t, 2) = offset;
3050 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
3051 post_p, is_gimple_reg,
3052 fb_rvalue);
3053 ret = MIN (ret, tret);
3054 }
3055 }
3056 else
3057 {
3058 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3059 is_gimple_reg, fb_rvalue);
3060 ret = MIN (ret, tret);
3061 }
3062 }
3063 }
3064
3065 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3066 so as to match the min_lval predicate. Failure to do so may result
3067 in the creation of large aggregate temporaries. */
3068 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3069 fallback | fb_lvalue);
3070 ret = MIN (ret, tret);
3071
3072 /* And finally, the indices and operands of ARRAY_REF. During this
3073 loop we also remove any useless conversions. */
3074 for (; expr_stack.length () > 0; )
3075 {
3076 tree t = expr_stack.pop ();
3077
3078 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3079 {
3080 /* Gimplify the dimension. */
3081 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3082 {
3083 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3084 is_gimple_val, fb_rvalue);
3085 ret = MIN (ret, tret);
3086 }
3087 }
3088
3089 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3090
3091 /* The innermost expression P may have originally had
3092 TREE_SIDE_EFFECTS set which would have caused all the outer
3093 expressions in *EXPR_P leading to P to also have had
3094 TREE_SIDE_EFFECTS set. */
3095 recalculate_side_effects (t);
3096 }
3097
3098 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3099 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3100 {
3101 canonicalize_component_ref (expr_p);
3102 }
3103
3104 expr_stack.release ();
3105
3106 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3107
3108 return ret;
3109 }
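
/* Illustrative sketch: gimplifying the compound lvalue "a[i].f[j]"
   walks the refs inner to outer and replaces variable indices with
   temporaries, roughly

     i.0 = i;
     j.1 = j;
     ... a[i.0].f[j.1] ...

   while the reference chain itself remains one tree.  */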
3110
3111 /* Gimplify the self modifying expression pointed to by EXPR_P
3112 (++, --, +=, -=).
3113
3114 PRE_P points to the list where side effects that must happen before
3115 *EXPR_P should be stored.
3116
3117 POST_P points to the list where side effects that must happen after
3118 *EXPR_P should be stored.
3119
3120 WANT_VALUE is nonzero iff we want to use the value of this expression
3121 in another expression.
3122
3123 ARITH_TYPE is the type the computation should be performed in. */
3124
3125 enum gimplify_status
3126 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3127 bool want_value, tree arith_type)
3128 {
3129 enum tree_code code;
3130 tree lhs, lvalue, rhs, t1;
3131 gimple_seq post = NULL, *orig_post_p = post_p;
3132 bool postfix;
3133 enum tree_code arith_code;
3134 enum gimplify_status ret;
3135 location_t loc = EXPR_LOCATION (*expr_p);
3136
3137 code = TREE_CODE (*expr_p);
3138
3139 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3140 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3141
3142 /* Prefix or postfix? */
3143 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3144 /* Faster to treat as prefix if result is not used. */
3145 postfix = want_value;
3146 else
3147 postfix = false;
3148
3149 /* For postfix, make sure the inner expression's post side effects
3150 are executed after side effects from this expression. */
3151 if (postfix)
3152 post_p = &post;
3153
3154 /* Add or subtract? */
3155 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3156 arith_code = PLUS_EXPR;
3157 else
3158 arith_code = MINUS_EXPR;
3159
3160 /* Gimplify the LHS into a GIMPLE lvalue. */
3161 lvalue = TREE_OPERAND (*expr_p, 0);
3162 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3163 if (ret == GS_ERROR)
3164 return ret;
3165
3166 /* Extract the operands to the arithmetic operation. */
3167 lhs = lvalue;
3168 rhs = TREE_OPERAND (*expr_p, 1);
3169
3170 /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3171 that as the result value and in the postqueue operation. */
3172 if (postfix)
3173 {
3174 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3175 if (ret == GS_ERROR)
3176 return ret;
3177
3178 lhs = get_initialized_tmp_var (lhs, pre_p);
3179 }
3180
3181 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3182 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3183 {
3184 rhs = convert_to_ptrofftype_loc (loc, rhs);
3185 if (arith_code == MINUS_EXPR)
3186 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3187 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3188 }
3189 else
3190 t1 = fold_convert (TREE_TYPE (*expr_p),
3191 fold_build2 (arith_code, arith_type,
3192 fold_convert (arith_type, lhs),
3193 fold_convert (arith_type, rhs)));
3194
3195 if (postfix)
3196 {
3197 gimplify_assign (lvalue, t1, pre_p);
3198 gimplify_seq_add_seq (orig_post_p, post);
3199 *expr_p = lhs;
3200 return GS_ALL_DONE;
3201 }
3202 else
3203 {
3204 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3205 return GS_OK;
3206 }
3207 }
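
/* Illustrative sketch: for "y = x++" (postfix, value wanted) the code
   above emits approximately

     x.0 = x;
     x = x.0 + 1;
     y = x.0;

   whereas prefix "++x" just becomes the MODIFY_EXPR "x = x + 1" built
   in the non-postfix branch.  */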
3208
3209 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3210
3211 static void
3212 maybe_with_size_expr (tree *expr_p)
3213 {
3214 tree expr = *expr_p;
3215 tree type = TREE_TYPE (expr);
3216 tree size;
3217
3218 /* If we've already wrapped this or the type is error_mark_node, we can't do
3219 anything. */
3220 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3221 || type == error_mark_node)
3222 return;
3223
3224 /* If the size isn't known or is a constant, we have nothing to do. */
3225 size = TYPE_SIZE_UNIT (type);
3226 if (!size || poly_int_tree_p (size))
3227 return;
3228
3229 /* Otherwise, make a WITH_SIZE_EXPR. */
3230 size = unshare_expr (size);
3231 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3232 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3233 }
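
/* Illustrative, hypothetical example (GNU C variable-size struct): in

     void g (int n) { struct T { char buf[n]; } t; f (t); }

   the argument "t" has variable-sized type, so it is wrapped as
   WITH_SIZE_EXPR <t, SIZE>, SIZE being the runtime byte size, and
   later phases still know how many bytes to copy.  */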
3234
3235 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3236 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3237 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3238 gimplified to an SSA name. */
3239
3240 enum gimplify_status
3241 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3242 bool allow_ssa)
3243 {
3244 bool (*test) (tree);
3245 fallback_t fb;
3246
3247 /* In general, we allow lvalues for function arguments to avoid
3248 extra overhead of copying large aggregates out of even larger
3249 aggregates into temporaries only to copy the temporaries to
3250 the argument list. Make optimizers happy by pulling out to
3251 temporaries those types that fit in registers. */
3252 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3253 test = is_gimple_val, fb = fb_rvalue;
3254 else
3255 {
3256 test = is_gimple_lvalue, fb = fb_either;
3257 /* Also strip a TARGET_EXPR that would force an extra copy. */
3258 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3259 {
3260 tree init = TARGET_EXPR_INITIAL (*arg_p);
3261 if (init
3262 && !VOID_TYPE_P (TREE_TYPE (init)))
3263 *arg_p = init;
3264 }
3265 }
3266
3267 /* If this is a variable sized type, we must remember the size. */
3268 maybe_with_size_expr (arg_p);
3269
3270 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3271 /* Make sure arguments have the same location as the function call
3272 itself. */
3273 protected_set_expr_location (*arg_p, call_location);
3274
3275 /* There is a sequence point before a function call. Side effects in
3276 the argument list must occur before the actual call. So, when
3277 gimplifying arguments, force gimplify_expr to use an internal
3278 post queue which is then appended to the end of PRE_P. */
3279 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3280 }
3281
3282 /* Don't fold inside offloading or taskreg regions: it can break code by
3283 adding decl references that weren't in the source. We'll do it during
3284 the omplower pass instead. */
3285
3286 static bool
3287 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3288 {
3289 struct gimplify_omp_ctx *ctx;
3290 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3291 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3292 return false;
3293 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3294 return false;
3295 /* Delay folding of builtins until the IL is in a consistent state
3296 so the diagnostic machinery can do a better job. */
3297 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3298 return false;
3299 return fold_stmt (gsi);
3300 }
3301
3302 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3303 WANT_VALUE is true if the result of the call is desired. */
3304
3305 static enum gimplify_status
3306 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3307 {
3308 tree fndecl, parms, p, fnptrtype;
3309 enum gimplify_status ret;
3310 int i, nargs;
3311 gcall *call;
3312 bool builtin_va_start_p = false;
3313 location_t loc = EXPR_LOCATION (*expr_p);
3314
3315 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3316
3317 /* For reliable diagnostics during inlining, it is necessary that
3318 every call_expr be annotated with file and line. */
3319 if (! EXPR_HAS_LOCATION (*expr_p))
3320 SET_EXPR_LOCATION (*expr_p, input_location);
3321
3322 /* Gimplify internal functions created in the FEs. */
3323 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3324 {
3325 if (want_value)
3326 return GS_ALL_DONE;
3327
3328 nargs = call_expr_nargs (*expr_p);
3329 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3330 auto_vec<tree> vargs (nargs);
3331
3332 for (i = 0; i < nargs; i++)
3333 {
3334 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3335 EXPR_LOCATION (*expr_p));
3336 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3337 }
3338
3339 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3340 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3341 gimplify_seq_add_stmt (pre_p, call);
3342 return GS_ALL_DONE;
3343 }
3344
3345 /* This may be a call to a builtin function.
3346
3347 Builtin function calls may be transformed into different
3348 (and more efficient) builtin function calls under certain
3349 circumstances. Unfortunately, gimplification can muck things
3350 up enough that the builtin expanders are not aware that certain
3351 transformations are still valid.
3352
3353 So we attempt transformation/gimplification of the call before
3354 we gimplify the CALL_EXPR. At this time we do not manage to
3355 transform all calls in the same manner as the expanders do, but
3356 we do transform most of them. */
3357 fndecl = get_callee_fndecl (*expr_p);
3358 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3359 switch (DECL_FUNCTION_CODE (fndecl))
3360 {
3361 CASE_BUILT_IN_ALLOCA:
3362 /* If the call has been built for a variable-sized object, then we
3363 want to restore the stack level when the enclosing BIND_EXPR is
3364 exited to reclaim the allocated space; otherwise, we precisely
3365 need to do the opposite and preserve the latest stack level. */
3366 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3367 gimplify_ctxp->save_stack = true;
3368 else
3369 gimplify_ctxp->keep_stack = true;
3370 break;
3371
3372 case BUILT_IN_VA_START:
3373 {
3374 builtin_va_start_p = TRUE;
3375 if (call_expr_nargs (*expr_p) < 2)
3376 {
3377 error ("too few arguments to function %<va_start%>");
3378 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3379 return GS_OK;
3380 }
3381
3382 if (fold_builtin_next_arg (*expr_p, true))
3383 {
3384 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3385 return GS_OK;
3386 }
3387 break;
3388 }
3389
3390 case BUILT_IN_EH_RETURN:
3391 cfun->calls_eh_return = true;
3392 break;
3393
3394 case BUILT_IN_CLEAR_PADDING:
3395 if (call_expr_nargs (*expr_p) == 1)
3396 {
3397 /* Remember the original type of the argument in an internal
3398 dummy second argument, since pointer conversions are useless
3399 in GIMPLE. */
3400 p = CALL_EXPR_ARG (*expr_p, 0);
3401 *expr_p
3402 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3403 build_zero_cst (TREE_TYPE (p)));
3404 return GS_OK;
3405 }
3406 break;
3407
3408 default:
3409 ;
3410 }
3411 if (fndecl && fndecl_built_in_p (fndecl))
3412 {
3413 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3414 if (new_tree && new_tree != *expr_p)
3415 {
3416 /* There was a transformation of this call which computes the
3417 same value, but in a more efficient way. Return and try
3418 again. */
3419 *expr_p = new_tree;
3420 return GS_OK;
3421 }
3422 }
3423
3424 /* Remember the original function pointer type. */
3425 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3426
3427 if (flag_openmp
3428 && fndecl
3429 && cfun
3430 && (cfun->curr_properties & PROP_gimple_any) == 0)
3431 {
3432 tree variant = omp_resolve_declare_variant (fndecl);
3433 if (variant != fndecl)
3434 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3435 }
3436
3437 /* There is a sequence point before the call, so any side effects in
3438 the calling expression must occur before the actual call. Force
3439 gimplify_expr to use an internal post queue. */
3440 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3441 is_gimple_call_addr, fb_rvalue);
3442
3443 nargs = call_expr_nargs (*expr_p);
3444
3445 /* Get argument types for verification. */
3446 fndecl = get_callee_fndecl (*expr_p);
3447 parms = NULL_TREE;
3448 if (fndecl)
3449 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3450 else
3451 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3452
3453 if (fndecl && DECL_ARGUMENTS (fndecl))
3454 p = DECL_ARGUMENTS (fndecl);
3455 else if (parms)
3456 p = parms;
3457 else
3458 p = NULL_TREE;
3459 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3460 ;
3461
3462 /* If the last argument is __builtin_va_arg_pack () and it is not
3463 passed as a named argument, decrease the number of CALL_EXPR
3464 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3465 if (!p
3466 && i < nargs
3467 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3468 {
3469 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3470 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3471
3472 if (last_arg_fndecl
3473 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3474 {
3475 tree call = *expr_p;
3476
3477 --nargs;
3478 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3479 CALL_EXPR_FN (call),
3480 nargs, CALL_EXPR_ARGP (call));
3481
3482 /* Copy all CALL_EXPR flags, location and block, except
3483 CALL_EXPR_VA_ARG_PACK flag. */
3484 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3485 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3486 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3487 = CALL_EXPR_RETURN_SLOT_OPT (call);
3488 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3489 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3490
3491 /* Set CALL_EXPR_VA_ARG_PACK. */
3492 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3493 }
3494 }
3495
3496 /* If the call returns twice then after building the CFG the call
3497 argument computations will no longer dominate the call because
3498 we add an abnormal incoming edge to the call. So do not use SSA
3499 vars there. */
3500 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3501
3502 /* Gimplify the function arguments. */
3503 if (nargs > 0)
3504 {
3505 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3506 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3507 PUSH_ARGS_REVERSED ? i-- : i++)
3508 {
3509 enum gimplify_status t;
3510
3511 /* Avoid gimplifying the second argument to va_start, which needs to
3512 be the plain PARM_DECL. */
3513 if ((i != 1) || !builtin_va_start_p)
3514 {
3515 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3516 EXPR_LOCATION (*expr_p), ! returns_twice);
3517
3518 if (t == GS_ERROR)
3519 ret = GS_ERROR;
3520 }
3521 }
3522 }
3523
3524 /* Gimplify the static chain. */
3525 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3526 {
3527 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3528 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3529 else
3530 {
3531 enum gimplify_status t;
3532 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3533 EXPR_LOCATION (*expr_p), ! returns_twice);
3534 if (t == GS_ERROR)
3535 ret = GS_ERROR;
3536 }
3537 }
3538
3539 /* Verify the function result. */
3540 if (want_value && fndecl
3541 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3542 {
3543 error_at (loc, "using result of function returning %<void%>");
3544 ret = GS_ERROR;
3545 }
3546
3547 /* Try this again in case gimplification exposed something. */
3548 if (ret != GS_ERROR)
3549 {
3550 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3551
3552 if (new_tree && new_tree != *expr_p)
3553 {
3554 /* There was a transformation of this call which computes the
3555 same value, but in a more efficient way. Return and try
3556 again. */
3557 *expr_p = new_tree;
3558 return GS_OK;
3559 }
3560 }
3561 else
3562 {
3563 *expr_p = error_mark_node;
3564 return GS_ERROR;
3565 }
3566
3567 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
3568 call. This allows us to eliminate redundant or useless
3569 calls to "const" functions. */
3570 if (TREE_CODE (*expr_p) == CALL_EXPR)
3571 {
3572 int flags = call_expr_flags (*expr_p);
3573 if (flags & (ECF_CONST | ECF_PURE)
3574 /* An infinite loop is considered a side effect. */
3575 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3576 TREE_SIDE_EFFECTS (*expr_p) = 0;
3577 }
3578
3579 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3580 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3581 form and delegate the creation of a GIMPLE_CALL to
3582 gimplify_modify_expr. This is always possible because when
3583 WANT_VALUE is true, the caller wants the result of this call into
3584 a temporary, which means that we will emit an INIT_EXPR in
3585 internal_get_tmp_var which will then be handled by
3586 gimplify_modify_expr. */
3587 if (!want_value)
3588 {
3589 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3590 have to do is replicate it as a GIMPLE_CALL tuple. */
3591 gimple_stmt_iterator gsi;
3592 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3593 notice_special_calls (call);
3594 gimplify_seq_add_stmt (pre_p, call);
3595 gsi = gsi_last (*pre_p);
3596 maybe_fold_stmt (&gsi);
3597 *expr_p = NULL_TREE;
3598 }
3599 else
3600 /* Remember the original function type. */
3601 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3602 CALL_EXPR_FN (*expr_p));
3603
3604 return ret;
3605 }
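
/* Illustrative sketch: with WANT_VALUE false, "foo (bar ())" gimplifies
   here to

     t.0 = bar ();
     foo (t.0);

   while with WANT_VALUE true the CALL_EXPR is left for
   gimplify_modify_expr, which builds the GIMPLE_CALL with the
   temporary as its LHS.  */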
3606
3607 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3608 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3609
3610 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3611 condition is true or false, respectively. If null, we should generate
3612 our own to skip over the evaluation of this specific expression.
3613
3614 LOCUS is the source location of the COND_EXPR.
3615
3616 This function is the tree equivalent of do_jump.
3617
3618 shortcut_cond_r should only be called by shortcut_cond_expr. */
3619
3620 static tree
3621 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3622 location_t locus)
3623 {
3624 tree local_label = NULL_TREE;
3625 tree t, expr = NULL;
3626
3627 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3628 retain the shortcut semantics. Just insert the gotos here;
3629 shortcut_cond_expr will append the real blocks later. */
3630 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3631 {
3632 location_t new_locus;
3633
3634 /* Turn if (a && b) into
3635
3636 if (a); else goto no;
3637 if (b) goto yes; else goto no;
3638 (no:) */
3639
3640 if (false_label_p == NULL)
3641 false_label_p = &local_label;
3642
3643 /* Keep the original source location on the first 'if'. */
3644 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3645 append_to_statement_list (t, &expr);
3646
3647 /* Set the source location of the && on the second 'if'. */
3648 new_locus = rexpr_location (pred, locus);
3649 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3650 new_locus);
3651 append_to_statement_list (t, &expr);
3652 }
3653 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3654 {
3655 location_t new_locus;
3656
3657 /* Turn if (a || b) into
3658
3659 if (a) goto yes;
3660 if (b) goto yes; else goto no;
3661 (yes:) */
3662
3663 if (true_label_p == NULL)
3664 true_label_p = &local_label;
3665
3666 /* Keep the original source location on the first 'if'. */
3667 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3668 append_to_statement_list (t, &expr);
3669
3670 /* Set the source location of the || on the second 'if'. */
3671 new_locus = rexpr_location (pred, locus);
3672 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3673 new_locus);
3674 append_to_statement_list (t, &expr);
3675 }
3676 else if (TREE_CODE (pred) == COND_EXPR
3677 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3678 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3679 {
3680 location_t new_locus;
3681
3682 /* As long as we're messing with gotos, turn if (a ? b : c) into
3683 if (a)
3684 if (b) goto yes; else goto no;
3685 else
3686 if (c) goto yes; else goto no;
3687
3688 Don't do this if one of the arms has void type, which can happen
3689 in C++ when the arm is throw. */
3690
3691 /* Keep the original source location on the first 'if'. Set the source
3692 location of the ? on the second 'if'. */
3693 new_locus = rexpr_location (pred, locus);
3694 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3695 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3696 false_label_p, locus),
3697 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3698 false_label_p, new_locus));
3699 }
3700 else
3701 {
3702 expr = build3 (COND_EXPR, void_type_node, pred,
3703 build_and_jump (true_label_p),
3704 build_and_jump (false_label_p));
3705 SET_EXPR_LOCATION (expr, locus);
3706 }
3707
3708 if (local_label)
3709 {
3710 t = build1 (LABEL_EXPR, void_type_node, local_label);
3711 append_to_statement_list (t, &expr);
3712 }
3713
3714 return expr;
3715 }
3716
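/* Illustrative expansion (hypothetical input): applied to the
   predicate of

     if (a && b) goto yes; else goto no;

   shortcut_cond_r produces roughly

     if (a) ; else goto no;
     if (b) goto yes; else goto no;

   by combining the TRUTH_ANDIF_EXPR case above with the plain
   COND_EXPR case for each operand.  */
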
3717 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3718 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3719 statement, if it is the last one. Otherwise, return NULL. */
3720
3721 static tree
3722 find_goto (tree expr)
3723 {
3724 if (!expr)
3725 return NULL_TREE;
3726
3727 if (TREE_CODE (expr) == GOTO_EXPR)
3728 return expr;
3729
3730 if (TREE_CODE (expr) != STATEMENT_LIST)
3731 return NULL_TREE;
3732
3733 tree_stmt_iterator i = tsi_start (expr);
3734
3735 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3736 tsi_next (&i);
3737
3738 if (!tsi_one_before_end_p (i))
3739 return NULL_TREE;
3740
3741 return find_goto (tsi_stmt (i));
3742 }
3743
3744 /* Same as find_goto, except that it returns NULL if the destination
3745 is not a LABEL_DECL. */
3746
3747 static inline tree
3748 find_goto_label (tree expr)
3749 {
3750 tree dest = find_goto (expr);
3751 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3752 return dest;
3753 return NULL_TREE;
3754 }
3755
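/* For instance (sketch), given an arm that is the statement list

     { DEBUG_BEGIN_STMT; goto l; }

   find_goto skips the debug marker and returns the GOTO_EXPR, and
   find_goto_label additionally checks that the destination is a
   LABEL_DECL, so a computed goto yields NULL_TREE.  */
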
3756 /* Given a conditional expression EXPR with short-circuit boolean
3757 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3758 predicate apart into the equivalent sequence of conditionals. */
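
/* For example (an illustrative sketch), when both arms carry side
   effects,

     if (a && b) c; else d;

   becomes roughly

     if (a) ; else goto no;
     if (b) ; else goto no;
     c; goto end;
     no: d;
     end:

   with the gotos produced by shortcut_cond_r above.  */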
3759
3760 static tree
3761 shortcut_cond_expr (tree expr)
3762 {
3763 tree pred = TREE_OPERAND (expr, 0);
3764 tree then_ = TREE_OPERAND (expr, 1);
3765 tree else_ = TREE_OPERAND (expr, 2);
3766 tree true_label, false_label, end_label, t;
3767 tree *true_label_p;
3768 tree *false_label_p;
3769 bool emit_end, emit_false, jump_over_else;
3770 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3771 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3772
3773 /* First do simple transformations. */
3774 if (!else_se)
3775 {
3776 /* If there is no 'else', turn
3777 if (a && b) then c
3778 into
3779 if (a) if (b) then c. */
3780 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3781 {
3782 /* Keep the original source location on the first 'if'. */
3783 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3784 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3785 /* Set the source location of the && on the second 'if'. */
3786 if (rexpr_has_location (pred))
3787 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3788 then_ = shortcut_cond_expr (expr);
3789 then_se = then_ && TREE_SIDE_EFFECTS (then_);
3790 pred = TREE_OPERAND (pred, 0);
3791 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3792 SET_EXPR_LOCATION (expr, locus);
3793 }
3794 }
3795
3796 if (!then_se)
3797 {
3798 /* If there is no 'then', turn
3799 if (a || b); else d
3800 into
3801 if (a); else if (b); else d. */
3802 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3803 {
3804 /* Keep the original source location on the first 'if'. */
3805 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3806 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3807 /* Set the source location of the || on the second 'if'. */
3808 if (rexpr_has_location (pred))
3809 SET_EXPR_LOCATION (expr, rexpr_location (pred));
3810 else_ = shortcut_cond_expr (expr);
3811 else_se = else_ && TREE_SIDE_EFFECTS (else_);
3812 pred = TREE_OPERAND (pred, 0);
3813 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3814 SET_EXPR_LOCATION (expr, locus);
3815 }
3816 }
3817
3818 /* If we're done, great. */
3819 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3820 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3821 return expr;
3822
3823 /* Otherwise we need to mess with gotos. Change
3824 if (a) c; else d;
3825 to
3826 if (a); else goto no;
3827 c; goto end;
3828 no: d; end:
3829 and recursively gimplify the condition. */
3830
3831 true_label = false_label = end_label = NULL_TREE;
3832
3833 /* If our arms just jump somewhere, hijack those labels so we don't
3834 generate jumps to jumps. */
3835
3836 if (tree then_goto = find_goto_label (then_))
3837 {
3838 true_label = GOTO_DESTINATION (then_goto);
3839 then_ = NULL;
3840 then_se = false;
3841 }
3842
3843 if (tree else_goto = find_goto_label (else_))
3844 {
3845 false_label = GOTO_DESTINATION (else_goto);
3846 else_ = NULL;
3847 else_se = false;
3848 }
3849
3850 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
3851 if (true_label)
3852 true_label_p = &true_label;
3853 else
3854 true_label_p = NULL;
3855
3856 /* The 'else' branch also needs a label if it contains interesting code. */
3857 if (false_label || else_se)
3858 false_label_p = &false_label;
3859 else
3860 false_label_p = NULL;
3861
3862 /* If there was nothing else in our arms, just forward the label(s). */
3863 if (!then_se && !else_se)
3864 return shortcut_cond_r (pred, true_label_p, false_label_p,
3865 EXPR_LOC_OR_LOC (expr, input_location));
3866
3867 /* If our last subexpression already has a terminal label, reuse it. */
3868 if (else_se)
3869 t = expr_last (else_);
3870 else if (then_se)
3871 t = expr_last (then_);
3872 else
3873 t = NULL;
3874 if (t && TREE_CODE (t) == LABEL_EXPR)
3875 end_label = LABEL_EXPR_LABEL (t);
3876
3877 /* If we don't care about jumping to the 'else' branch, jump to the end
3878 if the condition is false. */
3879 if (!false_label_p)
3880 false_label_p = &end_label;
3881
3882 /* We only want to emit these labels if we aren't hijacking them. */
3883 emit_end = (end_label == NULL_TREE);
3884 emit_false = (false_label == NULL_TREE);
3885
3886 /* We only emit the jump over the else clause if we have to--if the
3887 then clause may fall through. Otherwise we can wind up with a
3888 useless jump and a useless label at the end of gimplified code,
3889 which will cause us to think that this conditional as a whole
3890 falls through even if it doesn't. If we then inline a function
3891 which ends with such a condition, that can cause us to issue an
3892 inappropriate warning about control reaching the end of a
3893 non-void function. */
3894 jump_over_else = block_may_fallthru (then_);
3895
3896 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3897 EXPR_LOC_OR_LOC (expr, input_location));
3898
3899 expr = NULL;
3900 append_to_statement_list (pred, &expr);
3901
3902 append_to_statement_list (then_, &expr);
3903 if (else_se)
3904 {
3905 if (jump_over_else)
3906 {
3907 tree last = expr_last (expr);
3908 t = build_and_jump (&end_label);
3909 if (rexpr_has_location (last))
3910 SET_EXPR_LOCATION (t, rexpr_location (last));
3911 append_to_statement_list (t, &expr);
3912 }
3913 if (emit_false)
3914 {
3915 t = build1 (LABEL_EXPR, void_type_node, false_label);
3916 append_to_statement_list (t, &expr);
3917 }
3918 append_to_statement_list (else_, &expr);
3919 }
3920 if (emit_end && end_label)
3921 {
3922 t = build1 (LABEL_EXPR, void_type_node, end_label);
3923 append_to_statement_list (t, &expr);
3924 }
3925
3926 return expr;
3927 }
3928
3929 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
3930
3931 tree
3932 gimple_boolify (tree expr)
3933 {
3934 tree type = TREE_TYPE (expr);
3935 location_t loc = EXPR_LOCATION (expr);
3936
3937 if (TREE_CODE (expr) == NE_EXPR
3938 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
3939 && integer_zerop (TREE_OPERAND (expr, 1)))
3940 {
3941 tree call = TREE_OPERAND (expr, 0);
3942 tree fn = get_callee_fndecl (call);
3943
3944 /* For __builtin_expect ((long) (x), y) recurse into x as well
3945 if x is truth_value_p. */
3946 if (fn
3947 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
3948 && call_expr_nargs (call) == 2)
3949 {
3950 tree arg = CALL_EXPR_ARG (call, 0);
3951 if (arg)
3952 {
3953 if (TREE_CODE (arg) == NOP_EXPR
3954 && TREE_TYPE (arg) == TREE_TYPE (call))
3955 arg = TREE_OPERAND (arg, 0);
3956 if (truth_value_p (TREE_CODE (arg)))
3957 {
3958 arg = gimple_boolify (arg);
3959 CALL_EXPR_ARG (call, 0)
3960 = fold_convert_loc (loc, TREE_TYPE (call), arg);
3961 }
3962 }
3963 }
3964 }
3965
3966 switch (TREE_CODE (expr))
3967 {
3968 case TRUTH_AND_EXPR:
3969 case TRUTH_OR_EXPR:
3970 case TRUTH_XOR_EXPR:
3971 case TRUTH_ANDIF_EXPR:
3972 case TRUTH_ORIF_EXPR:
3973 /* Also boolify the arguments of truth exprs. */
3974 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
3975 /* FALLTHRU */
3976
3977 case TRUTH_NOT_EXPR:
3978 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3979
3980 /* These expressions always produce boolean results. */
3981 if (TREE_CODE (type) != BOOLEAN_TYPE)
3982 TREE_TYPE (expr) = boolean_type_node;
3983 return expr;
3984
3985 case ANNOTATE_EXPR:
3986 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
3987 {
3988 case annot_expr_ivdep_kind:
3989 case annot_expr_unroll_kind:
3990 case annot_expr_no_vector_kind:
3991 case annot_expr_vector_kind:
3992 case annot_expr_parallel_kind:
3993 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3994 if (TREE_CODE (type) != BOOLEAN_TYPE)
3995 TREE_TYPE (expr) = boolean_type_node;
3996 return expr;
3997 default:
3998 gcc_unreachable ();
3999 }
4000
4001 default:
4002 if (COMPARISON_CLASS_P (expr))
4003 {
4004 /* These expressions always produce boolean results. */
4005 if (TREE_CODE (type) != BOOLEAN_TYPE)
4006 TREE_TYPE (expr) = boolean_type_node;
4007 return expr;
4008 }
4009 /* Other expressions that get here must have boolean values, but
4010 might need to be converted to the appropriate mode. */
4011 if (TREE_CODE (type) == BOOLEAN_TYPE)
4012 return expr;
4013 return fold_convert_loc (loc, boolean_type_node, expr);
4014 }
4015 }
4016
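/* Illustrative use (hypothetical input): boolifying the condition

     (a < b) && d

   gives the TRUTH_ANDIF_EXPR and the comparison BOOLEAN_TYPE and, if
   d is a plain integer, wraps d in a conversion to boolean_type_node
   via the default case above.  */
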
4017 /* Given a conditional expression *EXPR_P without side effects, gimplify
4018 its operands. New statements are inserted to PRE_P. */
4019
4020 static enum gimplify_status
4021 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4022 {
4023 tree expr = *expr_p, cond;
4024 enum gimplify_status ret, tret;
4025 enum tree_code code;
4026
4027 cond = gimple_boolify (COND_EXPR_COND (expr));
4028
4029 /* We need to handle && and || specially, as their gimplification
4030 creates a pure COND_EXPR, which would otherwise lead to an infinite cycle. */
4031 code = TREE_CODE (cond);
4032 if (code == TRUTH_ANDIF_EXPR)
4033 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4034 else if (code == TRUTH_ORIF_EXPR)
4035 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4036 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4037 COND_EXPR_COND (*expr_p) = cond;
4038
4039 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4040 is_gimple_val, fb_rvalue);
4041 ret = MIN (ret, tret);
4042 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4043 is_gimple_val, fb_rvalue);
4044
4045 return MIN (ret, tret);
4046 }
4047
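/* For instance (sketch), with allow_rhs_cond_expr set and no side
   effects or traps in either arm,

     x = p ? a : b;

   keeps the COND_EXPR on the RHS: the predicate and arms are merely
   gimplified to values, with && and || in the predicate demoted to
   TRUTH_AND_EXPR / TRUTH_OR_EXPR as described above.  */
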
4048 /* Return true if evaluating EXPR could trap.
4049 EXPR is GENERIC, while tree_could_trap_p can be called
4050 only on GIMPLE. */
4051
4052 bool
4053 generic_expr_could_trap_p (tree expr)
4054 {
4055 unsigned i, n;
4056
4057 if (!expr || is_gimple_val (expr))
4058 return false;
4059
4060 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4061 return true;
4062
4063 n = TREE_OPERAND_LENGTH (expr);
4064 for (i = 0; i < n; i++)
4065 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4066 return true;
4067
4068 return false;
4069 }
4070
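/* Example (sketch): generic_expr_could_trap_p is true for a / b with
   a non-constant divisor and for a dereference *p, but false for a
   simple a + b on plain integers; this is what stops
   gimplify_cond_expr from evaluating such arms unconditionally.  */
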
4071 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4072 into
4073
4074 if (p) if (p)
4075 t1 = a; a;
4076 else or else
4077 t1 = b; b;
4078 t1;
4079
4080 The second form is used when *EXPR_P is of type void.
4081
4082 PRE_P points to the list where side effects that must happen before
4083 *EXPR_P should be stored. */
4084
4085 static enum gimplify_status
4086 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4087 {
4088 tree expr = *expr_p;
4089 tree type = TREE_TYPE (expr);
4090 location_t loc = EXPR_LOCATION (expr);
4091 tree tmp, arm1, arm2;
4092 enum gimplify_status ret;
4093 tree label_true, label_false, label_cont;
4094 bool have_then_clause_p, have_else_clause_p;
4095 gcond *cond_stmt;
4096 enum tree_code pred_code;
4097 gimple_seq seq = NULL;
4098
4099 /* If this COND_EXPR has a value, copy the values into a temporary within
4100 the arms. */
4101 if (!VOID_TYPE_P (type))
4102 {
4103 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4104 tree result;
4105
4106 /* If either an rvalue is ok or we do not require an lvalue, create the
4107 temporary. But we cannot do that if the type is addressable. */
4108 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4109 && !TREE_ADDRESSABLE (type))
4110 {
4111 if (gimplify_ctxp->allow_rhs_cond_expr
4112 /* If either branch has side effects or could trap, it can't be
4113 evaluated unconditionally. */
4114 && !TREE_SIDE_EFFECTS (then_)
4115 && !generic_expr_could_trap_p (then_)
4116 && !TREE_SIDE_EFFECTS (else_)
4117 && !generic_expr_could_trap_p (else_))
4118 return gimplify_pure_cond_expr (expr_p, pre_p);
4119
4120 tmp = create_tmp_var (type, "iftmp");
4121 result = tmp;
4122 }
4123
4124 /* Otherwise, only create and copy references to the values. */
4125 else
4126 {
4127 type = build_pointer_type (type);
4128
4129 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4130 then_ = build_fold_addr_expr_loc (loc, then_);
4131
4132 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4133 else_ = build_fold_addr_expr_loc (loc, else_);
4134
4135 expr
4136 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4137
4138 tmp = create_tmp_var (type, "iftmp");
4139 result = build_simple_mem_ref_loc (loc, tmp);
4140 }
4141
4142 /* Build the new then clause, `tmp = then_;'. But don't build the
4143 assignment if the value is void; in C++ the arm can be void if it is a throw. */
4144 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4145 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4146
4147 /* Similarly, build the new else clause, `tmp = else_;'. */
4148 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4149 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4150
4151 TREE_TYPE (expr) = void_type_node;
4152 recalculate_side_effects (expr);
4153
4154 /* Move the COND_EXPR to the prequeue. */
4155 gimplify_stmt (&expr, pre_p);
4156
4157 *expr_p = result;
4158 return GS_ALL_DONE;
4159 }
4160
4161 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4162 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4163 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4164 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4165
4166 /* Make sure the condition has BOOLEAN_TYPE. */
4167 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4168
4169 /* Break apart && and || conditions. */
4170 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4171 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4172 {
4173 expr = shortcut_cond_expr (expr);
4174
4175 if (expr != *expr_p)
4176 {
4177 *expr_p = expr;
4178
4179 /* We can't rely on gimplify_expr to re-gimplify the expanded
4180 form properly, as cleanups might cause the target labels to be
4181 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4182 set up a conditional context. */
4183 gimple_push_condition ();
4184 gimplify_stmt (expr_p, &seq);
4185 gimple_pop_condition (pre_p);
4186 gimple_seq_add_seq (pre_p, seq);
4187
4188 return GS_ALL_DONE;
4189 }
4190 }
4191
4192 /* Now do the normal gimplification. */
4193
4194 /* Gimplify condition. */
4195 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4196 is_gimple_condexpr_for_cond, fb_rvalue);
4197 if (ret == GS_ERROR)
4198 return GS_ERROR;
4199 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4200
4201 gimple_push_condition ();
4202
4203 have_then_clause_p = have_else_clause_p = false;
4204 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4205 if (label_true
4206 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4207 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4208 have different locations, otherwise we end up with incorrect
4209 location information on the branches. */
4210 && (optimize
4211 || !EXPR_HAS_LOCATION (expr)
4212 || !rexpr_has_location (label_true)
4213 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4214 {
4215 have_then_clause_p = true;
4216 label_true = GOTO_DESTINATION (label_true);
4217 }
4218 else
4219 label_true = create_artificial_label (UNKNOWN_LOCATION);
4220 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4221 if (label_false
4222 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4223 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4224 have different locations, otherwise we end up with incorrect
4225 location information on the branches. */
4226 && (optimize
4227 || !EXPR_HAS_LOCATION (expr)
4228 || !rexpr_has_location (label_false)
4229 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4230 {
4231 have_else_clause_p = true;
4232 label_false = GOTO_DESTINATION (label_false);
4233 }
4234 else
4235 label_false = create_artificial_label (UNKNOWN_LOCATION);
4236
4237 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4238 &arm2);
4239 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4240 label_false);
4241 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
4242 gimplify_seq_add_stmt (&seq, cond_stmt);
4243 gimple_stmt_iterator gsi = gsi_last (seq);
4244 maybe_fold_stmt (&gsi);
4245
4246 label_cont = NULL_TREE;
4247 if (!have_then_clause_p)
4248 {
4249 /* For if (...) {} else { code; } put label_true after
4250 the else block. */
4251 if (TREE_OPERAND (expr, 1) == NULL_TREE
4252 && !have_else_clause_p
4253 && TREE_OPERAND (expr, 2) != NULL_TREE)
4254 label_cont = label_true;
4255 else
4256 {
4257 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4258 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4259 /* For if (...) { code; } else {} or
4260 if (...) { code; } else goto label; or
4261 if (...) { code; return; } else { ... }
4262 label_cont isn't needed. */
4263 if (!have_else_clause_p
4264 && TREE_OPERAND (expr, 2) != NULL_TREE
4265 && gimple_seq_may_fallthru (seq))
4266 {
4267 gimple *g;
4268 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4269
4270 g = gimple_build_goto (label_cont);
4271
4272 /* GIMPLE_COND's are very low level; they have embedded
4273 gotos. This particular embedded goto should not be marked
4274 with the location of the original COND_EXPR, as it would
4275 correspond to the COND_EXPR's condition, not the ELSE or the
4276 THEN arms. To avoid marking it with the wrong location, flag
4277 it as "no location". */
4278 gimple_set_do_not_emit_location (g);
4279
4280 gimplify_seq_add_stmt (&seq, g);
4281 }
4282 }
4283 }
4284 if (!have_else_clause_p)
4285 {
4286 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4287 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4288 }
4289 if (label_cont)
4290 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4291
4292 gimple_pop_condition (pre_p);
4293 gimple_seq_add_seq (pre_p, seq);
4294
4295 if (ret == GS_ERROR)
4296 ; /* Do nothing. */
4297 else if (have_then_clause_p || have_else_clause_p)
4298 ret = GS_ALL_DONE;
4299 else
4300 {
4301 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4302 expr = TREE_OPERAND (expr, 0);
4303 gimplify_stmt (&expr, pre_p);
4304 }
4305
4306 *expr_p = NULL;
4307 return ret;
4308 }
4309
4310 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4311 to be marked addressable.
4312
4313 We cannot rely on such an expression being directly markable if a temporary
4314 has been created by the gimplification. In this case, we create another
4315 temporary and initialize it with a copy, which will become a store after we
4316 mark it addressable. This can happen if the front-end passed us something
4317 that it could not mark addressable yet, like a Fortran pass-by-reference
4318 parameter (int) floatvar. */
4319
4320 static void
4321 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4322 {
4323 while (handled_component_p (*expr_p))
4324 expr_p = &TREE_OPERAND (*expr_p, 0);
4325 if (is_gimple_reg (*expr_p))
4326 {
4327 /* Do not allow an SSA name as the temporary. */
4328 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4329 DECL_NOT_GIMPLE_REG_P (var) = 1;
4330 *expr_p = var;
4331 }
4332 }
4333
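/* Sketch of the Fortran case mentioned above: for an actual argument
   (int) floatvar passed by reference, gimplification may first have
   produced a register temporary for the conversion; the code above
   then copies it into a fresh variable with DECL_NOT_GIMPLE_REG_P
   set so that its address can be taken.  */
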
4334 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4335 a call to __builtin_memcpy. */
4336
4337 static enum gimplify_status
4338 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4339 gimple_seq *seq_p)
4340 {
4341 tree t, to, to_ptr, from, from_ptr;
4342 gcall *gs;
4343 location_t loc = EXPR_LOCATION (*expr_p);
4344
4345 to = TREE_OPERAND (*expr_p, 0);
4346 from = TREE_OPERAND (*expr_p, 1);
4347
4348 /* Mark the RHS addressable. Beware that it may not be possible to do so
4349 directly if a temporary has been created by the gimplification. */
4350 prepare_gimple_addressable (&from, seq_p);
4351
4352 mark_addressable (from);
4353 from_ptr = build_fold_addr_expr_loc (loc, from);
4354 gimplify_arg (&from_ptr, seq_p, loc);
4355
4356 mark_addressable (to);
4357 to_ptr = build_fold_addr_expr_loc (loc, to);
4358 gimplify_arg (&to_ptr, seq_p, loc);
4359
4360 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4361
4362 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4363 gimple_call_set_alloca_for_var (gs, true);
4364
4365 if (want_value)
4366 {
4367 /* tmp = memcpy() */
4368 t = create_tmp_var (TREE_TYPE (to_ptr));
4369 gimple_call_set_lhs (gs, t);
4370 gimplify_seq_add_stmt (seq_p, gs);
4371
4372 *expr_p = build_simple_mem_ref (t);
4373 return GS_ALL_DONE;
4374 }
4375
4376 gimplify_seq_add_stmt (seq_p, gs);
4377 *expr_p = NULL;
4378 return GS_ALL_DONE;
4379 }
4380
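/* Illustrative result (hypothetical input): an aggregate assignment

     to = from;

   of variable-sized type with size SIZE is replaced here by

     __builtin_memcpy (&to, &from, SIZE);

   after both operands have been marked addressable.  */
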
4381 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4382 a call to __builtin_memset. In this case we know that the RHS is
4383 a CONSTRUCTOR with an empty element list. */
4384
4385 static enum gimplify_status
4386 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4387 gimple_seq *seq_p)
4388 {
4389 tree t, from, to, to_ptr;
4390 gcall *gs;
4391 location_t loc = EXPR_LOCATION (*expr_p);
4392
4393 /* Assert our assumptions, to abort instead of producing wrong code
4394 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4395 not be immediately exposed. */
4396 from = TREE_OPERAND (*expr_p, 1);
4397 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4398 from = TREE_OPERAND (from, 0);
4399
4400 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4401 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4402
4403 /* Now proceed. */
4404 to = TREE_OPERAND (*expr_p, 0);
4405
4406 to_ptr = build_fold_addr_expr_loc (loc, to);
4407 gimplify_arg (&to_ptr, seq_p, loc);
4408 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4409
4410 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4411
4412 if (want_value)
4413 {
4414 /* tmp = memset() */
4415 t = create_tmp_var (TREE_TYPE (to_ptr));
4416 gimple_call_set_lhs (gs, t);
4417 gimplify_seq_add_stmt (seq_p, gs);
4418
4419 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4420 return GS_ALL_DONE;
4421 }
4422
4423 gimplify_seq_add_stmt (seq_p, gs);
4424 *expr_p = NULL;
4425 return GS_ALL_DONE;
4426 }
4427
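/* Illustrative result (hypothetical input): for an empty-CONSTRUCTOR
   assignment to an object X of size SIZE, this emits

     __builtin_memset (&X, 0, SIZE);

   and, if the value is wanted, rewrites *EXPR_P as an INDIRECT_REF
   of the temporary holding the memset return value.  */
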
4428 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4429 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4430 assignment. Return non-null if we detect a potential overlap. */
4431
4432 struct gimplify_init_ctor_preeval_data
4433 {
4434 /* The base decl of the lhs object. May be NULL, in which case we
4435 have to assume the lhs is indirect. */
4436 tree lhs_base_decl;
4437
4438 /* The alias set of the lhs object. */
4439 alias_set_type lhs_alias_set;
4440 };
4441
4442 static tree
4443 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4444 {
4445 struct gimplify_init_ctor_preeval_data *data
4446 = (struct gimplify_init_ctor_preeval_data *) xdata;
4447 tree t = *tp;
4448
4449 /* If we find the base object, obviously we have overlap. */
4450 if (data->lhs_base_decl == t)
4451 return t;
4452
4453 /* If the constructor component is indirect, determine if we have a
4454 potential overlap with the lhs. The only bits of information we
4455 have to go on at this point are addressability and alias sets. */
4456 if ((INDIRECT_REF_P (t)
4457 || TREE_CODE (t) == MEM_REF)
4458 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4459 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4460 return t;
4461
4462 /* If the constructor component is a call, determine if it can hide a
4463 potential overlap with the lhs through an INDIRECT_REF like above.
4464 ??? Ugh - this is completely broken. In fact this whole analysis
4465 doesn't look conservative. */
4466 if (TREE_CODE (t) == CALL_EXPR)
4467 {
4468 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4469
4470 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4471 if (POINTER_TYPE_P (TREE_VALUE (type))
4472 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4473 && alias_sets_conflict_p (data->lhs_alias_set,
4474 get_alias_set
4475 (TREE_TYPE (TREE_VALUE (type)))))
4476 return t;
4477 }
4478
4479 if (IS_TYPE_OR_DECL_P (t))
4480 *walk_subtrees = 0;
4481 return NULL;
4482 }
4483
4484 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4485 force values that overlap with the lhs (as described by *DATA)
4486 into temporaries. */
4487
4488 static void
4489 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4490 struct gimplify_init_ctor_preeval_data *data)
4491 {
4492 enum gimplify_status one;
4493
4494 /* If the value is constant, then there's nothing to pre-evaluate. */
4495 if (TREE_CONSTANT (*expr_p))
4496 {
4497 /* Ensure it does not have side effects, it might contain a reference to
4498 the object we're initializing. */
4499 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4500 return;
4501 }
4502
4503 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4504 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4505 return;
4506
4507 /* Recurse for nested constructors. */
4508 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4509 {
4510 unsigned HOST_WIDE_INT ix;
4511 constructor_elt *ce;
4512 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4513
4514 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4515 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4516
4517 return;
4518 }
4519
4520 /* If this is a variable sized type, we must remember the size. */
4521 maybe_with_size_expr (expr_p);
4522
4523 /* Gimplify the constructor element to something appropriate for the rhs
4524 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4525 the gimplifier will consider this a store to memory. Doing this
4526 gimplification now means that we won't have to deal with complicated
4527 language-specific trees, nor trees like SAVE_EXPR that can induce
4528 exponential search behavior. */
4529 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4530 if (one == GS_ERROR)
4531 {
4532 *expr_p = NULL;
4533 return;
4534 }
4535
4536 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4537 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4538 always be true for all scalars, since is_gimple_mem_rhs insists on a
4539 temporary variable for them. */
4540 if (DECL_P (*expr_p))
4541 return;
4542
4543 /* If this is of variable size, we have no choice but to assume it doesn't
4544 overlap since we can't make a temporary for it. */
4545 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4546 return;
4547
4548 /* Otherwise, we must search for overlap ... */
4549 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4550 return;
4551
4552 /* ... and if found, force the value into a temporary. */
4553 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4554 }
4555
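/* Sketch of the overlap this guards against: in

     a = (struct S) { .x = a.y, .y = 0 };

   the read of a.y must not observe the store to a.x or a prior
   clearing of the object, so the overlapping value is forced into a
   temporary before the element assignments are emitted.  */
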
4556 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4557 a RANGE_EXPR in a CONSTRUCTOR for an array.
4558
4559 var = lower;
4560 loop_entry:
4561 object[var] = value;
4562 if (var == upper)
4563 goto loop_exit;
4564 var = var + 1;
4565 goto loop_entry;
4566 loop_exit:
4567
4568 We increment var _after_ the loop exit check because we might otherwise
4569 fail if upper == TYPE_MAX_VALUE (TREE_TYPE (upper)).
4570
4571 Note that we never have to deal with SAVE_EXPRs here, because this has
4572 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4573
4574 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4575 gimple_seq *, bool);
4576
4577 static void
4578 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4579 tree value, tree array_elt_type,
4580 gimple_seq *pre_p, bool cleared)
4581 {
4582 tree loop_entry_label, loop_exit_label, fall_thru_label;
4583 tree var, var_type, cref, tmp;
4584
4585 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4586 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4587 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4588
4589 /* Create and initialize the index variable. */
4590 var_type = TREE_TYPE (upper);
4591 var = create_tmp_var (var_type);
4592 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4593
4594 /* Add the loop entry label. */
4595 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4596
4597 /* Build the reference. */
4598 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4599 var, NULL_TREE, NULL_TREE);
4600
4601 /* If the value is a CONSTRUCTOR, just call gimplify_init_ctor_eval to do
4602 the store. Otherwise just assign value to the reference. */
4603
4604 if (TREE_CODE (value) == CONSTRUCTOR)
4605 /* NB we might have to call ourselves recursively through
4606 gimplify_init_ctor_eval if the value is a constructor. */
4607 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4608 pre_p, cleared);
4609 else
4610 {
4611 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4612 != GS_ERROR)
4613 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4614 }
4615
4616 /* We exit the loop when the index var is equal to the upper bound. */
4617 gimplify_seq_add_stmt (pre_p,
4618 gimple_build_cond (EQ_EXPR, var, upper,
4619 loop_exit_label, fall_thru_label));
4620
4621 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4622
4623 /* Otherwise, increment the index var... */
4624 tmp = build2 (PLUS_EXPR, var_type, var,
4625 fold_convert (var_type, integer_one_node));
4626 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4627
4628 /* ...and jump back to the loop entry. */
4629 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4630
4631 /* Add the loop exit label. */
4632 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4633 }
4634
4635 /* Return true if FDECL is accessing a field that is zero sized. */
4636
4637 static bool
4638 zero_sized_field_decl (const_tree fdecl)
4639 {
4640 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4641 && integer_zerop (DECL_SIZE (fdecl)))
4642 return true;
4643 return false;
4644 }
4645
4646 /* Return true if TYPE is zero sized. */
4647
4648 static bool
4649 zero_sized_type (const_tree type)
4650 {
4651 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4652 && integer_zerop (TYPE_SIZE (type)))
4653 return true;
4654 return false;
4655 }
4656
4657 /* A subroutine of gimplify_init_constructor. Generate individual
4658 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4659 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4660 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4661 zeroed first. */
4662
4663 static void
4664 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4665 gimple_seq *pre_p, bool cleared)
4666 {
4667 tree array_elt_type = NULL;
4668 unsigned HOST_WIDE_INT ix;
4669 tree purpose, value;
4670
4671 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4672 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4673
4674 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4675 {
4676 tree cref;
4677
4678 /* NULL values are created above for gimplification errors. */
4679 if (value == NULL)
4680 continue;
4681
4682 if (cleared && initializer_zerop (value))
4683 continue;
4684
4685 /* ??? Here's to hoping the front end fills in all of the indices,
4686 so we don't have to figure out what's missing ourselves. */
4687 gcc_assert (purpose);
4688
4689 /* Skip zero-sized fields, unless value has side-effects. This can
4690 happen with calls to functions returning a zero-sized type, which
4691 we shouldn't discard. As a number of downstream passes don't
4692 expect sets of zero-sized fields, we rely on the gimplification of
4693 the MODIFY_EXPR we make below to drop the assignment statement. */
4694 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
4695 continue;
4696
4697 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4698 whole range. */
4699 if (TREE_CODE (purpose) == RANGE_EXPR)
4700 {
4701 tree lower = TREE_OPERAND (purpose, 0);
4702 tree upper = TREE_OPERAND (purpose, 1);
4703
4704 /* If the lower bound is equal to upper, just treat it as if
4705 upper was the index. */
4706 if (simple_cst_equal (lower, upper))
4707 purpose = upper;
4708 else
4709 {
4710 gimplify_init_ctor_eval_range (object, lower, upper, value,
4711 array_elt_type, pre_p, cleared);
4712 continue;
4713 }
4714 }
4715
4716 if (array_elt_type)
4717 {
4718 /* Do not use bitsizetype for ARRAY_REF indices. */
4719 if (TYPE_DOMAIN (TREE_TYPE (object)))
4720 purpose
4721 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4722 purpose);
4723 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4724 purpose, NULL_TREE, NULL_TREE);
4725 }
4726 else
4727 {
4728 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4729 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4730 unshare_expr (object), purpose, NULL_TREE);
4731 }
4732
4733 if (TREE_CODE (value) == CONSTRUCTOR
4734 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4735 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4736 pre_p, cleared);
4737 else
4738 {
4739 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4740 gimplify_and_add (init, pre_p);
4741 ggc_free (init);
4742 }
4743 }
4744 }
4745
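/* Illustrative expansion (hypothetical input): with CLEARED false,

     s = (struct S) { .a = 1, .b = f () };

   is broken down by this routine into the element assignments

     s.a = 1;
     s.b = f ();

   while RANGE_EXPR indices go through the loop built by
   gimplify_init_ctor_eval_range above.  */
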
4746 /* Return the appropriate RHS predicate for this LHS. */
4747
4748 gimple_predicate
4749 rhs_predicate_for (tree lhs)
4750 {
4751 if (is_gimple_reg (lhs))
4752 return is_gimple_reg_rhs_or_call;
4753 else
4754 return is_gimple_mem_rhs_or_call;
4755 }
4756
4757 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4758 before the LHS has been gimplified. */
4759
4760 static gimple_predicate
4761 initial_rhs_predicate_for (tree lhs)
4762 {
4763 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4764 return is_gimple_reg_rhs_or_call;
4765 else
4766 return is_gimple_mem_rhs_or_call;
4767 }
4768
4769 /* Gimplify a C99 compound literal expression. This just means adding
4770 the DECL_EXPR before the current statement and using its anonymous
4771 decl instead. */
4772
4773 static enum gimplify_status
4774 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4775 bool (*gimple_test_f) (tree),
4776 fallback_t fallback)
4777 {
4778 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4779 tree decl = DECL_EXPR_DECL (decl_s);
4780 tree init = DECL_INITIAL (decl);
4781 /* Mark the decl as addressable if the compound literal
4782 expression is addressable now, otherwise it is marked too late
4783 after we gimplify the initialization expression. */
4784 if (TREE_ADDRESSABLE (*expr_p))
4785 TREE_ADDRESSABLE (decl) = 1;
4786 /* Otherwise, if we don't need an lvalue and have a literal, directly
4787 substitute it. Check if it matches the gimple predicate, as
4788 otherwise we'd generate a new temporary, and we can as well just
4789 use the decl we already have. */
4790 else if (!TREE_ADDRESSABLE (decl)
4791 && !TREE_THIS_VOLATILE (decl)
4792 && init
4793 && (fallback & fb_lvalue) == 0
4794 && gimple_test_f (init))
4795 {
4796 *expr_p = init;
4797 return GS_OK;
4798 }
4799
4800 /* If the decl is not addressable, then it is being used in some
4801 expression or on the right hand side of a statement, and it can
4802 be put into a readonly data section. */
4803 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
4804 TREE_READONLY (decl) = 1;
4805
4806 /* This decl isn't mentioned in the enclosing block, so add it to the
4807 list of temps. FIXME it seems a bit of a kludge to say that
4808 anonymous artificial vars aren't pushed, but everything else is. */
4809 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
4810 gimple_add_tmp_var (decl);
4811
4812 gimplify_and_add (decl_s, pre_p);
4813 *expr_p = decl;
4814 return GS_OK;
4815 }
4816
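/* For example (sketch), in C99 code such as

     int *p = (int []) { 1, 2, 3 };

   the DECL_EXPR of the anonymous array is gimplified into *PRE_P and
   *EXPR_P is replaced by the anonymous decl itself; when no lvalue
   is required and the initializer already satisfies GIMPLE_TEST_F,
   the initializer is substituted directly instead.  */
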
4817 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4818 return a new CONSTRUCTOR if something changed. */
4819
4820 static tree
4821 optimize_compound_literals_in_ctor (tree orig_ctor)
4822 {
4823 tree ctor = orig_ctor;
4824 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4825 unsigned int idx, num = vec_safe_length (elts);
4826
4827 for (idx = 0; idx < num; idx++)
4828 {
4829 tree value = (*elts)[idx].value;
4830 tree newval = value;
4831 if (TREE_CODE (value) == CONSTRUCTOR)
4832 newval = optimize_compound_literals_in_ctor (value);
4833 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4834 {
4835 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4836 tree decl = DECL_EXPR_DECL (decl_s);
4837 tree init = DECL_INITIAL (decl);
4838
4839 if (!TREE_ADDRESSABLE (value)
4840 && !TREE_ADDRESSABLE (decl)
4841 && init
4842 && TREE_CODE (init) == CONSTRUCTOR)
4843 newval = optimize_compound_literals_in_ctor (init);
4844 }
4845 if (newval == value)
4846 continue;
4847
4848 if (ctor == orig_ctor)
4849 {
4850 ctor = copy_node (orig_ctor);
4851 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4852 elts = CONSTRUCTOR_ELTS (ctor);
4853 }
4854 (*elts)[idx].value = newval;
4855 }
4856 return ctor;
4857 }
4858
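/* Sketch: a nested initializer such as

     { (struct S) { 1, 2 } }

   is rewritten, when the literal's decl is not addressable, into

     { { 1, 2 } }

   by replacing the embedded COMPOUND_LITERAL_EXPR with its
   CONSTRUCTOR initializer, which may let the enclosing constructor
   be treated as constant.  */
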
4859 /* A subroutine of gimplify_modify_expr. Break out elements of a
4860 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4861
4862 Note that we still need to clear any elements that don't have explicit
4863 initializers, so if not all elements are initialized we keep the
4864 original MODIFY_EXPR, we just remove all of the constructor elements.
4865
4866 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4867 GS_ERROR if we would have to create a temporary when gimplifying
4868 this constructor. Otherwise, return GS_OK.
4869
4870 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4871
4872 static enum gimplify_status
4873 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4874 bool want_value, bool notify_temp_creation)
4875 {
4876 tree object, ctor, type;
4877 enum gimplify_status ret;
4878 vec<constructor_elt, va_gc> *elts;
4879
4880 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4881
4882 if (!notify_temp_creation)
4883 {
4884 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4885 is_gimple_lvalue, fb_lvalue);
4886 if (ret == GS_ERROR)
4887 return ret;
4888 }
4889
4890 object = TREE_OPERAND (*expr_p, 0);
4891 ctor = TREE_OPERAND (*expr_p, 1)
4892 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4893 type = TREE_TYPE (ctor);
4894 elts = CONSTRUCTOR_ELTS (ctor);
4895 ret = GS_ALL_DONE;
4896
4897 switch (TREE_CODE (type))
4898 {
4899 case RECORD_TYPE:
4900 case UNION_TYPE:
4901 case QUAL_UNION_TYPE:
4902 case ARRAY_TYPE:
4903 {
4904 /* Use readonly data for initializers of this or smaller size
4905 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4906 ratio. */
4907 const HOST_WIDE_INT min_unique_size = 64;
4908 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4909 is smaller than this, use readonly data. */
4910 const int unique_nonzero_ratio = 8;
4911 /* True if a single access of the object must be ensured. This is the
4912 case if the target is volatile, the type is non-addressable and more
4913 than one field needs to be assigned. */
4914 const bool ensure_single_access
4915 = TREE_THIS_VOLATILE (object)
4916 && !TREE_ADDRESSABLE (type)
4917 && vec_safe_length (elts) > 1;
4918 struct gimplify_init_ctor_preeval_data preeval_data;
4919 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4920 HOST_WIDE_INT num_unique_nonzero_elements;
4921 bool cleared, complete_p, valid_const_initializer;
4922
4923 /* Aggregate types must lower constructors to initialization of
4924 individual elements. The exception is that a CONSTRUCTOR node
4925 with no elements indicates zero-initialization of the whole. */
4926 if (vec_safe_is_empty (elts))
4927 {
4928 if (notify_temp_creation)
4929 return GS_OK;
4930 break;
4931 }
4932
4933 /* Fetch information about the constructor to direct later processing.
4934 We might want to make static versions of it in various cases, and
4935 can only do so if it is known to be a valid constant initializer. */
4936 valid_const_initializer
4937 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4938 &num_unique_nonzero_elements,
4939 &num_ctor_elements, &complete_p);
4940
4941 /* If a const aggregate variable is being initialized, then it
4942 should never be a loss to promote the variable to be static. */
4943 if (valid_const_initializer
4944 && num_nonzero_elements > 1
4945 && TREE_READONLY (object)
4946 && VAR_P (object)
4947 && !DECL_REGISTER (object)
4948 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4949 /* For ctors that have many repeated nonzero elements
4950 represented through RANGE_EXPRs, prefer initializing
4951 those through runtime loops over copies of large amounts
4952 of data from readonly data section. */
4953 && (num_unique_nonzero_elements
4954 > num_nonzero_elements / unique_nonzero_ratio
4955 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4956 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4957 {
4958 if (notify_temp_creation)
4959 return GS_ERROR;
4960
4961 DECL_INITIAL (object) = ctor;
4962 TREE_STATIC (object) = 1;
4963 if (!DECL_NAME (object))
4964 DECL_NAME (object) = create_tmp_var_name ("C");
4965 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4966
4967 /* ??? C++ doesn't automatically append a .<number> to the
4968 assembler name, and even when it does, it looks at FE private
4969 data structures to figure out what that number should be,
4970 which are not set for this variable. I suppose this is
4971 important for local statics for inline functions, which aren't
4972 "local" in the object file sense. So in order to get a unique
4973 TU-local symbol, we must invoke the lhd version now. */
4974 lhd_set_decl_assembler_name (object);
4975
4976 *expr_p = NULL_TREE;
4977 break;
4978 }
4979
4980 /* If there are "lots" of initialized elements, even discounting
4981 those that are not address constants (and thus *must* be
4982 computed at runtime), then partition the constructor into
4983 constant and non-constant parts. Block copy the constant
4984 parts in, then generate code for the non-constant parts. */
4985 /* TODO. There's code in cp/typeck.c to do this. */
4986
4987 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4988 /* store_constructor will ignore the clearing of variable-sized
4989 objects. Initializers for such objects must explicitly set
4990 every field that needs to be set. */
4991 cleared = false;
4992 else if (!complete_p)
4993 /* If the constructor isn't complete, clear the whole object
4994 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4995
4996 ??? This ought not to be needed. For any element not present
4997 in the initializer, we should simply set them to zero. Except
4998 we'd need to *find* the elements that are not present, and that
4999 requires trickery to avoid quadratic compile-time behavior in
5000 large cases or excessive memory use in small cases. */
5001 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5002 else if (num_ctor_elements - num_nonzero_elements
5003 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5004 && num_nonzero_elements < num_ctor_elements / 4)
5005 /* If there are "lots" of zeros, it's more efficient to clear
5006 the memory and then set the nonzero elements. */
5007 cleared = true;
5008 else if (ensure_single_access && num_nonzero_elements == 0)
5009 /* If a single access to the target must be ensured and all elements
5010 are zero, then it's optimal to clear, whatever their number. */
5011 cleared = true;
5012 else
5013 cleared = false;
5014
5015 /* If there are "lots" of initialized elements, and all of them
5016 are valid address constants, then the entire initializer can
5017 be dropped to memory, and then memcpy'd out. Don't do this
5018 for sparse arrays, though, as it's more efficient to follow
5019 the standard CONSTRUCTOR behavior of memset followed by
5020 individual element initialization. Also don't do this for small
5021 all-zero initializers (which aren't big enough to merit
5022 clearing), and don't try to make bitwise copies of
5023 TREE_ADDRESSABLE types. */
5024 if (valid_const_initializer
5025 && complete_p
5026 && !(cleared || num_nonzero_elements == 0)
5027 && !TREE_ADDRESSABLE (type))
5028 {
5029 HOST_WIDE_INT size = int_size_in_bytes (type);
5030 unsigned int align;
5031
5032 /* ??? We can still get unbounded array types, at least
5033 from the C++ front end. This seems wrong, but attempt
5034 to work around it for now. */
5035 if (size < 0)
5036 {
5037 size = int_size_in_bytes (TREE_TYPE (object));
5038 if (size >= 0)
5039 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5040 }
5041
5042 /* Find the maximum alignment we can assume for the object. */
5043 /* ??? Make use of DECL_OFFSET_ALIGN. */
5044 if (DECL_P (object))
5045 align = DECL_ALIGN (object);
5046 else
5047 align = TYPE_ALIGN (type);
5048
5049 /* Do a block move either if the size is so small as to make
5050 each individual move a sub-unit move on average, or if it
5051 is so large as to make individual moves inefficient. */
5052 if (size > 0
5053 && num_nonzero_elements > 1
5054 /* For ctors that have many repeated nonzero elements
5055 represented through RANGE_EXPRs, prefer initializing
5056 those through runtime loops over copies of large amounts
5057 of data from readonly data section. */
5058 && (num_unique_nonzero_elements
5059 > num_nonzero_elements / unique_nonzero_ratio
5060 || size <= min_unique_size)
5061 && (size < num_nonzero_elements
5062 || !can_move_by_pieces (size, align)))
5063 {
5064 if (notify_temp_creation)
5065 return GS_ERROR;
5066
5067 walk_tree (&ctor, force_labels_r, NULL, NULL);
5068 ctor = tree_output_constant_def (ctor);
5069 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5070 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5071 TREE_OPERAND (*expr_p, 1) = ctor;
5072
5073 /* This is no longer an assignment of a CONSTRUCTOR, but
5074 we still may have processing to do on the LHS. So
5075 pretend we didn't do anything here to let that happen. */
5076 return GS_UNHANDLED;
5077 }
5078 }
5079
5080 /* If a single access to the target must be ensured and there are
5081 nonzero elements or the zero elements are not assigned en masse,
5082 initialize the target from a temporary. */
5083 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5084 {
5085 if (notify_temp_creation)
5086 return GS_ERROR;
5087
5088 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5089 TREE_OPERAND (*expr_p, 0) = temp;
5090 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5091 *expr_p,
5092 build2 (MODIFY_EXPR, void_type_node,
5093 object, temp));
5094 return GS_OK;
5095 }
5096
5097 if (notify_temp_creation)
5098 return GS_OK;
5099
5100 /* If there are nonzero elements and if needed, pre-evaluate to capture
5101 elements overlapping with the lhs into temporaries. We must do this
5102 before clearing to fetch the values before they are zeroed-out. */
5103 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5104 {
5105 preeval_data.lhs_base_decl = get_base_address (object);
5106 if (!DECL_P (preeval_data.lhs_base_decl))
5107 preeval_data.lhs_base_decl = NULL;
5108 preeval_data.lhs_alias_set = get_alias_set (object);
5109
5110 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5111 pre_p, post_p, &preeval_data);
5112 }
5113
5114 bool ctor_has_side_effects_p
5115 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5116
5117 if (cleared)
5118 {
5119 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5120 Note that we still have to gimplify, in order to handle the
5121 case of variable sized types. Avoid shared tree structures. */
5122 CONSTRUCTOR_ELTS (ctor) = NULL;
5123 TREE_SIDE_EFFECTS (ctor) = 0;
5124 object = unshare_expr (object);
5125 gimplify_stmt (expr_p, pre_p);
5126 }
5127
5128 /* If we have not block cleared the object, or if there are nonzero
5129 elements in the constructor, or if the constructor has side effects,
5130 add assignments to the individual scalar fields of the object. */
5131 if (!cleared
5132 || num_nonzero_elements > 0
5133 || ctor_has_side_effects_p)
5134 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5135
5136 *expr_p = NULL_TREE;
5137 }
5138 break;
5139
5140 case COMPLEX_TYPE:
5141 {
5142 tree r, i;
5143
5144 if (notify_temp_creation)
5145 return GS_OK;
5146
5147 /* Extract the real and imaginary parts out of the ctor. */
5148 gcc_assert (elts->length () == 2);
5149 r = (*elts)[0].value;
5150 i = (*elts)[1].value;
5151 if (r == NULL || i == NULL)
5152 {
5153 tree zero = build_zero_cst (TREE_TYPE (type));
5154 if (r == NULL)
5155 r = zero;
5156 if (i == NULL)
5157 i = zero;
5158 }
5159
5160 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5161 represent creation of a complex value. */
5162 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5163 {
5164 ctor = build_complex (type, r, i);
5165 TREE_OPERAND (*expr_p, 1) = ctor;
5166 }
5167 else
5168 {
5169 ctor = build2 (COMPLEX_EXPR, type, r, i);
5170 TREE_OPERAND (*expr_p, 1) = ctor;
5171 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5172 pre_p,
5173 post_p,
5174 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5175 fb_rvalue);
5176 }
5177 }
5178 break;
5179
5180 case VECTOR_TYPE:
5181 {
5182 unsigned HOST_WIDE_INT ix;
5183 constructor_elt *ce;
5184
5185 if (notify_temp_creation)
5186 return GS_OK;
5187
5188 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5189 if (TREE_CONSTANT (ctor))
5190 {
5191 bool constant_p = true;
5192 tree value;
5193
5194 /* Even when ctor is constant, it might contain non-*_CST
5195 elements, such as addresses or trapping values like
5196 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5197 in VECTOR_CST nodes. */
5198 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5199 if (!CONSTANT_CLASS_P (value))
5200 {
5201 constant_p = false;
5202 break;
5203 }
5204
5205 if (constant_p)
5206 {
5207 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5208 break;
5209 }
5210
5211 TREE_CONSTANT (ctor) = 0;
5212 }
5213
5214 /* Vector types use CONSTRUCTOR all the way through gimple
5215 compilation as a general initializer. */
5216 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5217 {
5218 enum gimplify_status tret;
5219 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5220 fb_rvalue);
5221 if (tret == GS_ERROR)
5222 ret = GS_ERROR;
5223 else if (TREE_STATIC (ctor)
5224 && !initializer_constant_valid_p (ce->value,
5225 TREE_TYPE (ce->value)))
5226 TREE_STATIC (ctor) = 0;
5227 }
5228 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5229 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5230 }
5231 break;
5232
5233 default:
5234 /* So how did we get a CONSTRUCTOR for a scalar type? */
5235 gcc_unreachable ();
5236 }
5237
5238 if (ret == GS_ERROR)
5239 return GS_ERROR;
5240 /* If we have gimplified both sides of the initializer but have
5241 not emitted an assignment, do so now. */
5242 if (*expr_p)
5243 {
5244 tree lhs = TREE_OPERAND (*expr_p, 0);
5245 tree rhs = TREE_OPERAND (*expr_p, 1);
5246 if (want_value && object == lhs)
5247 lhs = unshare_expr (lhs);
5248 gassign *init = gimple_build_assign (lhs, rhs);
5249 gimplify_seq_add_stmt (pre_p, init);
5250 }
5251 if (want_value)
5252 {
5253 *expr_p = object;
5254 return GS_OK;
5255 }
5256 else
5257 {
5258 *expr_p = NULL;
5259 return GS_ALL_DONE;
5260 }
5261 }
5262
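/* Worked example (illustrative): for a mostly-zero initializer such
   as

     int a[100] = { [3] = 5 };

   the incomplete element list makes CLEARED true, so the element
   list is zapped, the whole object is cleared first, and only

     a[3] = 5;

   is emitted afterwards by gimplify_init_ctor_eval.  */
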
5263 /* Given a pointer value OP0, return a simplified version of an
5264 indirection through OP0, or NULL_TREE if no simplification is
5265 possible. This may only be applied to a rhs of an expression.
5266 Note that the resulting type may be different from the type pointed
5267 to in the sense that it is still compatible from the langhooks
5268 point of view. */
5269
5270 static tree
5271 gimple_fold_indirect_ref_rhs (tree t)
5272 {
5273 return gimple_fold_indirect_ref (t);
5274 }
5275
5276 /* Subroutine of gimplify_modify_expr to do simplifications of
5277 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5278 something changes. */
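
/* For instance (sketch), the TARGET_EXPR case below rewrites

     x = TARGET_EXPR <D.1234, foo ()>;

   as

     x = foo ();

   eliding the temporary, provided the initializer is not void and
   the TARGET_EXPR is not marked TARGET_EXPR_NO_ELIDE.  */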
5279
5280 static enum gimplify_status
5281 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5282 gimple_seq *pre_p, gimple_seq *post_p,
5283 bool want_value)
5284 {
5285 enum gimplify_status ret = GS_UNHANDLED;
5286 bool changed;
5287
5288 do
5289 {
5290 changed = false;
5291 switch (TREE_CODE (*from_p))
5292 {
5293 case VAR_DECL:
5294 /* If we're assigning from a read-only variable initialized with
5295 a constructor and not volatile, do the direct assignment from
5296 the constructor, but only if the target is not volatile either
5297 since this latter assignment might end up being done on a per
5298 field basis. However, if the target is volatile and the type
5299 is aggregate and non-addressable, gimplify_init_constructor
5300 knows that it needs to ensure a single access to the target
5301 and it will return GS_OK only in this case. */
5302 if (TREE_READONLY (*from_p)
5303 && DECL_INITIAL (*from_p)
5304 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5305 && !TREE_THIS_VOLATILE (*from_p)
5306 && (!TREE_THIS_VOLATILE (*to_p)
5307 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5308 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5309 {
5310 tree old_from = *from_p;
5311 enum gimplify_status subret;
5312
5313 /* Move the constructor into the RHS. */
5314 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5315
5316 /* Let's see if gimplify_init_constructor will need to put
5317 it in memory. */
5318 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5319 false, true);
5320 if (subret == GS_ERROR)
5321 {
5322 /* If so, revert the change. */
5323 *from_p = old_from;
5324 }
5325 else
5326 {
5327 ret = GS_OK;
5328 changed = true;
5329 }
5330 }
5331 break;
5332 case INDIRECT_REF:
5333 {
5334 /* If we have code like
5335
5336 *(const A*)(A*)&x
5337
5338 where the type of "x" is a (possibly cv-qualified) variant
5339 of "A", treat the entire expression as identical to "x".
5340 This kind of code arises in C++ when an object is bound
5341 to a const reference, and if "x" is a TARGET_EXPR we want
5342 to take advantage of the optimization below. */
5343 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5344 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5345 if (t)
5346 {
5347 if (TREE_THIS_VOLATILE (t) != volatile_p)
5348 {
5349 if (DECL_P (t))
5350 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5351 build_fold_addr_expr (t));
5352 if (REFERENCE_CLASS_P (t))
5353 TREE_THIS_VOLATILE (t) = volatile_p;
5354 }
5355 *from_p = t;
5356 ret = GS_OK;
5357 changed = true;
5358 }
5359 break;
5360 }
5361
5362 case TARGET_EXPR:
5363 {
5364 /* If we are initializing something from a TARGET_EXPR, strip the
5365 TARGET_EXPR and initialize it directly, if possible. This can't
5366 be done if the initializer is void, since that implies that the
5367 temporary is set in some non-trivial way.
5368
5369 ??? What about code that pulls out the temp and uses it
5370 elsewhere? I think that such code never uses the TARGET_EXPR as
5371 an initializer. If I'm wrong, we'll die because the temp won't
5372 have any RTL. In that case, I guess we'll need to replace
5373 references somehow. */
5374 tree init = TARGET_EXPR_INITIAL (*from_p);
5375
5376 if (init
5377 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5378 || !TARGET_EXPR_NO_ELIDE (*from_p))
5379 && !VOID_TYPE_P (TREE_TYPE (init)))
5380 {
5381 *from_p = init;
5382 ret = GS_OK;
5383 changed = true;
5384 }
5385 }
5386 break;
5387
5388 case COMPOUND_EXPR:
5389 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5390 caught. */
5391 gimplify_compound_expr (from_p, pre_p, true);
5392 ret = GS_OK;
5393 changed = true;
5394 break;
5395
5396 case CONSTRUCTOR:
5397 /* If we already made some changes, let the front end have a
5398 crack at this before we break it down. */
5399 if (ret != GS_UNHANDLED)
5400 break;
5401 /* If we're initializing from a CONSTRUCTOR, break this into
5402 individual MODIFY_EXPRs. */
5403 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5404 false);
5405
5406 case COND_EXPR:
5407 /* If we're assigning to a non-register type, push the assignment
5408 down into the branches. This is mandatory for ADDRESSABLE types,
5409 since we cannot generate temporaries for such, but it saves a
5410 copy in other cases as well. */
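/* E.g. (a sketch, not from a testcase): for an aggregate "a",

   a = b ? c : d;

   is rewritten here into approximately

   if (b) a = c; else a = d;

   so that no temporary of the aggregate type is needed. */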
5411 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5412 {
5413 /* This code should mirror the code in gimplify_cond_expr. */
5414 enum tree_code code = TREE_CODE (*expr_p);
5415 tree cond = *from_p;
5416 tree result = *to_p;
5417
5418 ret = gimplify_expr (&result, pre_p, post_p,
5419 is_gimple_lvalue, fb_lvalue);
5420 if (ret != GS_ERROR)
5421 ret = GS_OK;
5422
5423 /* If we are going to write RESULT more than once, clear
5424 TREE_READONLY flag, otherwise we might incorrectly promote
5425 the variable to static const and initialize it at compile
5426 time in one of the branches. */
5427 if (VAR_P (result)
5428 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5429 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5430 TREE_READONLY (result) = 0;
5431 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5432 TREE_OPERAND (cond, 1)
5433 = build2 (code, void_type_node, result,
5434 TREE_OPERAND (cond, 1));
5435 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5436 TREE_OPERAND (cond, 2)
5437 = build2 (code, void_type_node, unshare_expr (result),
5438 TREE_OPERAND (cond, 2));
5439
5440 TREE_TYPE (cond) = void_type_node;
5441 recalculate_side_effects (cond);
5442
5443 if (want_value)
5444 {
5445 gimplify_and_add (cond, pre_p);
5446 *expr_p = unshare_expr (result);
5447 }
5448 else
5449 *expr_p = cond;
5450 return ret;
5451 }
5452 break;
5453
5454 case CALL_EXPR:
5455 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5456 return slot so that we don't generate a temporary. */
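/* E.g. (illustrative) given

   struct S s = f ();

   where S is returned in memory, we try to let f construct its
   return value directly in "s" instead of in a compiler temporary
   that would then have to be copied into "s". */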
5457 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5458 && aggregate_value_p (*from_p, *from_p))
5459 {
5460 bool use_target;
5461
5462 if (!(rhs_predicate_for (*to_p))(*from_p))
5463 /* If we need a temporary, *to_p isn't accurate. */
5464 use_target = false;
5465 /* It's OK to use the return slot directly unless it's an NRV. */
5466 else if (TREE_CODE (*to_p) == RESULT_DECL
5467 && DECL_NAME (*to_p) == NULL_TREE
5468 && needs_to_live_in_memory (*to_p))
5469 use_target = true;
5470 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5471 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5472 /* Don't force regs into memory. */
5473 use_target = false;
5474 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5475 /* It's OK to use the target directly if it's being
5476 initialized. */
5477 use_target = true;
5478 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5479 != INTEGER_CST)
5480 /* Always use the target and thus RSO for variable-sized types.
5481 GIMPLE cannot deal with a variable-sized assignment
5482 embedded in a call statement. */
5483 use_target = true;
5484 else if (TREE_CODE (*to_p) != SSA_NAME
5485 && (!is_gimple_variable (*to_p)
5486 || needs_to_live_in_memory (*to_p)))
5487 /* Don't use the original target if it's already addressable;
5488 if its address escapes, and the called function uses the
5489 NRV optimization, a conforming program could see *to_p
5490 change before the called function returns; see c++/19317.
5491 When optimizing, the return_slot pass marks more functions
5492 as safe after we have escape info. */
5493 use_target = false;
5494 else
5495 use_target = true;
5496
5497 if (use_target)
5498 {
5499 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5500 mark_addressable (*to_p);
5501 }
5502 }
5503 break;
5504
5505 case WITH_SIZE_EXPR:
5506 /* Likewise for calls that return an aggregate of non-constant size,
5507 since we would not be able to generate a temporary at all. */
5508 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5509 {
5510 *from_p = TREE_OPERAND (*from_p, 0);
5511 /* We don't change ret in this case because the
5512 WITH_SIZE_EXPR might have been added in
5513 gimplify_modify_expr, so returning GS_OK would lead to an
5514 infinite loop. */
5515 changed = true;
5516 }
5517 break;
5518
5519 /* If we're initializing from a container, push the initialization
5520 inside it. */
5521 case CLEANUP_POINT_EXPR:
5522 case BIND_EXPR:
5523 case STATEMENT_LIST:
5524 {
5525 tree wrap = *from_p;
5526 tree t;
5527
5528 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5529 fb_lvalue);
5530 if (ret != GS_ERROR)
5531 ret = GS_OK;
5532
5533 t = voidify_wrapper_expr (wrap, *expr_p);
5534 gcc_assert (t == *expr_p);
5535
5536 if (want_value)
5537 {
5538 gimplify_and_add (wrap, pre_p);
5539 *expr_p = unshare_expr (*to_p);
5540 }
5541 else
5542 *expr_p = wrap;
5543 return GS_OK;
5544 }
5545
5546 case NOP_EXPR:
5547 /* Pull out compound literal expressions from a NOP_EXPR.
5548 Those are created in the C FE to drop qualifiers during
5549 lvalue conversion. */
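/* E.g. (illustrative) reading the value of

   (const int) { 42 }

   yields a NOP_EXPR converting the COMPOUND_LITERAL_EXPR from
   "const int" to "int"; when that conversion is useless here, we
   strip it so the COMPOUND_LITERAL_EXPR case below can see it. */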
5550 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5551 && tree_ssa_useless_type_conversion (*from_p))
5552 {
5553 *from_p = TREE_OPERAND (*from_p, 0);
5554 ret = GS_OK;
5555 changed = true;
5556 }
5557 break;
5558
5559 case COMPOUND_LITERAL_EXPR:
5560 {
5561 tree complit = TREE_OPERAND (*expr_p, 1);
5562 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5563 tree decl = DECL_EXPR_DECL (decl_s);
5564 tree init = DECL_INITIAL (decl);
5565
5566 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5567 into struct T x = { 0, 1, 2 } if the address of the
5568 compound literal has never been taken. */
5569 if (!TREE_ADDRESSABLE (complit)
5570 && !TREE_ADDRESSABLE (decl)
5571 && init)
5572 {
5573 *expr_p = copy_node (*expr_p);
5574 TREE_OPERAND (*expr_p, 1) = init;
5575 return GS_OK;
5576 }
5577 }
5578
5579 default:
5580 break;
5581 }
5582 }
5583 while (changed);
5584
5585 return ret;
5586 }
5587
5588
5589 /* Return true if T looks like a valid GIMPLE statement. */
5590
5591 static bool
5592 is_gimple_stmt (tree t)
5593 {
5594 const enum tree_code code = TREE_CODE (t);
5595
5596 switch (code)
5597 {
5598 case NOP_EXPR:
5599 /* The only valid NOP_EXPR is the empty statement. */
5600 return IS_EMPTY_STMT (t);
5601
5602 case BIND_EXPR:
5603 case COND_EXPR:
5604 /* These are only valid if they're void. */
5605 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5606
5607 case SWITCH_EXPR:
5608 case GOTO_EXPR:
5609 case RETURN_EXPR:
5610 case LABEL_EXPR:
5611 case CASE_LABEL_EXPR:
5612 case TRY_CATCH_EXPR:
5613 case TRY_FINALLY_EXPR:
5614 case EH_FILTER_EXPR:
5615 case CATCH_EXPR:
5616 case ASM_EXPR:
5617 case STATEMENT_LIST:
5618 case OACC_PARALLEL:
5619 case OACC_KERNELS:
5620 case OACC_SERIAL:
5621 case OACC_DATA:
5622 case OACC_HOST_DATA:
5623 case OACC_DECLARE:
5624 case OACC_UPDATE:
5625 case OACC_ENTER_DATA:
5626 case OACC_EXIT_DATA:
5627 case OACC_CACHE:
5628 case OMP_PARALLEL:
5629 case OMP_FOR:
5630 case OMP_SIMD:
5631 case OMP_DISTRIBUTE:
5632 case OMP_LOOP:
5633 case OACC_LOOP:
5634 case OMP_SCAN:
5635 case OMP_SECTIONS:
5636 case OMP_SECTION:
5637 case OMP_SINGLE:
5638 case OMP_MASTER:
5639 case OMP_TASKGROUP:
5640 case OMP_ORDERED:
5641 case OMP_CRITICAL:
5642 case OMP_TASK:
5643 case OMP_TARGET:
5644 case OMP_TARGET_DATA:
5645 case OMP_TARGET_UPDATE:
5646 case OMP_TARGET_ENTER_DATA:
5647 case OMP_TARGET_EXIT_DATA:
5648 case OMP_TASKLOOP:
5649 case OMP_TEAMS:
5650 /* These are always void. */
5651 return true;
5652
5653 case CALL_EXPR:
5654 case MODIFY_EXPR:
5655 case PREDICT_EXPR:
5656 /* These are valid regardless of their type. */
5657 return true;
5658
5659 default:
5660 return false;
5661 }
5662 }
5663
5664
5665 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5666 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5667
5668 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5669 other, unmodified part of the complex object just before the total store.
5670 As a consequence, if the object is still uninitialized, an undefined value
5671 will be loaded into a register, which may result in a spurious exception
5672 if the register is floating-point and the value happens to be a signaling
5673 NaN for example. Then the fully-fledged complex operations lowering pass
5674 followed by a DCE pass are necessary in order to fix things up. */
5675
5676 static enum gimplify_status
5677 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5678 bool want_value)
5679 {
5680 enum tree_code code, ocode;
5681 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5682
5683 lhs = TREE_OPERAND (*expr_p, 0);
5684 rhs = TREE_OPERAND (*expr_p, 1);
5685 code = TREE_CODE (lhs);
5686 lhs = TREE_OPERAND (lhs, 0);
5687
5688 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5689 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5690 TREE_NO_WARNING (other) = 1;
5691 other = get_formal_tmp_var (other, pre_p);
5692
5693 realpart = code == REALPART_EXPR ? rhs : other;
5694 imagpart = code == REALPART_EXPR ? other : rhs;
5695
5696 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5697 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5698 else
5699 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5700
5701 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5702 *expr_p = (want_value) ? rhs : NULL_TREE;
5703
5704 return GS_ALL_DONE;
5705 }
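/* E.g. (approximately) for a non-addressable _Complex float "c",
   the partial store

   __real c = r;

   is promoted by the function above to the total store

   D.1 = __imag c;
   c = COMPLEX_EXPR <r, D.1>;

   where D.1 is a formal temporary (the name is illustrative). */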
5706
5707 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5708
5709 modify_expr
5710 : varname '=' rhs
5711 | '*' ID '=' rhs
5712
5713 PRE_P points to the list where side effects that must happen before
5714 *EXPR_P should be stored.
5715
5716 POST_P points to the list where side effects that must happen after
5717 *EXPR_P should be stored.
5718
5719 WANT_VALUE is nonzero iff we want to use the value of this expression
5720 in another expression. */
5721
5722 static enum gimplify_status
5723 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5724 bool want_value)
5725 {
5726 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5727 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5728 enum gimplify_status ret = GS_UNHANDLED;
5729 gimple *assign;
5730 location_t loc = EXPR_LOCATION (*expr_p);
5731 gimple_stmt_iterator gsi;
5732
5733 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5734 || TREE_CODE (*expr_p) == INIT_EXPR);
5735
5736 /* Trying to simplify a clobber using normal logic doesn't work,
5737 so handle it here. */
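/* E.g. "v = {CLOBBER};", which marks the storage of "v" as dead,
   must survive as a GIMPLE_ASSIGN with the clobber on the RHS
   rather than be simplified like an ordinary aggregate copy. */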
5738 if (TREE_CLOBBER_P (*from_p))
5739 {
5740 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5741 if (ret == GS_ERROR)
5742 return ret;
5743 gcc_assert (!want_value);
5744 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
5745 {
5746 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
5747 pre_p, post_p);
5748 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
5749 }
5750 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5751 *expr_p = NULL;
5752 return GS_ALL_DONE;
5753 }
5754
5755 /* Insert pointer conversions required by the middle-end that are not
5756 required by the frontend. This fixes middle-end type checking
5757 for, for example, gcc.dg/redecl-6.c. */
5758 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5759 {
5760 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5761 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5762 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5763 }
5764
5765 /* See if any simplifications can be done based on what the RHS is. */
5766 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5767 want_value);
5768 if (ret != GS_UNHANDLED)
5769 return ret;
5770
5771 /* For zero-sized types, only gimplify the left hand side and right hand
5772 side as statements and throw away the assignment. Do this after
5773 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5774 types properly. */
5775 if (zero_sized_type (TREE_TYPE (*from_p))
5776 && !want_value
5777 /* Don't do this for calls that return addressable types; expand_call
5778 relies on those having a lhs. */
5779 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5780 && TREE_CODE (*from_p) == CALL_EXPR))
5781 {
5782 gimplify_stmt (from_p, pre_p);
5783 gimplify_stmt (to_p, pre_p);
5784 *expr_p = NULL_TREE;
5785 return GS_ALL_DONE;
5786 }
5787
5788 /* If the value being copied is of variable width, compute the length
5789 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5790 before gimplifying any of the operands so that we can resolve any
5791 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5792 the size of the expression to be copied, not of the destination, so
5793 that is what we must do here. */
5794 maybe_with_size_expr (from_p);
5795
5796 /* As a special case, we have to temporarily allow for assignments
5797 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5798 a toplevel statement, when gimplifying the GENERIC expression
5799 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5800 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5801
5802 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5803 prevent gimplify_expr from trying to create a new temporary for
5804 foo's LHS, we tell it that it should only gimplify until it
5805 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5806 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5807 and all we need to do here is set 'a' to be its LHS. */
5808
5809 /* Gimplify the RHS first for C++17 and bug 71104. */
5810 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5811 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5812 if (ret == GS_ERROR)
5813 return ret;
5814
5815 /* Then gimplify the LHS. */
5816 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5817 twice we have to make sure to gimplify into non-SSA as otherwise
5818 the abnormal edge added later will make those defs not dominate
5819 their uses.
5820 ??? Technically this applies only to the registers used in the
5821 resulting non-register *TO_P. */
5822 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5823 if (saved_into_ssa
5824 && TREE_CODE (*from_p) == CALL_EXPR
5825 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5826 gimplify_ctxp->into_ssa = false;
5827 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5828 gimplify_ctxp->into_ssa = saved_into_ssa;
5829 if (ret == GS_ERROR)
5830 return ret;
5831
5832 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5833 guess for the predicate was wrong. */
5834 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5835 if (final_pred != initial_pred)
5836 {
5837 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5838 if (ret == GS_ERROR)
5839 return ret;
5840 }
5841
5842 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
5843 size as argument to the call. */
5844 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5845 {
5846 tree call = TREE_OPERAND (*from_p, 0);
5847 tree vlasize = TREE_OPERAND (*from_p, 1);
5848
5849 if (TREE_CODE (call) == CALL_EXPR
5850 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5851 {
5852 int nargs = call_expr_nargs (call);
5853 tree type = TREE_TYPE (call);
5854 tree ap = CALL_EXPR_ARG (call, 0);
5855 tree tag = CALL_EXPR_ARG (call, 1);
5856 tree aptag = CALL_EXPR_ARG (call, 2);
5857 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5858 IFN_VA_ARG, type,
5859 nargs + 1, ap, tag,
5860 aptag, vlasize);
5861 TREE_OPERAND (*from_p, 0) = newcall;
5862 }
5863 }
5864
5865 /* Now see if the above changed *from_p to something we handle specially. */
5866 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5867 want_value);
5868 if (ret != GS_UNHANDLED)
5869 return ret;
5870
5871 /* If we've got a variable-sized assignment between two lvalues (i.e. one
5872 that does not involve a call), then we can make things a bit more straightforward
5873 by converting the assignment to memcpy or memset. */
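/* E.g. (a sketch) for variable-length arrays "char a[n], b[n];",
   the assignment "a = b" is emitted as the equivalent of
   __builtin_memcpy (&a, &b, n), and an assignment from an empty
   CONSTRUCTOR as the equivalent of __builtin_memset (&a, 0, n). */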
5874 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5875 {
5876 tree from = TREE_OPERAND (*from_p, 0);
5877 tree size = TREE_OPERAND (*from_p, 1);
5878
5879 if (TREE_CODE (from) == CONSTRUCTOR)
5880 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5881
5882 if (is_gimple_addressable (from))
5883 {
5884 *from_p = from;
5885 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5886 pre_p);
5887 }
5888 }
5889
5890 /* Transform partial stores to non-addressable complex variables into
5891 total stores. This allows us to use real instead of virtual operands
5892 for these variables, which improves optimization. */
5893 if ((TREE_CODE (*to_p) == REALPART_EXPR
5894 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5895 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5896 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5897
5898 /* Try to alleviate the effects of the gimplification creating artificial
5899 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5900 make sure not to create DECL_DEBUG_EXPR links across functions. */
5901 if (!gimplify_ctxp->into_ssa
5902 && VAR_P (*from_p)
5903 && DECL_IGNORED_P (*from_p)
5904 && DECL_P (*to_p)
5905 && !DECL_IGNORED_P (*to_p)
5906 && decl_function_context (*to_p) == current_function_decl
5907 && decl_function_context (*from_p) == current_function_decl)
5908 {
5909 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5910 DECL_NAME (*from_p)
5911 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5912 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5913 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5914 }
5915
5916 if (want_value && TREE_THIS_VOLATILE (*to_p))
5917 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5918
5919 if (TREE_CODE (*from_p) == CALL_EXPR)
5920 {
5921 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5922 instead of a GIMPLE_ASSIGN. */
5923 gcall *call_stmt;
5924 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5925 {
5926 /* Gimplify internal functions created in the FEs. */
5927 int nargs = call_expr_nargs (*from_p), i;
5928 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5929 auto_vec<tree> vargs (nargs);
5930
5931 for (i = 0; i < nargs; i++)
5932 {
5933 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5934 EXPR_LOCATION (*from_p));
5935 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5936 }
5937 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5938 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5939 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5940 }
5941 else
5942 {
5943 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5944 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5945 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5946 tree fndecl = get_callee_fndecl (*from_p);
5947 if (fndecl
5948 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
5949 && call_expr_nargs (*from_p) == 3)
5950 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5951 CALL_EXPR_ARG (*from_p, 0),
5952 CALL_EXPR_ARG (*from_p, 1),
5953 CALL_EXPR_ARG (*from_p, 2));
5954 else
5955 {
5956 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5957 }
5958 }
5959 notice_special_calls (call_stmt);
5960 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5961 gimple_call_set_lhs (call_stmt, *to_p);
5962 else if (TREE_CODE (*to_p) == SSA_NAME)
5963 /* The above is somewhat premature; avoid ICEing later for an
5964 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5965 ??? This doesn't make it a default-def. */
5966 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5967
5968 assign = call_stmt;
5969 }
5970 else
5971 {
5972 assign = gimple_build_assign (*to_p, *from_p);
5973 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5974 if (COMPARISON_CLASS_P (*from_p))
5975 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5976 }
5977
5978 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5979 {
5980 /* We should have got an SSA name from the start. */
5981 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5982 || ! gimple_in_ssa_p (cfun));
5983 }
5984
5985 gimplify_seq_add_stmt (pre_p, assign);
5986 gsi = gsi_last (*pre_p);
5987 maybe_fold_stmt (&gsi);
5988
5989 if (want_value)
5990 {
5991 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5992 return GS_OK;
5993 }
5994 else
5995 *expr_p = NULL;
5996
5997 return GS_ALL_DONE;
5998 }
5999
6000 /* Gimplify a comparison between two variable-sized objects. Do this
6001 with a call to BUILT_IN_MEMCMP. */
6002
6003 static enum gimplify_status
6004 gimplify_variable_sized_compare (tree *expr_p)
6005 {
6006 location_t loc = EXPR_LOCATION (*expr_p);
6007 tree op0 = TREE_OPERAND (*expr_p, 0);
6008 tree op1 = TREE_OPERAND (*expr_p, 1);
6009 tree t, arg, dest, src, expr;
6010
6011 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6012 arg = unshare_expr (arg);
6013 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6014 src = build_fold_addr_expr_loc (loc, op1);
6015 dest = build_fold_addr_expr_loc (loc, op0);
6016 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6017 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6018
6019 expr
6020 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6021 SET_EXPR_LOCATION (expr, loc);
6022 *expr_p = expr;
6023
6024 return GS_OK;
6025 }
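/* E.g. (a sketch) for two objects "a" and "b" of the same
   variable-sized type with size SIZE, the function above rewrites

   a == b

   into

   __builtin_memcmp (&a, &b, SIZE) == 0

   with SIZE computed from the type of "a". */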
6026
6027 /* Gimplify a comparison between two aggregate objects of integral scalar
6028 mode as a comparison between the bitwise equivalent scalar values. */
6029
6030 static enum gimplify_status
6031 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6032 {
6033 location_t loc = EXPR_LOCATION (*expr_p);
6034 tree op0 = TREE_OPERAND (*expr_p, 0);
6035 tree op1 = TREE_OPERAND (*expr_p, 1);
6036
6037 tree type = TREE_TYPE (op0);
6038 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6039
6040 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6041 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6042
6043 *expr_p
6044 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6045
6046 return GS_OK;
6047 }
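/* E.g. (illustrative) if "struct S" occupies 8 bytes and the target
   assigns it a 64-bit integer mode, a comparison "x == y" of two
   struct S objects is rewritten by the function above as roughly

   VIEW_CONVERT_EXPR<uint64> (x) == VIEW_CONVERT_EXPR<uint64> (y)

   so the comparison is done on the bitwise scalar values. */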
6048
6049 /* Gimplify an expression sequence. This function gimplifies each
6050 expression and rewrites the original expression with the last
6051 expression of the sequence in GIMPLE form.
6052
6053 PRE_P points to the list where the side effects for all the
6054 expressions in the sequence will be emitted.
6055
6056 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6057
6058 static enum gimplify_status
6059 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6060 {
6061 tree t = *expr_p;
6062
6063 do
6064 {
6065 tree *sub_p = &TREE_OPERAND (t, 0);
6066
6067 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6068 gimplify_compound_expr (sub_p, pre_p, false);
6069 else
6070 gimplify_stmt (sub_p, pre_p);
6071
6072 t = TREE_OPERAND (t, 1);
6073 }
6074 while (TREE_CODE (t) == COMPOUND_EXPR);
6075
6076 *expr_p = t;
6077 if (want_value)
6078 return GS_OK;
6079 else
6080 {
6081 gimplify_stmt (expr_p, pre_p);
6082 return GS_ALL_DONE;
6083 }
6084 }
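/* E.g. (a sketch) gimplifying the sequence

   (a = 1, b = 2, a + b)

   with WANT_VALUE set emits "a = 1;" and "b = 2;" to *PRE_P and
   leaves "a + b" in *EXPR_P for further gimplification. */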
6085
6086 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6087 gimplify. After gimplification, EXPR_P will point to a new temporary
6088 that holds the original value of the SAVE_EXPR node.
6089
6090 PRE_P points to the list where side effects that must happen before
6091 *EXPR_P should be stored. */
6092
6093 static enum gimplify_status
6094 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6095 {
6096 enum gimplify_status ret = GS_ALL_DONE;
6097 tree val;
6098
6099 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6100 val = TREE_OPERAND (*expr_p, 0);
6101
6102 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6103 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6104 {
6105 /* The operand may be a void-valued expression. It is
6106 being executed only for its side-effects. */
6107 if (TREE_TYPE (val) == void_type_node)
6108 {
6109 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6110 is_gimple_stmt, fb_none);
6111 val = NULL;
6112 }
6113 else
6114 /* The temporary may not be an SSA name as later abnormal and EH
6115 control flow may invalidate use/def domination. When in SSA
6116 form, assume there are no such issues and SAVE_EXPRs only
6117 appear via GENERIC foldings. */
6118 val = get_initialized_tmp_var (val, pre_p, post_p,
6119 gimple_in_ssa_p (cfun));
6120
6121 TREE_OPERAND (*expr_p, 0) = val;
6122 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6123 }
6124
6125 *expr_p = val;
6126
6127 return ret;
6128 }
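/* E.g. (illustrative) if the same SAVE_EXPR <i++> node occurs twice
   in an expression, its first gimplification emits roughly

   D.1 = i;
   i = i + 1;

   and marks the node resolved; every occurrence then evaluates to
   the temporary D.1, so the increment runs only once (the name D.1
   is illustrative). */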
6129
6130 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6131
6132 unary_expr
6133 : ...
6134 | '&' varname
6135 ...
6136
6137 PRE_P points to the list where side effects that must happen before
6138 *EXPR_P should be stored.
6139
6140 POST_P points to the list where side effects that must happen after
6141 *EXPR_P should be stored. */
6142
6143 static enum gimplify_status
6144 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6145 {
6146 tree expr = *expr_p;
6147 tree op0 = TREE_OPERAND (expr, 0);
6148 enum gimplify_status ret;
6149 location_t loc = EXPR_LOCATION (*expr_p);
6150
6151 switch (TREE_CODE (op0))
6152 {
6153 case INDIRECT_REF:
6154 do_indirect_ref:
6155 /* Check if we are dealing with an expression of the form '&*ptr'.
6156 While the front end folds away '&*ptr' into 'ptr', these
6157 expressions may be generated internally by the compiler (e.g.,
6158 builtins like __builtin_va_end). */
6159 /* Caution: the silent array decomposition semantics we allow for
6160 ADDR_EXPR mean we can't always discard the pair. */
6161 /* Gimplification of the ADDR_EXPR operand may drop
6162 cv-qualification conversions, so make sure we add them if
6163 needed. */
6164 {
6165 tree op00 = TREE_OPERAND (op0, 0);
6166 tree t_expr = TREE_TYPE (expr);
6167 tree t_op00 = TREE_TYPE (op00);
6168
6169 if (!useless_type_conversion_p (t_expr, t_op00))
6170 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6171 *expr_p = op00;
6172 ret = GS_OK;
6173 }
6174 break;
6175
6176 case VIEW_CONVERT_EXPR:
6177 /* Take the address of our operand and then convert it to the type of
6178 this ADDR_EXPR.
6179
6180 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6181 all clear. The impact of this transformation is even less clear. */
6182
6183 /* If the operand is a useless conversion, look through it. Doing so
6184 guarantees that the ADDR_EXPR and its operand will remain of the
6185 same type. */
6186 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6187 op0 = TREE_OPERAND (op0, 0);
6188
6189 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6190 build_fold_addr_expr_loc (loc,
6191 TREE_OPERAND (op0, 0)));
6192 ret = GS_OK;
6193 break;
6194
6195 case MEM_REF:
6196 if (integer_zerop (TREE_OPERAND (op0, 1)))
6197 goto do_indirect_ref;
6198
6199 /* fall through */
6200
6201 default:
6202 /* If we see a call to a declared builtin or see its address
6203 being taken (we can unify those cases here) then we can mark
6204 the builtin for implicit generation by GCC. */
6205 if (TREE_CODE (op0) == FUNCTION_DECL
6206 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6207 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6208 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6209
6210 /* We use fb_either here because the C frontend sometimes takes
6211 the address of a call that returns a struct; see
6212 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6213 the implied temporary explicit. */
6214
6215 /* Make the operand addressable. */
6216 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6217 is_gimple_addressable, fb_either);
6218 if (ret == GS_ERROR)
6219 break;
6220
6221 /* Then mark it. Beware that it may not be possible to do so directly
6222 if a temporary has been created by the gimplification. */
6223 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6224
6225 op0 = TREE_OPERAND (expr, 0);
6226
6227 /* For various reasons, the gimplification of the expression
6228 may have made a new INDIRECT_REF. */
6229 if (TREE_CODE (op0) == INDIRECT_REF
6230 || (TREE_CODE (op0) == MEM_REF
6231 && integer_zerop (TREE_OPERAND (op0, 1))))
6232 goto do_indirect_ref;
6233
6234 mark_addressable (TREE_OPERAND (expr, 0));
6235
6236 /* The FEs may end up building ADDR_EXPRs early on a decl with
6237 an incomplete type. Re-build ADDR_EXPRs in canonical form
6238 here. */
6239 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6240 *expr_p = build_fold_addr_expr (op0);
6241
6242 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6243 recompute_tree_invariant_for_addr_expr (*expr_p);
6244
6245 /* If we re-built the ADDR_EXPR add a conversion to the original type
6246 if required. */
6247 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6248 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6249
6250 break;
6251 }
6252
6253 return ret;
6254 }
6255
6256 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6257 value; output operands should be a gimple lvalue. */
6258
6259 static enum gimplify_status
6260 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6261 {
6262 tree expr;
6263 int noutputs;
6264 const char **oconstraints;
6265 int i;
6266 tree link;
6267 const char *constraint;
6268 bool allows_mem, allows_reg, is_inout;
6269 enum gimplify_status ret, tret;
6270 gasm *stmt;
6271 vec<tree, va_gc> *inputs;
6272 vec<tree, va_gc> *outputs;
6273 vec<tree, va_gc> *clobbers;
6274 vec<tree, va_gc> *labels;
6275 tree link_next;
6276
6277 expr = *expr_p;
6278 noutputs = list_length (ASM_OUTPUTS (expr));
6279 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6280
6281 inputs = NULL;
6282 outputs = NULL;
6283 clobbers = NULL;
6284 labels = NULL;
6285
6286 ret = GS_ALL_DONE;
6287 link_next = NULL_TREE;
6288 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6289 {
6290 bool ok;
6291 size_t constraint_len;
6292
6293 link_next = TREE_CHAIN (link);
6294
6295 oconstraints[i]
6296 = constraint
6297 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6298 constraint_len = strlen (constraint);
6299 if (constraint_len == 0)
6300 continue;
6301
6302 ok = parse_output_constraint (&constraint, i, 0, 0,
6303 &allows_mem, &allows_reg, &is_inout);
6304 if (!ok)
6305 {
6306 ret = GS_ERROR;
6307 is_inout = false;
6308 }
6309
6310 /* If we can't make copies, we can only accept memory.
6311 Similarly for VLAs. */
6312 tree outtype = TREE_TYPE (TREE_VALUE (link));
6313 if (outtype != error_mark_node
6314 && (TREE_ADDRESSABLE (outtype)
6315 || !COMPLETE_TYPE_P (outtype)
6316 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6317 {
6318 if (allows_mem)
6319 allows_reg = 0;
6320 else
6321 {
6322 error ("impossible constraint in %<asm%>");
6323 error ("non-memory output %d must stay in memory", i);
6324 return GS_ERROR;
6325 }
6326 }
6327
6328 if (!allows_reg && allows_mem)
6329 mark_addressable (TREE_VALUE (link));
6330
6331 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6332 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6333 fb_lvalue | fb_mayfail);
6334 if (tret == GS_ERROR)
6335 {
6336 error ("invalid lvalue in %<asm%> output %d", i);
6337 ret = tret;
6338 }
6339
6340 /* If the constraint does not allow memory, make sure we gimplify
6341 it to a register if it is not one already but its base is. This
6342 happens for complex and vector components. */
6343 if (!allows_mem)
6344 {
6345 tree op = TREE_VALUE (link);
6346 if (! is_gimple_val (op)
6347 && is_gimple_reg_type (TREE_TYPE (op))
6348 && is_gimple_reg (get_base_address (op)))
6349 {
6350 tree tem = create_tmp_reg (TREE_TYPE (op));
6351 tree ass;
6352 if (is_inout)
6353 {
6354 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6355 tem, unshare_expr (op));
6356 gimplify_and_add (ass, pre_p);
6357 }
6358 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6359 gimplify_and_add (ass, post_p);
6360
6361 TREE_VALUE (link) = tem;
6362 tret = GS_OK;
6363 }
6364 }
6365
6366 vec_safe_push (outputs, link);
6367 TREE_CHAIN (link) = NULL_TREE;
6368
6369 if (is_inout)
6370 {
6371 /* An input/output operand. To give the optimizers more
6372 flexibility, split it into separate input and output
6373 operands. */
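/* E.g. (a sketch)

   asm ("incl %0" : "+r" (x));

   is handled as if it had been written with an output constraint
   "=r" (x) plus a matching input "0" (x). */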
6374 tree input;
6375 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6376 char buf[11];
6377
6378 /* Turn the in/out constraint into an output constraint. */
6379 char *p = xstrdup (constraint);
6380 p[0] = '=';
6381 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6382
6383 /* And add a matching input constraint. */
6384 if (allows_reg)
6385 {
6386 sprintf (buf, "%u", i);
6387
6388 /* If there are multiple alternatives in the constraint,
6389 handle each of them individually. Those that allow a register
6390 will be replaced with the operand number; the others will stay
6391 unchanged. */
6392 if (strchr (p, ',') != NULL)
6393 {
6394 size_t len = 0, buflen = strlen (buf);
6395 char *beg, *end, *str, *dst;
6396
6397 for (beg = p + 1;;)
6398 {
6399 end = strchr (beg, ',');
6400 if (end == NULL)
6401 end = strchr (beg, '\0');
6402 if ((size_t) (end - beg) < buflen)
6403 len += buflen + 1;
6404 else
6405 len += end - beg + 1;
6406 if (*end)
6407 beg = end + 1;
6408 else
6409 break;
6410 }
6411
6412 str = (char *) alloca (len);
6413 for (beg = p + 1, dst = str;;)
6414 {
6415 const char *tem;
6416 bool mem_p, reg_p, inout_p;
6417
6418 end = strchr (beg, ',');
6419 if (end)
6420 *end = '\0';
6421 beg[-1] = '=';
6422 tem = beg - 1;
6423 parse_output_constraint (&tem, i, 0, 0,
6424 &mem_p, &reg_p, &inout_p);
6425 if (dst != str)
6426 *dst++ = ',';
6427 if (reg_p)
6428 {
6429 memcpy (dst, buf, buflen);
6430 dst += buflen;
6431 }
6432 else
6433 {
6434 if (end)
6435 len = end - beg;
6436 else
6437 len = strlen (beg);
6438 memcpy (dst, beg, len);
6439 dst += len;
6440 }
6441 if (end)
6442 beg = end + 1;
6443 else
6444 break;
6445 }
6446 *dst = '\0';
6447 input = build_string (dst - str, str);
6448 }
6449 else
6450 input = build_string (strlen (buf), buf);
6451 }
6452 else
6453 input = build_string (constraint_len - 1, constraint + 1);
6454
6455 free (p);
6456
6457 input = build_tree_list (build_tree_list (NULL_TREE, input),
6458 unshare_expr (TREE_VALUE (link)));
6459 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6460 }
6461 }
6462
6463 link_next = NULL_TREE;
6464 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6465 {
6466 link_next = TREE_CHAIN (link);
6467 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6468 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6469 oconstraints, &allows_mem, &allows_reg);
6470
6471 /* If we can't make copies, we can only accept memory. */
6472 tree intype = TREE_TYPE (TREE_VALUE (link));
6473 if (intype != error_mark_node
6474 && (TREE_ADDRESSABLE (intype)
6475 || !COMPLETE_TYPE_P (intype)
6476 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6477 {
6478 if (allows_mem)
6479 allows_reg = 0;
6480 else
6481 {
6482 error ("impossible constraint in %<asm%>");
6483 error ("non-memory input %d must stay in memory", i);
6484 return GS_ERROR;
6485 }
6486 }
6487
6488 /* If the operand is a memory input, it should be an lvalue. */
6489 if (!allows_reg && allows_mem)
6490 {
6491 tree inputv = TREE_VALUE (link);
6492 STRIP_NOPS (inputv);
6493 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6494 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6495 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6496 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6497 || TREE_CODE (inputv) == MODIFY_EXPR)
6498 TREE_VALUE (link) = error_mark_node;
6499 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6500 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6501 if (tret != GS_ERROR)
6502 {
6503 /* Unlike output operands, memory inputs are not guaranteed
6504 to be lvalues by the FE, and while the expressions are
6505 marked addressable there, if it is e.g. a statement
6506 expression, temporaries in it might not end up being
6507 addressable. They might already be used in the IL, and thus
6508 it is too late to make them addressable now. */
6509 tree x = TREE_VALUE (link);
6510 while (handled_component_p (x))
6511 x = TREE_OPERAND (x, 0);
6512 if (TREE_CODE (x) == MEM_REF
6513 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6514 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6515 if ((VAR_P (x)
6516 || TREE_CODE (x) == PARM_DECL
6517 || TREE_CODE (x) == RESULT_DECL)
6518 && !TREE_ADDRESSABLE (x)
6519 && is_gimple_reg (x))
6520 {
6521 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6522 input_location), 0,
6523 "memory input %d is not directly addressable",
6524 i);
6525 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6526 }
6527 }
6528 mark_addressable (TREE_VALUE (link));
6529 if (tret == GS_ERROR)
6530 {
6531 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6532 "memory input %d is not directly addressable", i);
6533 ret = tret;
6534 }
6535 }
6536 else
6537 {
6538 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6539 is_gimple_asm_val, fb_rvalue);
6540 if (tret == GS_ERROR)
6541 ret = tret;
6542 }
6543
6544 TREE_CHAIN (link) = NULL_TREE;
6545 vec_safe_push (inputs, link);
6546 }
6547
6548 link_next = NULL_TREE;
6549 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6550 {
6551 link_next = TREE_CHAIN (link);
6552 TREE_CHAIN (link) = NULL_TREE;
6553 vec_safe_push (clobbers, link);
6554 }
6555
6556 link_next = NULL_TREE;
6557 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6558 {
6559 link_next = TREE_CHAIN (link);
6560 TREE_CHAIN (link) = NULL_TREE;
6561 vec_safe_push (labels, link);
6562 }
6563
6564 /* Do not add ASMs with errors to the gimple IL stream. */
6565 if (ret != GS_ERROR)
6566 {
6567 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6568 inputs, outputs, clobbers, labels);
6569
6570 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6571 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6572 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6573
6574 gimplify_seq_add_stmt (pre_p, stmt);
6575 }
6576
6577 return ret;
6578 }
6579
6580 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6581 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6582 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6583 return to this function.
6584
6585 FIXME should we complexify the prequeue handling instead? Or use flags
6586 for all the cleanups and let the optimizer tighten them up? The current
6587 code seems pretty fragile; it will break on a cleanup within any
6588 non-conditional nesting. But any such nesting would be broken, anyway;
6589 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6590 and continues out of it. We can do that at the RTL level, though, so
6591 having an optimizer to tighten up try/finally regions would be a Good
6592 Thing. */
6593
6594 static enum gimplify_status
6595 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6596 {
6597 gimple_stmt_iterator iter;
6598 gimple_seq body_sequence = NULL;
6599
6600 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6601
6602 /* We only care about the number of conditions between the innermost
6603 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6604 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6605 int old_conds = gimplify_ctxp->conditions;
6606 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6607 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6608 gimplify_ctxp->conditions = 0;
6609 gimplify_ctxp->conditional_cleanups = NULL;
6610 gimplify_ctxp->in_cleanup_point_expr = true;
6611
6612 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6613
6614 gimplify_ctxp->conditions = old_conds;
6615 gimplify_ctxp->conditional_cleanups = old_cleanups;
6616 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6617
6618 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6619 {
6620 gimple *wce = gsi_stmt (iter);
6621
6622 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6623 {
6624 if (gsi_one_before_end_p (iter))
6625 {
6626 /* Note that gsi_insert_seq_before and gsi_remove do not
6627 scan operands, unlike some other sequence mutators. */
6628 if (!gimple_wce_cleanup_eh_only (wce))
6629 gsi_insert_seq_before_without_update (&iter,
6630 gimple_wce_cleanup (wce),
6631 GSI_SAME_STMT);
6632 gsi_remove (&iter, true);
6633 break;
6634 }
6635 else
6636 {
6637 gtry *gtry;
6638 gimple_seq seq;
6639 enum gimple_try_flags kind;
6640
6641 if (gimple_wce_cleanup_eh_only (wce))
6642 kind = GIMPLE_TRY_CATCH;
6643 else
6644 kind = GIMPLE_TRY_FINALLY;
6645 seq = gsi_split_seq_after (iter);
6646
6647 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6648 /* Do not use gsi_replace here, as it may scan operands.
6649 We want to do a simple structural modification only. */
6650 gsi_set_stmt (&iter, gtry);
6651 iter = gsi_start (gtry->eval);
6652 }
6653 }
6654 else
6655 gsi_next (&iter);
6656 }
6657
6658 gimplify_seq_add_seq (pre_p, body_sequence);
6659 if (temp)
6660 {
6661 *expr_p = temp;
6662 return GS_OK;
6663 }
6664 else
6665 {
6666 *expr_p = NULL;
6667 return GS_ALL_DONE;
6668 }
6669 }
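/* E.g. (approximately) a gimplified body sequence

   stmt1;
   WCE <cleanup1>;
   stmt2;
   stmt3;

   is restructured by the loop above into

   stmt1;
   try { stmt2; stmt3; } finally { cleanup1; }

   while a WCE that ends the sequence simply has its cleanup spliced
   in place. */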
6670
6671 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6672 is the cleanup action required. EH_ONLY is true if the cleanup should
6673 only be executed if an exception is thrown, not on normal exit.
6674 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6675 only valid for clobbers. */
6676
6677 static void
6678 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6679 bool force_uncond = false)
6680 {
6681 gimple *wce;
6682 gimple_seq cleanup_stmts = NULL;
6683
6684 /* Errors can result in improperly nested cleanups, which results in
6685 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6686 if (seen_error ())
6687 return;
6688
6689 if (gimple_conditional_context ())
6690 {
6691 /* If we're in a conditional context, this is more complex. We only
6692 want to run the cleanup if we actually ran the initialization that
6693 necessitates it, but we want to run it after the end of the
6694 conditional context. So we wrap the try/finally around the
6695 condition and use a flag to determine whether or not to actually
6696 run the destructor. Thus
6697
6698 test ? f(A()) : 0
6699
6700 becomes (approximately)
6701
6702 flag = 0;
6703 try {
6704 if (test) { A::A(temp); flag = 1; val = f(temp); }
6705 else { val = 0; }
6706 } finally {
6707 if (flag) A::~A(temp);
6708 }
6709 val
6710 */
6711 if (force_uncond)
6712 {
6713 gimplify_stmt (&cleanup, &cleanup_stmts);
6714 wce = gimple_build_wce (cleanup_stmts);
6715 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6716 }
6717 else
6718 {
6719 tree flag = create_tmp_var (boolean_type_node, "cleanup");
6720 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
6721 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
6722
6723 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
6724 gimplify_stmt (&cleanup, &cleanup_stmts);
6725 wce = gimple_build_wce (cleanup_stmts);
6726
6727 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
6728 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6729 gimplify_seq_add_stmt (pre_p, ftrue);
6730
6731 /* Because of this manipulation, and the EH edges that jump
6732 threading cannot redirect, the temporary (VAR) will appear
6733 to be used uninitialized. Don't warn. */
6734 TREE_NO_WARNING (var) = 1;
6735 }
6736 }
6737 else
6738 {
6739 gimplify_stmt (&cleanup, &cleanup_stmts);
6740 wce = gimple_build_wce (cleanup_stmts);
6741 gimple_wce_set_cleanup_eh_only (wce, eh_only);
6742 gimplify_seq_add_stmt (pre_p, wce);
6743 }
6744 }
6745
6746 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
6747
6748 static enum gimplify_status
6749 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6750 {
6751 tree targ = *expr_p;
6752 tree temp = TARGET_EXPR_SLOT (targ);
6753 tree init = TARGET_EXPR_INITIAL (targ);
6754 enum gimplify_status ret;
6755
6756 bool unpoison_empty_seq = false;
6757 gimple_stmt_iterator unpoison_it;
6758
6759 if (init)
6760 {
6761 tree cleanup = NULL_TREE;
6762
6763 /* TARGET_EXPR temps aren't part of the enclosing block, so add this one
6764 to the temps list. Also handle variable-length TARGET_EXPRs. */
6765 if (!poly_int_tree_p (DECL_SIZE (temp)))
6766 {
6767 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
6768 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
6769 gimplify_vla_decl (temp, pre_p);
6770 }
6771 else
6772 {
6773 /* Save the location where we need to place the unpoisoning. It's
6774 possible that the variable will later satisfy needs_to_live_in_memory. */
6775 unpoison_it = gsi_last (*pre_p);
6776 unpoison_empty_seq = gsi_end_p (unpoison_it);
6777
6778 gimple_add_tmp_var (temp);
6779 }
6780
6781 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
6782 expression is supposed to initialize the slot. */
6783 if (VOID_TYPE_P (TREE_TYPE (init)))
6784 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6785 else
6786 {
6787 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
6788 init = init_expr;
6789 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
6790 init = NULL;
6791 ggc_free (init_expr);
6792 }
6793 if (ret == GS_ERROR)
6794 {
6795 /* PR c++/28266 Make sure this is expanded only once. */
6796 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6797 return GS_ERROR;
6798 }
6799 if (init)
6800 gimplify_and_add (init, pre_p);
6801
6802 /* If needed, push the cleanup for the temp. */
6803 if (TARGET_EXPR_CLEANUP (targ))
6804 {
6805 if (CLEANUP_EH_ONLY (targ))
6806 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
6807 CLEANUP_EH_ONLY (targ), pre_p);
6808 else
6809 cleanup = TARGET_EXPR_CLEANUP (targ);
6810 }
6811
6812 /* Add a clobber for the temporary going out of scope, like
6813 gimplify_bind_expr. */
6814 if (gimplify_ctxp->in_cleanup_point_expr
6815 && needs_to_live_in_memory (temp))
6816 {
6817 if (flag_stack_reuse == SR_ALL)
6818 {
6819 tree clobber = build_clobber (TREE_TYPE (temp));
6820 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
6821 gimple_push_cleanup (temp, clobber, false, pre_p, true);
6822 }
6823 if (asan_poisoned_variables
6824 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
6825 && !TREE_STATIC (temp)
6826 && dbg_cnt (asan_use_after_scope)
6827 && !gimplify_omp_ctxp)
6828 {
6829 tree asan_cleanup = build_asan_poison_call_expr (temp);
6830 if (asan_cleanup)
6831 {
6832 if (unpoison_empty_seq)
6833 unpoison_it = gsi_start (*pre_p);
6834
6835 asan_poison_variable (temp, false, &unpoison_it,
6836 unpoison_empty_seq);
6837 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
6838 }
6839 }
6840 }
6841 if (cleanup)
6842 gimple_push_cleanup (temp, cleanup, false, pre_p);
6843
6844 /* Only expand this once. */
6845 TREE_OPERAND (targ, 3) = init;
6846 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
6847 }
6848 else
6849 /* We should have expanded this before. */
6850 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
6851
6852 *expr_p = temp;
6853 return GS_OK;
6854 }
6855
6856 /* Gimplification of expression trees. */
6857
6858 /* Gimplify an expression which appears in statement context. The
6859 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6860 NULL, a new sequence is allocated.
6861
6862 Return true if we actually added a statement to the queue. */
6863
6864 bool
6865 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6866 {
6867 gimple_seq_node last;
6868
6869 last = gimple_seq_last (*seq_p);
6870 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6871 return last != gimple_seq_last (*seq_p);
6872 }
6873
6874 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
6875 parallels. If entries already exist, force them to be some flavor of
6876 private. If there is no enclosing parallel, do nothing. */
6877
6878 void
6879 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6880 {
6881 splay_tree_node n;
6882
6883 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6884 return;
6885
6886 do
6887 {
6888 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6889 if (n != NULL)
6890 {
6891 if (n->value & GOVD_SHARED)
6892 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6893 else if (n->value & GOVD_MAP)
6894 n->value |= GOVD_MAP_TO_ONLY;
6895 else
6896 return;
6897 }
6898 else if ((ctx->region_type & ORT_TARGET) != 0)
6899 {
6900 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6901 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6902 else
6903 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6904 }
6905 else if (ctx->region_type != ORT_WORKSHARE
6906 && ctx->region_type != ORT_TASKGROUP
6907 && ctx->region_type != ORT_SIMD
6908 && ctx->region_type != ORT_ACC
6909 && !(ctx->region_type & ORT_TARGET_DATA))
6910 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6911
6912 ctx = ctx->outer_context;
6913 }
6914 while (ctx);
6915 }
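/* E.g. (a sketch) when a VLA "int a[n]" is privatized in a parallel
   region, the gimplified temporary holding its size must be made
   firstprivate in the enclosing parallels so that each thread can
   size its private copy; existing shared entries are forced to
   firstprivate, and mapped entries get GOVD_MAP_TO_ONLY. */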
6916
6917 /* Similarly for each of the type sizes of TYPE. */
6918
6919 static void
6920 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6921 {
6922 if (type == NULL || type == error_mark_node)
6923 return;
6924 type = TYPE_MAIN_VARIANT (type);
6925
6926 if (ctx->privatized_types->add (type))
6927 return;
6928
6929 switch (TREE_CODE (type))
6930 {
6931 case INTEGER_TYPE:
6932 case ENUMERAL_TYPE:
6933 case BOOLEAN_TYPE:
6934 case REAL_TYPE:
6935 case FIXED_POINT_TYPE:
6936 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6937 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6938 break;
6939
6940 case ARRAY_TYPE:
6941 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6942 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6943 break;
6944
6945 case RECORD_TYPE:
6946 case UNION_TYPE:
6947 case QUAL_UNION_TYPE:
6948 {
6949 tree field;
6950 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6951 if (TREE_CODE (field) == FIELD_DECL)
6952 {
6953 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6954 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6955 }
6956 }
6957 break;
6958
6959 case POINTER_TYPE:
6960 case REFERENCE_TYPE:
6961 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6962 break;
6963
6964 default:
6965 break;
6966 }
6967
6968 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6969 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6970 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6971 }
6972
6973 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6974
6975 static void
6976 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6977 {
6978 splay_tree_node n;
6979 unsigned int nflags;
6980 tree t;
6981
6982 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6983 return;
6984
6985 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
6986 there are constructors involved somewhere. The exception is a shared
6987 clause; there is nothing privatized in that case. */
6988 if ((flags & GOVD_SHARED) == 0
6989 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6990 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6991 flags |= GOVD_SEEN;
6992
6993 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6994 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6995 {
6996 /* We shouldn't be re-adding the decl with the same data
6997 sharing class. */
6998 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6999 nflags = n->value | flags;
7000 /* The only combination of data sharing classes we should see is
7001 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7002 reduction variables to be used in data sharing clauses. */
7003 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7004 || ((nflags & GOVD_DATA_SHARE_CLASS)
7005 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7006 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7007 n->value = nflags;
7008 return;
7009 }
7010
7011 /* When adding a variable-sized variable, we have to handle all sorts
7012 of additional bits of data: the pointer replacement variable, and
7013 the parameters of the type. */
7014 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7015 {
7016 /* Add the pointer replacement variable as PRIVATE if the variable
7017 replacement is private, else FIRSTPRIVATE since we'll need the
7018 address of the original variable either for SHARED, or for the
7019 copy into or out of the context. */
7020 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7021 {
7022 if (flags & GOVD_MAP)
7023 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7024 else if (flags & GOVD_PRIVATE)
7025 nflags = GOVD_PRIVATE;
7026 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7027 && (flags & GOVD_FIRSTPRIVATE))
7028 || (ctx->region_type == ORT_TARGET_DATA
7029 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7030 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7031 else
7032 nflags = GOVD_FIRSTPRIVATE;
7033 nflags |= flags & GOVD_SEEN;
7034 t = DECL_VALUE_EXPR (decl);
7035 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7036 t = TREE_OPERAND (t, 0);
7037 gcc_assert (DECL_P (t));
7038 omp_add_variable (ctx, t, nflags);
7039 }
7040
7041 /* Add all of the variable and type parameters (which should have
7042 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7043 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7044 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7045 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7046
7047 /* The variable-sized variable itself is never SHARED, only some form
7048 of PRIVATE. The sharing would take place via the pointer variable
7049 which we remapped above. */
7050 if (flags & GOVD_SHARED)
7051 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7052 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7053
7054 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7055 alloca statement we generate for the variable, so make sure it
7056 is available. This isn't automatically needed for the SHARED
7057 case, since we won't be allocating local storage then.
7058 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
7059 in that case omp_notice_variable will be called later
7060 on when it is gimplified. */
7061 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7062 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7063 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7064 }
7065 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7066 && lang_hooks.decls.omp_privatize_by_reference (decl))
7067 {
7068 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7069
7070 /* Similar to the direct variable sized case above, we'll need the
7071 size of references being privatized. */
7072 if ((flags & GOVD_SHARED) == 0)
7073 {
7074 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7075 if (DECL_P (t))
7076 omp_notice_variable (ctx, t, true);
7077 }
7078 }
7079
7080 if (n != NULL)
7081 n->value |= flags;
7082 else
7083 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7084
7085 /* For reduction clauses in OpenACC loop directives, by default create a
7086 copy clause on the enclosing parallel construct for carrying back the
7087 results. */
7088 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7089 {
7090 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7091 while (outer_ctx)
7092 {
7093 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7094 if (n != NULL)
7095 {
7096 /* Ignore local variables and explicitly declared clauses. */
7097 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7098 break;
7099 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7100 {
7101 /* According to the OpenACC spec, such a reduction variable
7102 should already have a copy map on a kernels construct;
7103 verify that here. */
7104 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7105 && (n->value & GOVD_MAP));
7106 }
7107 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7108 {
7109 /* Remove firstprivate and make it a copy map. */
7110 n->value &= ~GOVD_FIRSTPRIVATE;
7111 n->value |= GOVD_MAP;
7112 }
7113 }
7114 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7115 {
7116 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7117 GOVD_MAP | GOVD_SEEN);
7118 break;
7119 }
7120 outer_ctx = outer_ctx->outer_context;
7121 }
7122 }
7123 }
7124
7125 /* Notice a threadprivate variable DECL used in OMP context CTX.
7126 This just emits diagnostics about threadprivate variable uses in
7127 untied tasks, target regions and order(concurrent) regions. If DECL2
7128 is non-NULL, prevent this diagnostic on that variable. */
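/* For instance (made-up example):

     int tp;
     #pragma omp threadprivate (tp)

     void f (void)
     {
     #pragma omp task untied
       tp++;   <-- "threadprivate variable 'tp' used in untied task"
     }
*/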
7129
7130 static bool
7131 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7132 tree decl2)
7133 {
7134 splay_tree_node n;
7135 struct gimplify_omp_ctx *octx;
7136
7137 for (octx = ctx; octx; octx = octx->outer_context)
7138 if ((octx->region_type & ORT_TARGET) != 0
7139 || octx->order_concurrent)
7140 {
7141 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7142 if (n == NULL)
7143 {
7144 if (octx->order_concurrent)
7145 {
7146 error ("threadprivate variable %qE used in a region with"
7147 " %<order(concurrent)%> clause", DECL_NAME (decl));
7148 inform (octx->location, "enclosing region");
7149 }
7150 else
7151 {
7152 error ("threadprivate variable %qE used in target region",
7153 DECL_NAME (decl));
7154 inform (octx->location, "enclosing target region");
7155 }
7156 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7157 }
7158 if (decl2)
7159 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7160 }
7161
7162 if (ctx->region_type != ORT_UNTIED_TASK)
7163 return false;
7164 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7165 if (n == NULL)
7166 {
7167 error ("threadprivate variable %qE used in untied task",
7168 DECL_NAME (decl));
7169 inform (ctx->location, "enclosing task");
7170 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7171 }
7172 if (decl2)
7173 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7174 return false;
7175 }
7176
7177 /* Return true if global var DECL is device resident. */
7178
7179 static bool
7180 device_resident_p (tree decl)
7181 {
7182 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7183
7184 if (!attr)
7185 return false;
7186
7187 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7188 {
7189 tree c = TREE_VALUE (t);
7190 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7191 return true;
7192 }
7193
7194 return false;
7195 }
7196
7197 /* Return true if DECL has an ACC DECLARE attribute. */
7198
7199 static bool
7200 is_oacc_declared (tree decl)
7201 {
7202 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7203 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7204 return declared != NULL_TREE;
7205 }
7206
7207 /* Determine outer default flags for DECL mentioned in an OMP region
7208 but not declared in an enclosing clause.
7209
7210 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7211 remapped firstprivate instead of shared. To some extent this is
7212 addressed in omp_firstprivatize_type_sizes, but not
7213 effectively. */
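/* A minimal sketch of when OMP_CLAUSE_DEFAULT_NONE triggers
   (example made up):

     int x = 0;
   #pragma omp parallel default(none)
     x++;

   'x' appears in no clause, so the case below reports
   "'x' not specified in enclosing 'parallel'".  */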
7214
7215 static unsigned
7216 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7217 bool in_code, unsigned flags)
7218 {
7219 enum omp_clause_default_kind default_kind = ctx->default_kind;
7220 enum omp_clause_default_kind kind;
7221
7222 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7223 if (ctx->region_type & ORT_TASK)
7224 {
7225 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7226
7227 /* The event-handle specified by a detach clause should always be firstprivate,
7228 regardless of the current default. */
7229 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7230 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7231 }
7232 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7233 default_kind = kind;
7234 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7235 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7236
7237 switch (default_kind)
7238 {
7239 case OMP_CLAUSE_DEFAULT_NONE:
7240 {
7241 const char *rtype;
7242
7243 if (ctx->region_type & ORT_PARALLEL)
7244 rtype = "parallel";
7245 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7246 rtype = "taskloop";
7247 else if (ctx->region_type & ORT_TASK)
7248 rtype = "task";
7249 else if (ctx->region_type & ORT_TEAMS)
7250 rtype = "teams";
7251 else
7252 gcc_unreachable ();
7253
7254 error ("%qE not specified in enclosing %qs",
7255 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7256 inform (ctx->location, "enclosing %qs", rtype);
7257 }
7258 /* FALLTHRU */
7259 case OMP_CLAUSE_DEFAULT_SHARED:
7260 flags |= GOVD_SHARED;
7261 break;
7262 case OMP_CLAUSE_DEFAULT_PRIVATE:
7263 flags |= GOVD_PRIVATE;
7264 break;
7265 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7266 flags |= GOVD_FIRSTPRIVATE;
7267 break;
7268 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7269 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7270 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7271 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7272 {
7273 omp_notice_variable (octx, decl, in_code);
7274 for (; octx; octx = octx->outer_context)
7275 {
7276 splay_tree_node n2;
7277
7278 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7279 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7280 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7281 continue;
7282 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7283 {
7284 flags |= GOVD_FIRSTPRIVATE;
7285 goto found_outer;
7286 }
7287 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7288 {
7289 flags |= GOVD_SHARED;
7290 goto found_outer;
7291 }
7292 }
7293 }
7294
7295 if (TREE_CODE (decl) == PARM_DECL
7296 || (!is_global_var (decl)
7297 && DECL_CONTEXT (decl) == current_function_decl))
7298 flags |= GOVD_FIRSTPRIVATE;
7299 else
7300 flags |= GOVD_SHARED;
7301 found_outer:
7302 break;
7303
7304 default:
7305 gcc_unreachable ();
7306 }
7307
7308 return flags;
7309 }
7310
7311
7312 /* Determine outer default flags for DECL mentioned in an OACC region
7313 but not declared in an enclosing clause. */
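/* E.g. (illustrative):

     float s = 0.0f;
   #pragma acc parallel
     s += 1.0f;

   a scalar like 's' with no explicit clause defaults to 'firstprivate'
   on parallel/serial constructs, but to 'copy' on 'kernels';
   aggregates default to 'present_or_copy' (or 'present' under
   default(present)) on all of them.  */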
7314
7315 static unsigned
7316 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7317 {
7318 const char *rkind;
7319 bool on_device = false;
7320 bool is_private = false;
7321 bool declared = is_oacc_declared (decl);
7322 tree type = TREE_TYPE (decl);
7323
7324 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7325 type = TREE_TYPE (type);
7326
7327 /* For Fortran COMMON blocks, only variables in those blocks that are
7328 actually used are transferred and remapped. The block itself will have
7329 a private clause to avoid transferring the data twice.
7330 The hook evaluates to false by default. For a variable in Fortran's
7331 COMMON or EQUIVALENCE block, it returns 'true' (as we have shared=false),
7332 since only the variables in such a COMMON/EQUIVALENCE block shall be
7333 privatized, not the whole block. For C++ and Fortran, it can also be
7334 true under certain other conditions, if DECL_HAS_VALUE_EXPR. */
7335 if (RECORD_OR_UNION_TYPE_P (type))
7336 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7337
7338 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7339 && is_global_var (decl)
7340 && device_resident_p (decl)
7341 && !is_private)
7342 {
7343 on_device = true;
7344 flags |= GOVD_MAP_TO_ONLY;
7345 }
7346
7347 switch (ctx->region_type)
7348 {
7349 case ORT_ACC_KERNELS:
7350 rkind = "kernels";
7351
7352 if (is_private)
7353 flags |= GOVD_FIRSTPRIVATE;
7354 else if (AGGREGATE_TYPE_P (type))
7355 {
7356 /* Aggregates default to 'present_or_copy', or 'present'. */
7357 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7358 flags |= GOVD_MAP;
7359 else
7360 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7361 }
7362 else
7363 /* Scalars default to 'copy'. */
7364 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7365
7366 break;
7367
7368 case ORT_ACC_PARALLEL:
7369 case ORT_ACC_SERIAL:
7370 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7371
7372 if (is_private)
7373 flags |= GOVD_FIRSTPRIVATE;
7374 else if (on_device || declared)
7375 flags |= GOVD_MAP;
7376 else if (AGGREGATE_TYPE_P (type))
7377 {
7378 /* Aggregates default to 'present_or_copy', or 'present'. */
7379 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7380 flags |= GOVD_MAP;
7381 else
7382 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7383 }
7384 else
7385 /* Scalars default to 'firstprivate'. */
7386 flags |= GOVD_FIRSTPRIVATE;
7387
7388 break;
7389
7390 default:
7391 gcc_unreachable ();
7392 }
7393
7394 if (DECL_ARTIFICIAL (decl))
7395 ; /* We can get compiler-generated decls, and should not complain
7396 about them. */
7397 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7398 {
7399 error ("%qE not specified in enclosing OpenACC %qs construct",
7400 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7401 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7402 }
7403 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7404 ; /* Handled above. */
7405 else
7406 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7407
7408 return flags;
7409 }
7410
7411 /* Record the fact that DECL was used within the OMP context CTX.
7412 IN_CODE is true when real code uses DECL, and false when we should
7413 merely emit default(none) errors. Return true if DECL is going to
7414 be remapped and thus DECL shouldn't be gimplified into its
7415 DECL_VALUE_EXPR (if any). */
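/* For example (made-up snippet):

     int x = 0;
   #pragma omp parallel shared(x)
   #pragma omp task
     x++;

   the use of 'x' inside the task reaches this function for the task
   context with IN_CODE set; no clause mentions 'x' there, so the
   implicit data-sharing rules in omp_default_clause decide between
   firstprivate and shared.  */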
7416
7417 static bool
7418 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7419 {
7420 splay_tree_node n;
7421 unsigned flags = in_code ? GOVD_SEEN : 0;
7422 bool ret = false, shared;
7423
7424 if (error_operand_p (decl))
7425 return false;
7426
7427 if (ctx->region_type == ORT_NONE)
7428 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7429
7430 if (is_global_var (decl))
7431 {
7432 /* Threadprivate variables are predetermined. */
7433 if (DECL_THREAD_LOCAL_P (decl))
7434 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7435
7436 if (DECL_HAS_VALUE_EXPR_P (decl))
7437 {
7438 if (ctx->region_type & ORT_ACC)
7439 /* For OpenACC, defer expansion of the value to avoid transferring
7440 privatized common block data instead of the im-/explicitly
7441 transferred variables that are in common blocks. */
7442 ;
7443 else
7444 {
7445 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7446
7447 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7448 return omp_notice_threadprivate_variable (ctx, decl, value);
7449 }
7450 }
7451
7452 if (gimplify_omp_ctxp->outer_context == NULL
7453 && VAR_P (decl)
7454 && oacc_get_fn_attrib (current_function_decl))
7455 {
7456 location_t loc = DECL_SOURCE_LOCATION (decl);
7457
7458 if (lookup_attribute ("omp declare target link",
7459 DECL_ATTRIBUTES (decl)))
7460 {
7461 error_at (loc,
7462 "%qE with %<link%> clause used in %<routine%> function",
7463 DECL_NAME (decl));
7464 return false;
7465 }
7466 else if (!lookup_attribute ("omp declare target",
7467 DECL_ATTRIBUTES (decl)))
7468 {
7469 error_at (loc,
7470 "%qE requires a %<declare%> directive for use "
7471 "in a %<routine%> function", DECL_NAME (decl));
7472 return false;
7473 }
7474 }
7475 }
7476
7477 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7478 if ((ctx->region_type & ORT_TARGET) != 0)
7479 {
7480 if (ctx->region_type & ORT_ACC)
7481 /* For OpenACC, as remarked above, defer expansion. */
7482 shared = false;
7483 else
7484 shared = true;
7485
7486 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7487 if (n == NULL)
7488 {
7489 unsigned nflags = flags;
7490 if ((ctx->region_type & ORT_ACC) == 0)
7491 {
7492 bool is_declare_target = false;
7493 if (is_global_var (decl)
7494 && varpool_node::get_create (decl)->offloadable)
7495 {
7496 struct gimplify_omp_ctx *octx;
7497 for (octx = ctx->outer_context;
7498 octx; octx = octx->outer_context)
7499 {
7500 n = splay_tree_lookup (octx->variables,
7501 (splay_tree_key)decl);
7502 if (n
7503 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7504 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7505 break;
7506 }
7507 is_declare_target = octx == NULL;
7508 }
7509 if (!is_declare_target)
7510 {
7511 int gdmk;
7512 enum omp_clause_defaultmap_kind kind;
7513 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7514 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7515 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7516 == POINTER_TYPE)))
7517 gdmk = GDMK_POINTER;
7518 else if (lang_hooks.decls.omp_scalar_p (decl))
7519 gdmk = GDMK_SCALAR;
7520 else
7521 gdmk = GDMK_AGGREGATE;
7522 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7523 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7524 {
7525 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7526 nflags |= GOVD_FIRSTPRIVATE;
7527 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7528 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7529 else
7530 gcc_unreachable ();
7531 }
7532 else if (ctx->defaultmap[gdmk] == 0)
7533 {
7534 tree d = lang_hooks.decls.omp_report_decl (decl);
7535 error ("%qE not specified in enclosing %<target%>",
7536 DECL_NAME (d));
7537 inform (ctx->location, "enclosing %<target%>");
7538 }
7539 else if (ctx->defaultmap[gdmk]
7540 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7541 nflags |= ctx->defaultmap[gdmk];
7542 else
7543 {
7544 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7545 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7546 }
7547 }
7548 }
7549
7550 struct gimplify_omp_ctx *octx = ctx->outer_context;
7551 if ((ctx->region_type & ORT_ACC) && octx)
7552 {
7553 /* Look in outer OpenACC contexts, to see if there's a
7554 data attribute for this variable. */
7555 omp_notice_variable (octx, decl, in_code);
7556
7557 for (; octx; octx = octx->outer_context)
7558 {
7559 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7560 break;
7561 splay_tree_node n2
7562 = splay_tree_lookup (octx->variables,
7563 (splay_tree_key) decl);
7564 if (n2)
7565 {
7566 if (octx->region_type == ORT_ACC_HOST_DATA)
7567 error ("variable %qE declared in enclosing "
7568 "%<host_data%> region", DECL_NAME (decl));
7569 nflags |= GOVD_MAP;
7570 if (octx->region_type == ORT_ACC_DATA
7571 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7572 nflags |= GOVD_MAP_0LEN_ARRAY;
7573 goto found_outer;
7574 }
7575 }
7576 }
7577
7578 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7579 | GOVD_MAP_ALLOC_ONLY)) == flags)
7580 {
7581 tree type = TREE_TYPE (decl);
7582
7583 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7584 && lang_hooks.decls.omp_privatize_by_reference (decl))
7585 type = TREE_TYPE (type);
7586 if (!lang_hooks.types.omp_mappable_type (type))
7587 {
7588 error ("%qD referenced in target region does not have "
7589 "a mappable type", decl);
7590 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7591 }
7592 else
7593 {
7594 if ((ctx->region_type & ORT_ACC) != 0)
7595 nflags = oacc_default_clause (ctx, decl, flags);
7596 else
7597 nflags |= GOVD_MAP;
7598 }
7599 }
7600 found_outer:
7601 omp_add_variable (ctx, decl, nflags);
7602 }
7603 else
7604 {
7605 /* If nothing changed, there's nothing left to do. */
7606 if ((n->value & flags) == flags)
7607 return ret;
7608 flags |= n->value;
7609 n->value = flags;
7610 }
7611 goto do_outer;
7612 }
7613
7614 if (n == NULL)
7615 {
7616 if (ctx->region_type == ORT_WORKSHARE
7617 || ctx->region_type == ORT_TASKGROUP
7618 || ctx->region_type == ORT_SIMD
7619 || ctx->region_type == ORT_ACC
7620 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7621 goto do_outer;
7622
7623 flags = omp_default_clause (ctx, decl, in_code, flags);
7624
7625 if ((flags & GOVD_PRIVATE)
7626 && lang_hooks.decls.omp_private_outer_ref (decl))
7627 flags |= GOVD_PRIVATE_OUTER_REF;
7628
7629 omp_add_variable (ctx, decl, flags);
7630
7631 shared = (flags & GOVD_SHARED) != 0;
7632 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7633 goto do_outer;
7634 }
7635
7636 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7637 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
7638 if (ctx->region_type == ORT_SIMD
7639 && ctx->in_for_exprs
7640 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
7641 == GOVD_PRIVATE))
7642 flags &= ~GOVD_SEEN;
7643
7644 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7645 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7646 && DECL_SIZE (decl))
7647 {
7648 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7649 {
7650 splay_tree_node n2;
7651 tree t = DECL_VALUE_EXPR (decl);
7652 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7653 t = TREE_OPERAND (t, 0);
7654 gcc_assert (DECL_P (t));
7655 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7656 n2->value |= GOVD_SEEN;
7657 }
7658 else if (lang_hooks.decls.omp_privatize_by_reference (decl)
7659 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7660 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7661 != INTEGER_CST))
7662 {
7663 splay_tree_node n2;
7664 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7665 gcc_assert (DECL_P (t));
7666 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7667 if (n2)
7668 omp_notice_variable (ctx, t, true);
7669 }
7670 }
7671
7672 if (ctx->region_type & ORT_ACC)
7673 /* For OpenACC, as remarked above, defer expansion. */
7674 shared = false;
7675 else
7676 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7677 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7678
7679 /* If nothing changed, there's nothing left to do. */
7680 if ((n->value & flags) == flags)
7681 return ret;
7682 flags |= n->value;
7683 n->value = flags;
7684
7685 do_outer:
7686 /* If the variable is private in the current context, then we don't
7687 need to propagate anything to an outer context. */
7688 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7689 return ret;
7690 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7691 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7692 return ret;
7693 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7694 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7695 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7696 return ret;
7697 if (ctx->outer_context
7698 && omp_notice_variable (ctx->outer_context, decl, in_code))
7699 return true;
7700 return ret;
7701 }
7702
7703 /* Verify that DECL is private within CTX. If there's specific information
7704 to the contrary in the innermost scope, generate an error. */
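/* E.g. (made-up example):

     #pragma omp parallel for firstprivate (i)
       for (i = 0; i < 64; i++)
	 ;

   yields "iteration variable 'i' should not be firstprivate".  */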
7705
7706 static bool
7707 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7708 {
7709 splay_tree_node n;
7710
7711 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7712 if (n != NULL)
7713 {
7714 if (n->value & GOVD_SHARED)
7715 {
7716 if (ctx == gimplify_omp_ctxp)
7717 {
7718 if (simd)
7719 error ("iteration variable %qE is predetermined linear",
7720 DECL_NAME (decl));
7721 else
7722 error ("iteration variable %qE should be private",
7723 DECL_NAME (decl));
7724 n->value = GOVD_PRIVATE;
7725 return true;
7726 }
7727 else
7728 return false;
7729 }
7730 else if ((n->value & GOVD_EXPLICIT) != 0
7731 && (ctx == gimplify_omp_ctxp
7732 || (ctx->region_type == ORT_COMBINED_PARALLEL
7733 && gimplify_omp_ctxp->outer_context == ctx)))
7734 {
7735 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7736 error ("iteration variable %qE should not be firstprivate",
7737 DECL_NAME (decl));
7738 else if ((n->value & GOVD_REDUCTION) != 0)
7739 error ("iteration variable %qE should not be reduction",
7740 DECL_NAME (decl));
7741 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
7742 error ("iteration variable %qE should not be linear",
7743 DECL_NAME (decl));
7744 }
7745 return (ctx == gimplify_omp_ctxp
7746 || (ctx->region_type == ORT_COMBINED_PARALLEL
7747 && gimplify_omp_ctxp->outer_context == ctx));
7748 }
7749
7750 if (ctx->region_type != ORT_WORKSHARE
7751 && ctx->region_type != ORT_TASKGROUP
7752 && ctx->region_type != ORT_SIMD
7753 && ctx->region_type != ORT_ACC)
7754 return false;
7755 else if (ctx->outer_context)
7756 return omp_is_private (ctx->outer_context, decl, simd);
7757 return false;
7758 }
7759
7760 /* Return true if DECL is private within a parallel region
7761 that binds to the current construct's context or in parallel
7762 region's REDUCTION clause. */
7763
7764 static bool
7765 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7766 {
7767 splay_tree_node n;
7768
7769 do
7770 {
7771 ctx = ctx->outer_context;
7772 if (ctx == NULL)
7773 {
7774 if (is_global_var (decl))
7775 return false;
7776
7777 /* References might be private, but they might be shared too;
7778 when checking for copyprivate, assume they might be
7779 private, otherwise assume they might be shared. */
7780 if (copyprivate)
7781 return true;
7782
7783 if (lang_hooks.decls.omp_privatize_by_reference (decl))
7784 return false;
7785
7786 /* Treat C++ privatized non-static data members outside
7787 of the privatization the same. */
7788 if (omp_member_access_dummy_var (decl))
7789 return false;
7790
7791 return true;
7792 }
7793
7794 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7795
7796 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7797 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7798 continue;
7799
7800 if (n != NULL)
7801 {
7802 if ((n->value & GOVD_LOCAL) != 0
7803 && omp_member_access_dummy_var (decl))
7804 return false;
7805 return (n->value & GOVD_SHARED) == 0;
7806 }
7807 }
7808 while (ctx->region_type == ORT_WORKSHARE
7809 || ctx->region_type == ORT_TASKGROUP
7810 || ctx->region_type == ORT_SIMD
7811 || ctx->region_type == ORT_ACC);
7812 return false;
7813 }
7814
7815 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7816
7817 static tree
7818 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7819 {
7820 tree t = *tp;
7821
7822 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
7823 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7824 return t;
7825
7826 if (IS_TYPE_OR_DECL_P (t))
7827 *walk_subtrees = 0;
7828 return NULL_TREE;
7829 }
7830
7831 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7832 lower all the depend clauses by populating the corresponding depend
7833 array. Returns 0 if there are no such depend clauses, 2 if all
7834 depend clauses should be removed, and 1 otherwise. */
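/* A depend clause with iterators looks like (illustrative):

     #pragma omp task depend (iterator (i = 0 : n), in : a[i])

   i.e. one 'in' dependence on a[i] for each i in [0, n); the code
   below computes the iteration counts and fills the depend array
   with the individual addresses at runtime.  */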
7835
7836 static int
7837 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7838 {
7839 tree c;
7840 gimple *g;
7841 size_t n[4] = { 0, 0, 0, 0 };
7842 bool unused[4];
7843 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7844 tree last_iter = NULL_TREE, last_count = NULL_TREE;
7845 size_t i, j;
7846 location_t first_loc = UNKNOWN_LOCATION;
7847
7848 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7849 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7850 {
7851 switch (OMP_CLAUSE_DEPEND_KIND (c))
7852 {
7853 case OMP_CLAUSE_DEPEND_IN:
7854 i = 2;
7855 break;
7856 case OMP_CLAUSE_DEPEND_OUT:
7857 case OMP_CLAUSE_DEPEND_INOUT:
7858 i = 0;
7859 break;
7860 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7861 i = 1;
7862 break;
7863 case OMP_CLAUSE_DEPEND_DEPOBJ:
7864 i = 3;
7865 break;
7866 case OMP_CLAUSE_DEPEND_SOURCE:
7867 case OMP_CLAUSE_DEPEND_SINK:
7868 continue;
7869 default:
7870 gcc_unreachable ();
7871 }
7872 tree t = OMP_CLAUSE_DECL (c);
7873 if (first_loc == UNKNOWN_LOCATION)
7874 first_loc = OMP_CLAUSE_LOCATION (c);
7875 if (TREE_CODE (t) == TREE_LIST
7876 && TREE_PURPOSE (t)
7877 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7878 {
7879 if (TREE_PURPOSE (t) != last_iter)
7880 {
7881 tree tcnt = size_one_node;
7882 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7883 {
7884 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7885 is_gimple_val, fb_rvalue) == GS_ERROR
7886 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7887 is_gimple_val, fb_rvalue) == GS_ERROR
7888 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7889 is_gimple_val, fb_rvalue) == GS_ERROR
7890 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7891 is_gimple_val, fb_rvalue)
7892 == GS_ERROR))
7893 return 2;
7894 tree var = TREE_VEC_ELT (it, 0);
7895 tree begin = TREE_VEC_ELT (it, 1);
7896 tree end = TREE_VEC_ELT (it, 2);
7897 tree step = TREE_VEC_ELT (it, 3);
7898 tree orig_step = TREE_VEC_ELT (it, 4);
7899 tree type = TREE_TYPE (var);
7900 tree stype = TREE_TYPE (step);
7901 location_t loc = DECL_SOURCE_LOCATION (var);
7902 tree endmbegin;
7903 /* Compute count for this iterator as
7904 orig_step > 0
7905 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7906 : (begin > end ? (end - begin + (step + 1)) / step : 0)
7907 and compute product of those for the entire depend
7908 clause. */
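	      /* Worked example with made-up numbers: for begin = 0,
		 end = 10, step = 3 and orig_step > 0 this yields
		 (10 - 0 + (3 - 1)) / 3 = 4, i.e. iterations
		 0, 3, 6 and 9.  */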
7909 if (POINTER_TYPE_P (type))
7910 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7911 stype, end, begin);
7912 else
7913 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7914 end, begin);
7915 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7916 step,
7917 build_int_cst (stype, 1));
7918 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7919 build_int_cst (stype, 1));
7920 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7921 unshare_expr (endmbegin),
7922 stepm1);
7923 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7924 pos, step);
7925 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7926 endmbegin, stepp1);
7927 if (TYPE_UNSIGNED (stype))
7928 {
7929 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7930 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7931 }
7932 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7933 neg, step);
7934 step = NULL_TREE;
7935 tree cond = fold_build2_loc (loc, LT_EXPR,
7936 boolean_type_node,
7937 begin, end);
7938 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7939 build_int_cst (stype, 0));
7940 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7941 end, begin);
7942 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7943 build_int_cst (stype, 0));
7944 tree osteptype = TREE_TYPE (orig_step);
7945 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7946 orig_step,
7947 build_int_cst (osteptype, 0));
7948 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7949 cond, pos, neg);
7950 cnt = fold_convert_loc (loc, sizetype, cnt);
7951 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7952 fb_rvalue) == GS_ERROR)
7953 return 2;
7954 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7955 }
7956 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7957 fb_rvalue) == GS_ERROR)
7958 return 2;
7959 last_iter = TREE_PURPOSE (t);
7960 last_count = tcnt;
7961 }
7962 if (counts[i] == NULL_TREE)
7963 counts[i] = last_count;
7964 else
7965 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7966 PLUS_EXPR, counts[i], last_count);
7967 }
7968 else
7969 n[i]++;
7970 }
7971 for (i = 0; i < 4; i++)
7972 if (counts[i])
7973 break;
7974 if (i == 4)
7975 return 0;
7976
7977 tree total = size_zero_node;
7978 for (i = 0; i < 4; i++)
7979 {
7980 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7981 if (counts[i] == NULL_TREE)
7982 counts[i] = size_zero_node;
7983 if (n[i])
7984 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7985 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7986 fb_rvalue) == GS_ERROR)
7987 return 2;
7988 total = size_binop (PLUS_EXPR, total, counts[i]);
7989 }
7990
7991 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7992 == GS_ERROR)
7993 return 2;
7994 bool is_old = unused[1] && unused[3];
7995 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7996 size_int (is_old ? 1 : 4));
7997 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7998 tree array = create_tmp_var_raw (type);
7999 TREE_ADDRESSABLE (array) = 1;
8000 if (!poly_int_tree_p (totalpx))
8001 {
8002 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8003 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8004 if (gimplify_omp_ctxp)
8005 {
8006 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8007 while (ctx
8008 && (ctx->region_type == ORT_WORKSHARE
8009 || ctx->region_type == ORT_TASKGROUP
8010 || ctx->region_type == ORT_SIMD
8011 || ctx->region_type == ORT_ACC))
8012 ctx = ctx->outer_context;
8013 if (ctx)
8014 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8015 }
8016 gimplify_vla_decl (array, pre_p);
8017 }
8018 else
8019 gimple_add_tmp_var (array);
8020 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8021 NULL_TREE);
8022 tree tem;
8023 if (!is_old)
8024 {
8025 tem = build2 (MODIFY_EXPR, void_type_node, r,
8026 build_int_cst (ptr_type_node, 0));
8027 gimplify_and_add (tem, pre_p);
8028 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8029 NULL_TREE);
8030 }
8031 tem = build2 (MODIFY_EXPR, void_type_node, r,
8032 fold_convert (ptr_type_node, total));
8033 gimplify_and_add (tem, pre_p);
8034 for (i = 1; i < (is_old ? 2 : 4); i++)
8035 {
8036 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8037 NULL_TREE, NULL_TREE);
8038 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8039 gimplify_and_add (tem, pre_p);
8040 }
8041
8042 tree cnts[4];
8043 for (j = 4; j; j--)
8044 if (!unused[j - 1])
8045 break;
8046 for (i = 0; i < 4; i++)
8047 {
8048 if (i && (i >= j || unused[i - 1]))
8049 {
8050 cnts[i] = cnts[i - 1];
8051 continue;
8052 }
8053 cnts[i] = create_tmp_var (sizetype);
8054 if (i == 0)
8055 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8056 else
8057 {
8058 tree t;
8059 if (is_old)
8060 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8061 else
8062 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8063 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8064 == GS_ERROR)
8065 return 2;
8066 g = gimple_build_assign (cnts[i], t);
8067 }
8068 gimple_seq_add_stmt (pre_p, g);
8069 }
8070
8071 last_iter = NULL_TREE;
8072 tree last_bind = NULL_TREE;
8073 tree *last_body = NULL;
8074 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8075 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8076 {
8077 switch (OMP_CLAUSE_DEPEND_KIND (c))
8078 {
8079 case OMP_CLAUSE_DEPEND_IN:
8080 i = 2;
8081 break;
8082 case OMP_CLAUSE_DEPEND_OUT:
8083 case OMP_CLAUSE_DEPEND_INOUT:
8084 i = 0;
8085 break;
8086 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8087 i = 1;
8088 break;
8089 case OMP_CLAUSE_DEPEND_DEPOBJ:
8090 i = 3;
8091 break;
8092 case OMP_CLAUSE_DEPEND_SOURCE:
8093 case OMP_CLAUSE_DEPEND_SINK:
8094 continue;
8095 default:
8096 gcc_unreachable ();
8097 }
8098 tree t = OMP_CLAUSE_DECL (c);
8099 if (TREE_CODE (t) == TREE_LIST
8100 && TREE_PURPOSE (t)
8101 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8102 {
8103 if (TREE_PURPOSE (t) != last_iter)
8104 {
8105 if (last_bind)
8106 gimplify_and_add (last_bind, pre_p);
8107 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8108 last_bind = build3 (BIND_EXPR, void_type_node,
8109 BLOCK_VARS (block), NULL, block);
8110 TREE_SIDE_EFFECTS (last_bind) = 1;
8111 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8112 tree *p = &BIND_EXPR_BODY (last_bind);
8113 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8114 {
8115 tree var = TREE_VEC_ELT (it, 0);
8116 tree begin = TREE_VEC_ELT (it, 1);
8117 tree end = TREE_VEC_ELT (it, 2);
8118 tree step = TREE_VEC_ELT (it, 3);
8119 tree orig_step = TREE_VEC_ELT (it, 4);
8120 tree type = TREE_TYPE (var);
8121 location_t loc = DECL_SOURCE_LOCATION (var);
8122 /* Emit:
8123 var = begin;
8124 goto cond_label;
8125 beg_label:
8126 ...
8127 var = var + step;
8128 cond_label:
8129 if (orig_step > 0) {
8130 if (var < end) goto beg_label;
8131 } else {
8132 if (var > end) goto beg_label;
8133 }
8134 for each iterator, with inner iterators added to
8135 the ... above. */
8136 tree beg_label = create_artificial_label (loc);
8137 tree cond_label = NULL_TREE;
8138 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8139 var, begin);
8140 append_to_statement_list_force (tem, p);
8141 tem = build_and_jump (&cond_label);
8142 append_to_statement_list_force (tem, p);
8143 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8144 append_to_statement_list (tem, p);
8145 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8146 NULL_TREE, NULL_TREE);
8147 TREE_SIDE_EFFECTS (bind) = 1;
8148 SET_EXPR_LOCATION (bind, loc);
8149 append_to_statement_list_force (bind, p);
8150 if (POINTER_TYPE_P (type))
8151 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8152 var, fold_convert_loc (loc, sizetype,
8153 step));
8154 else
8155 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8156 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8157 var, tem);
8158 append_to_statement_list_force (tem, p);
8159 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8160 append_to_statement_list (tem, p);
8161 tree cond = fold_build2_loc (loc, LT_EXPR,
8162 boolean_type_node,
8163 var, end);
8164 tree pos
8165 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8166 cond, build_and_jump (&beg_label),
8167 void_node);
8168 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8169 var, end);
8170 tree neg
8171 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8172 cond, build_and_jump (&beg_label),
8173 void_node);
8174 tree osteptype = TREE_TYPE (orig_step);
8175 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8176 orig_step,
8177 build_int_cst (osteptype, 0));
8178 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8179 cond, pos, neg);
8180 append_to_statement_list_force (tem, p);
8181 p = &BIND_EXPR_BODY (bind);
8182 }
8183 last_body = p;
8184 }
8185 last_iter = TREE_PURPOSE (t);
8186 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8187 {
8188 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8189 0), last_body);
8190 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8191 }
8192 if (error_operand_p (TREE_VALUE (t)))
8193 return 2;
8194 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8195 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8196 NULL_TREE, NULL_TREE);
8197 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8198 void_type_node, r, TREE_VALUE (t));
8199 append_to_statement_list_force (tem, last_body);
8200 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8201 void_type_node, cnts[i],
8202 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8203 append_to_statement_list_force (tem, last_body);
8204 TREE_VALUE (t) = null_pointer_node;
8205 }
8206 else
8207 {
8208 if (last_bind)
8209 {
8210 gimplify_and_add (last_bind, pre_p);
8211 last_bind = NULL_TREE;
8212 }
8213 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8214 {
8215 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8216 NULL, is_gimple_val, fb_rvalue);
8217 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8218 }
8219 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8220 return 2;
8221 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8222 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8223 is_gimple_val, fb_rvalue) == GS_ERROR)
8224 return 2;
8225 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8226 NULL_TREE, NULL_TREE);
8227 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8228 gimplify_and_add (tem, pre_p);
8229 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8230 size_int (1)));
8231 gimple_seq_add_stmt (pre_p, g);
8232 }
8233 }
8234 if (last_bind)
8235 gimplify_and_add (last_bind, pre_p);
8236 tree cond = boolean_false_node;
8237 if (is_old)
8238 {
8239 if (!unused[0])
8240 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8241 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8242 size_int (2)));
8243 if (!unused[2])
8244 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8245 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8246 cnts[2],
8247 size_binop_loc (first_loc, PLUS_EXPR,
8248 totalpx,
8249 size_int (1))));
8250 }
8251 else
8252 {
8253 tree prev = size_int (5);
8254 for (i = 0; i < 4; i++)
8255 {
8256 if (unused[i])
8257 continue;
8258 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8259 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8260 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8261 cnts[i], unshare_expr (prev)));
8262 }
8263 }
8264 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8265 build_call_expr_loc (first_loc,
8266 builtin_decl_explicit (BUILT_IN_TRAP),
8267 0), void_node);
8268 gimplify_and_add (tem, pre_p);
8269 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8270 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8271 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8272 OMP_CLAUSE_CHAIN (c) = *list_p;
8273 *list_p = c;
8274 return 1;
8275 }
8276
8277 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8278 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8279 the struct node to insert the new mapping after (when the struct node is
8280 initially created). PREV_NODE is the first of two or three mappings for a
8281 pointer, and is either:
8282 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8283 array section.
8284 - not the node before C. This is true when we have a reference-to-pointer
8285 type (with a mapping for the reference and for the pointer), or for
8286 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8287 If SCP is non-null, the new node is inserted before *SCP.
8288 If SCP is null, the new node is inserted before PREV_NODE.
8289 The return value is:
8290 - PREV_NODE, if SCP is non-null.
8291 - The newly-created ALLOC or RELEASE node, if SCP is null.
8292 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8293 reference to a pointer. */
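/* Rough sketch (hypothetical clause list): when 's.ptr[0:n]' is mapped
   via a GOMP_MAP_STRUCT node for 's', the new GOMP_MAP_ALLOC (or
   GOMP_MAP_RELEASE, on exit data) node for the pointer component is
   chained in directly after the struct node, ahead of the
   GOMP_MAP_ALWAYS_POINTER / GOMP_MAP_ATTACH_DETACH node that rebinds
   the pointer on the device.  */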
8294
8295 static tree
8296 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8297 tree prev_node, tree *scp)
8298 {
8299 enum gomp_map_kind mkind
8300 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8301 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8302
8303 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8304 tree cl = scp ? prev_node : c2;
8305 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8306 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8307 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8308 if (OMP_CLAUSE_CHAIN (prev_node) != c
8309 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8310 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8311 == GOMP_MAP_TO_PSET))
8312 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8313 else
8314 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8315 if (struct_node)
8316 OMP_CLAUSE_CHAIN (struct_node) = c2;
8317
8318 /* We might need to create an additional mapping if we have a reference to a
8319 pointer (in C++). Don't do this if we have something other than a
8320 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8321 if (OMP_CLAUSE_CHAIN (prev_node) != c
8322 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8323 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8324 == GOMP_MAP_ALWAYS_POINTER)
8325 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8326 == GOMP_MAP_ATTACH_DETACH)))
8327 {
8328 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8329 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8330 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8331 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8332 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8333 OMP_CLAUSE_CHAIN (c3) = prev_node;
8334 if (!scp)
8335 OMP_CLAUSE_CHAIN (c2) = c3;
8336 else
8337 cl = c3;
8338 }
8339
8340 if (scp)
8341 *scp = c2;
8342
8343 return cl;
8344 }
8345
8346 /* Strip ARRAY_REFs or an indirect ref off BASE, find the containing object,
8347 and set *BITPOSP / *POFFSETP to the bit / byte offset of the access.
8348 If BASE_REF is non-NULL and the containing object is a reference, set
8349 *BASE_REF to that reference before dereferencing the object.
8350 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8351 has array type, else return NULL. */
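/* E.g. (illustrative): for BASE 's.arr[3]' the ARRAY_REF is stripped
   first, so this returns 's', with *POFFSETP set to the byte offset
   of the field 'arr' within 's' and *BITPOSP to the offset in
   bits.  */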
8352
8353 static tree
8354 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8355 poly_offset_int *poffsetp)
8356 {
8357 tree offset;
8358 poly_int64 bitsize, bitpos;
8359 machine_mode mode;
8360 int unsignedp, reversep, volatilep = 0;
8361 poly_offset_int poffset;
8362
8363 if (base_ref)
8364 {
8365 *base_ref = NULL_TREE;
8366
8367 while (TREE_CODE (base) == ARRAY_REF)
8368 base = TREE_OPERAND (base, 0);
8369
8370 if (TREE_CODE (base) == INDIRECT_REF)
8371 base = TREE_OPERAND (base, 0);
8372 }
8373 else
8374 {
8375 if (TREE_CODE (base) == ARRAY_REF)
8376 {
8377 while (TREE_CODE (base) == ARRAY_REF)
8378 base = TREE_OPERAND (base, 0);
8379 if (TREE_CODE (base) != COMPONENT_REF
8380 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8381 return NULL_TREE;
8382 }
8383 else if (TREE_CODE (base) == INDIRECT_REF
8384 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8385 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8386 == REFERENCE_TYPE))
8387 base = TREE_OPERAND (base, 0);
8388 }
8389
8390 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8391 &unsignedp, &reversep, &volatilep);
8392
8393 tree orig_base = base;
8394
8395 if ((TREE_CODE (base) == INDIRECT_REF
8396 || (TREE_CODE (base) == MEM_REF
8397 && integer_zerop (TREE_OPERAND (base, 1))))
8398 && DECL_P (TREE_OPERAND (base, 0))
8399 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8400 base = TREE_OPERAND (base, 0);
8401
8402 gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8403
8404 if (offset)
8405 poffset = wi::to_poly_offset (offset);
8406 else
8407 poffset = 0;
8408
8409 if (maybe_ne (bitpos, 0))
8410 poffset += bits_to_bytes_round_down (bitpos);
8411
8412 *bitposp = bitpos;
8413 *poffsetp = poffset;
8414
8415 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8416 if (base_ref && orig_base != base)
8417 *base_ref = orig_base;
8418
8419 return base;
8420 }
8421
8422 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
8423
8424 static bool
8425 is_or_contains_p (tree expr, tree base_ptr)
8426 {
8427 while (expr != base_ptr)
8428 if (TREE_CODE (base_ptr) == COMPONENT_REF)
8429 base_ptr = TREE_OPERAND (base_ptr, 0);
8430 else
8431 break;
8432 return expr == base_ptr;
8433 }
8434
8435 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8436 several rules, with some level of ambiguity; hopefully we can at least
8437 collect the complexity here in one place. */
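/* For instance (made-up example): given

     #pragma omp target map(tofrom: p[0:n]) map(to: p)

   the map of the base pointer 'p' must take effect before the
   'p[0:n]' array section that uses it as a base, so the second pass
   below moves it ahead of the section's map node.  */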
8438
8439 static void
8440 omp_target_reorder_clauses (tree *list_p)
8441 {
8442 /* Collect refs to alloc/release/delete maps. */
8443 auto_vec<tree, 32> ard;
8444 tree *cp = list_p;
8445 while (*cp != NULL_TREE)
8446 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8447 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
8448 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
8449 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
8450 {
8451 /* Unlink cp and push to ard. */
8452 tree c = *cp;
8453 tree nc = OMP_CLAUSE_CHAIN (c);
8454 *cp = nc;
8455 ard.safe_push (c);
8456
8457 /* Any associated pointer type maps should also move along. */
8458 while (*cp != NULL_TREE
8459 && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8460 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8461 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
8462 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
8463 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
8464 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
8465 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
8466 {
8467 c = *cp;
8468 nc = OMP_CLAUSE_CHAIN (c);
8469 *cp = nc;
8470 ard.safe_push (c);
8471 }
8472 }
8473 else
8474 cp = &OMP_CLAUSE_CHAIN (*cp);
8475
8476 /* Link alloc/release/delete maps to the end of the list. */
8477 for (unsigned int i = 0; i < ard.length (); i++)
8478 {
8479 *cp = ard[i];
8480 cp = &OMP_CLAUSE_CHAIN (ard[i]);
8481 }
8482 *cp = NULL_TREE;
8483
8484 /* OpenMP 5.0 requires that a pointer variable is mapped before
8485 its use as a base pointer. */
8486 auto_vec<tree *, 32> atf;
8487 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8488 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8489 {
8490 /* Collect alloc, to, from, to/from clause tree pointers. */
8491 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8492 if (k == GOMP_MAP_ALLOC
8493 || k == GOMP_MAP_TO
8494 || k == GOMP_MAP_FROM
8495 || k == GOMP_MAP_TOFROM
8496 || k == GOMP_MAP_ALWAYS_TO
8497 || k == GOMP_MAP_ALWAYS_FROM
8498 || k == GOMP_MAP_ALWAYS_TOFROM)
8499 atf.safe_push (cp);
8500 }
8501
8502 for (unsigned int i = 0; i < atf.length (); i++)
8503 if (atf[i])
8504 {
8505 tree *cp = atf[i];
8506 tree decl = OMP_CLAUSE_DECL (*cp);
8507 if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
8508 {
8509 tree base_ptr = TREE_OPERAND (decl, 0);
8510 STRIP_TYPE_NOPS (base_ptr);
8511 for (unsigned int j = i + 1; j < atf.length (); j++)
8512 {
8513 tree *cp2 = atf[j];
8514 tree decl2 = OMP_CLAUSE_DECL (*cp2);
8515 if (is_or_contains_p (decl2, base_ptr))
8516 {
8517 /* Move *cp2 to before *cp. */
8518 tree c = *cp2;
8519 *cp2 = OMP_CLAUSE_CHAIN (c);
8520 OMP_CLAUSE_CHAIN (c) = *cp;
8521 *cp = c;
8522 atf[j] = NULL;
8523 }
8524 }
8525 }
8526 }
8527 }
8528
8529 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8530 omp context and, where needed, into enclosing omp contexts. */
8531
8532 static void
8533 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8534 enum omp_region_type region_type,
8535 enum tree_code code)
8536 {
8537 struct gimplify_omp_ctx *ctx, *outer_ctx;
8538 tree c;
8539 hash_map<tree, tree> *struct_map_to_clause = NULL;
8540 hash_set<tree> *struct_deref_set = NULL;
8541 tree *prev_list_p = NULL, *orig_list_p = list_p;
8542 int handled_depend_iterators = -1;
8543 int nowait = -1;
8544
8545 ctx = new_omp_context (region_type);
8546 ctx->code = code;
8547 outer_ctx = ctx->outer_context;
8548 if (code == OMP_TARGET)
8549 {
8550 if (!lang_GNU_Fortran ())
8551 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8552 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8553 }
8554 if (!lang_GNU_Fortran ())
8555 switch (code)
8556 {
8557 case OMP_TARGET:
8558 case OMP_TARGET_DATA:
8559 case OMP_TARGET_ENTER_DATA:
8560 case OMP_TARGET_EXIT_DATA:
8561 case OACC_DECLARE:
8562 case OACC_HOST_DATA:
8563 case OACC_PARALLEL:
8564 case OACC_KERNELS:
8565 ctx->target_firstprivatize_array_bases = true;
8566 default:
8567 break;
8568 }
8569
8570 if (code == OMP_TARGET
8571 || code == OMP_TARGET_DATA
8572 || code == OMP_TARGET_ENTER_DATA
8573 || code == OMP_TARGET_EXIT_DATA)
8574 omp_target_reorder_clauses (list_p);
8575
8576 while ((c = *list_p) != NULL)
8577 {
8578 bool remove = false;
8579 bool notice_outer = true;
8580 const char *check_non_private = NULL;
8581 unsigned int flags;
8582 tree decl;
8583
8584 switch (OMP_CLAUSE_CODE (c))
8585 {
8586 case OMP_CLAUSE_PRIVATE:
8587 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8588 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8589 {
8590 flags |= GOVD_PRIVATE_OUTER_REF;
8591 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8592 }
8593 else
8594 notice_outer = false;
8595 goto do_add;
8596 case OMP_CLAUSE_SHARED:
8597 flags = GOVD_SHARED | GOVD_EXPLICIT;
8598 goto do_add;
8599 case OMP_CLAUSE_FIRSTPRIVATE:
8600 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8601 check_non_private = "firstprivate";
8602 goto do_add;
8603 case OMP_CLAUSE_LASTPRIVATE:
8604 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8605 switch (code)
8606 {
8607 case OMP_DISTRIBUTE:
8608 error_at (OMP_CLAUSE_LOCATION (c),
8609 "conditional %<lastprivate%> clause on "
8610 "%qs construct", "distribute");
8611 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8612 break;
8613 case OMP_TASKLOOP:
8614 error_at (OMP_CLAUSE_LOCATION (c),
8615 "conditional %<lastprivate%> clause on "
8616 "%qs construct", "taskloop");
8617 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8618 break;
8619 default:
8620 break;
8621 }
8622 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8623 if (code != OMP_LOOP)
8624 check_non_private = "lastprivate";
8625 decl = OMP_CLAUSE_DECL (c);
8626 if (error_operand_p (decl))
8627 goto do_add;
8628 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8629 && !lang_hooks.decls.omp_scalar_p (decl))
8630 {
8631 error_at (OMP_CLAUSE_LOCATION (c),
8632 "non-scalar variable %qD in conditional "
8633 "%<lastprivate%> clause", decl);
8634 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8635 }
8636 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8637 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8638 if (outer_ctx
8639 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8640 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8641 == ORT_COMBINED_TEAMS))
8642 && splay_tree_lookup (outer_ctx->variables,
8643 (splay_tree_key) decl) == NULL)
8644 {
8645 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8646 if (outer_ctx->outer_context)
8647 omp_notice_variable (outer_ctx->outer_context, decl, true);
8648 }
8649 else if (outer_ctx
8650 && (outer_ctx->region_type & ORT_TASK) != 0
8651 && outer_ctx->combined_loop
8652 && splay_tree_lookup (outer_ctx->variables,
8653 (splay_tree_key) decl) == NULL)
8654 {
8655 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8656 if (outer_ctx->outer_context)
8657 omp_notice_variable (outer_ctx->outer_context, decl, true);
8658 }
8659 else if (outer_ctx
8660 && (outer_ctx->region_type == ORT_WORKSHARE
8661 || outer_ctx->region_type == ORT_ACC)
8662 && outer_ctx->combined_loop
8663 && splay_tree_lookup (outer_ctx->variables,
8664 (splay_tree_key) decl) == NULL
8665 && !omp_check_private (outer_ctx, decl, false))
8666 {
8667 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8668 if (outer_ctx->outer_context
8669 && (outer_ctx->outer_context->region_type
8670 == ORT_COMBINED_PARALLEL)
8671 && splay_tree_lookup (outer_ctx->outer_context->variables,
8672 (splay_tree_key) decl) == NULL)
8673 {
8674 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8675 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8676 if (octx->outer_context)
8677 {
8678 octx = octx->outer_context;
8679 if (octx->region_type == ORT_WORKSHARE
8680 && octx->combined_loop
8681 && splay_tree_lookup (octx->variables,
8682 (splay_tree_key) decl) == NULL
8683 && !omp_check_private (octx, decl, false))
8684 {
8685 omp_add_variable (octx, decl,
8686 GOVD_LASTPRIVATE | GOVD_SEEN);
8687 octx = octx->outer_context;
8688 if (octx
8689 && ((octx->region_type & ORT_COMBINED_TEAMS)
8690 == ORT_COMBINED_TEAMS)
8691 && (splay_tree_lookup (octx->variables,
8692 (splay_tree_key) decl)
8693 == NULL))
8694 {
8695 omp_add_variable (octx, decl,
8696 GOVD_SHARED | GOVD_SEEN);
8697 octx = octx->outer_context;
8698 }
8699 }
8700 if (octx)
8701 omp_notice_variable (octx, decl, true);
8702 }
8703 }
8704 else if (outer_ctx->outer_context)
8705 omp_notice_variable (outer_ctx->outer_context, decl, true);
8706 }
8707 goto do_add;
8708 case OMP_CLAUSE_REDUCTION:
8709 if (OMP_CLAUSE_REDUCTION_TASK (c))
8710 {
8711 if (region_type == ORT_WORKSHARE)
8712 {
8713 if (nowait == -1)
8714 nowait = omp_find_clause (*list_p,
8715 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8716 if (nowait
8717 && (outer_ctx == NULL
8718 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8719 {
8720 error_at (OMP_CLAUSE_LOCATION (c),
8721 "%<task%> reduction modifier on a construct "
8722 "with a %<nowait%> clause");
8723 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8724 }
8725 }
8726 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8727 {
8728 error_at (OMP_CLAUSE_LOCATION (c),
8729 "invalid %<task%> reduction modifier on construct "
8730 "other than %<parallel%>, %qs or %<sections%>",
8731 lang_GNU_Fortran () ? "do" : "for");
8732 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8733 }
8734 }
8735 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8736 switch (code)
8737 {
8738 case OMP_SECTIONS:
8739 error_at (OMP_CLAUSE_LOCATION (c),
8740 "%<inscan%> %<reduction%> clause on "
8741 "%qs construct", "sections");
8742 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8743 break;
8744 case OMP_PARALLEL:
8745 error_at (OMP_CLAUSE_LOCATION (c),
8746 "%<inscan%> %<reduction%> clause on "
8747 "%qs construct", "parallel");
8748 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8749 break;
8750 case OMP_TEAMS:
8751 error_at (OMP_CLAUSE_LOCATION (c),
8752 "%<inscan%> %<reduction%> clause on "
8753 "%qs construct", "teams");
8754 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8755 break;
8756 case OMP_TASKLOOP:
8757 error_at (OMP_CLAUSE_LOCATION (c),
8758 "%<inscan%> %<reduction%> clause on "
8759 "%qs construct", "taskloop");
8760 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8761 break;
8762 default:
8763 break;
8764 }
8765 /* FALLTHRU */
8766 case OMP_CLAUSE_IN_REDUCTION:
8767 case OMP_CLAUSE_TASK_REDUCTION:
8768 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8769 /* OpenACC permits reductions on private variables. */
8770 if (!(region_type & ORT_ACC)
8771 /* taskgroup is actually not a worksharing region. */
8772 && code != OMP_TASKGROUP)
8773 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8774 decl = OMP_CLAUSE_DECL (c);
8775 if (TREE_CODE (decl) == MEM_REF)
8776 {
8777 tree type = TREE_TYPE (decl);
8778 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8779 NULL, is_gimple_val, fb_rvalue, false)
8780 == GS_ERROR)
8781 {
8782 remove = true;
8783 break;
8784 }
8785 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8786 if (DECL_P (v))
8787 {
8788 omp_firstprivatize_variable (ctx, v);
8789 omp_notice_variable (ctx, v, true);
8790 }
8791 decl = TREE_OPERAND (decl, 0);
8792 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8793 {
8794 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8795 NULL, is_gimple_val, fb_rvalue, false)
8796 == GS_ERROR)
8797 {
8798 remove = true;
8799 break;
8800 }
8801 v = TREE_OPERAND (decl, 1);
8802 if (DECL_P (v))
8803 {
8804 omp_firstprivatize_variable (ctx, v);
8805 omp_notice_variable (ctx, v, true);
8806 }
8807 decl = TREE_OPERAND (decl, 0);
8808 }
8809 if (TREE_CODE (decl) == ADDR_EXPR
8810 || TREE_CODE (decl) == INDIRECT_REF)
8811 decl = TREE_OPERAND (decl, 0);
8812 }
8813 goto do_add_decl;
8814 case OMP_CLAUSE_LINEAR:
8815 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8816 is_gimple_val, fb_rvalue) == GS_ERROR)
8817 {
8818 remove = true;
8819 break;
8820 }
8821 else
8822 {
8823 if (code == OMP_SIMD
8824 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8825 {
8826 struct gimplify_omp_ctx *octx = outer_ctx;
8827 if (octx
8828 && octx->region_type == ORT_WORKSHARE
8829 && octx->combined_loop
8830 && !octx->distribute)
8831 {
8832 if (octx->outer_context
8833 && (octx->outer_context->region_type
8834 == ORT_COMBINED_PARALLEL))
8835 octx = octx->outer_context->outer_context;
8836 else
8837 octx = octx->outer_context;
8838 }
8839 if (octx
8840 && octx->region_type == ORT_WORKSHARE
8841 && octx->combined_loop
8842 && octx->distribute)
8843 {
8844 error_at (OMP_CLAUSE_LOCATION (c),
8845 "%<linear%> clause for variable other than "
8846 "loop iterator specified on construct "
8847 "combined with %<distribute%>");
8848 remove = true;
8849 break;
8850 }
8851 }
8852 /* For a combined #pragma omp parallel for simd, we need to put
8853 lastprivate and perhaps firstprivate too on the
8854 parallel. Similarly for #pragma omp for simd. */
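/* E.g. for

     #pragma omp parallel for simd linear (x)

   X also needs firstprivate/lastprivate treatment on the enclosing
   parallel so its value flows into and out of the simd region.  */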
8855 struct gimplify_omp_ctx *octx = outer_ctx;
8856 decl = NULL_TREE;
8857 do
8858 {
8859 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8860 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8861 break;
8862 decl = OMP_CLAUSE_DECL (c);
8863 if (error_operand_p (decl))
8864 {
8865 decl = NULL_TREE;
8866 break;
8867 }
8868 flags = GOVD_SEEN;
8869 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8870 flags |= GOVD_FIRSTPRIVATE;
8871 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8872 flags |= GOVD_LASTPRIVATE;
8873 if (octx
8874 && octx->region_type == ORT_WORKSHARE
8875 && octx->combined_loop)
8876 {
8877 if (octx->outer_context
8878 && (octx->outer_context->region_type
8879 == ORT_COMBINED_PARALLEL))
8880 octx = octx->outer_context;
8881 else if (omp_check_private (octx, decl, false))
8882 break;
8883 }
8884 else if (octx
8885 && (octx->region_type & ORT_TASK) != 0
8886 && octx->combined_loop)
8887 ;
8888 else if (octx
8889 && octx->region_type == ORT_COMBINED_PARALLEL
8890 && ctx->region_type == ORT_WORKSHARE
8891 && octx == outer_ctx)
8892 flags = GOVD_SEEN | GOVD_SHARED;
8893 else if (octx
8894 && ((octx->region_type & ORT_COMBINED_TEAMS)
8895 == ORT_COMBINED_TEAMS))
8896 flags = GOVD_SEEN | GOVD_SHARED;
8897 else if (octx
8898 && octx->region_type == ORT_COMBINED_TARGET)
8899 {
8900 flags &= ~GOVD_LASTPRIVATE;
8901 if (flags == GOVD_SEEN)
8902 break;
8903 }
8904 else
8905 break;
8906 splay_tree_node on
8907 = splay_tree_lookup (octx->variables,
8908 (splay_tree_key) decl);
8909 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8910 {
8911 octx = NULL;
8912 break;
8913 }
8914 omp_add_variable (octx, decl, flags);
8915 if (octx->outer_context == NULL)
8916 break;
8917 octx = octx->outer_context;
8918 }
8919 while (1);
8920 if (octx
8921 && decl
8922 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8923 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8924 omp_notice_variable (octx, decl, true);
8925 }
8926 flags = GOVD_LINEAR | GOVD_EXPLICIT;
8927 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8928 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8929 {
8930 notice_outer = false;
8931 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8932 }
8933 goto do_add;
8934
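/* Map clauses, e.g.

     #pragma omp target map (tofrom: a[0:n])

   need construct-specific handling: some map kinds are only meaningful
   on certain directives, clause sizes may need gimplifying, and
   component or array-section bases may require extra struct or pointer
   mappings to be synthesized below.  */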
8935 case OMP_CLAUSE_MAP:
8936 decl = OMP_CLAUSE_DECL (c);
8937 if (error_operand_p (decl))
8938 remove = true;
8939 switch (code)
8940 {
8941 case OMP_TARGET:
8942 break;
8943 case OACC_DATA:
8944 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8945 break;
8946 /* FALLTHRU */
8947 case OMP_TARGET_DATA:
8948 case OMP_TARGET_ENTER_DATA:
8949 case OMP_TARGET_EXIT_DATA:
8950 case OACC_ENTER_DATA:
8951 case OACC_EXIT_DATA:
8952 case OACC_HOST_DATA:
8953 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8954 || (OMP_CLAUSE_MAP_KIND (c)
8955 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8956 /* For target {,enter ,exit }data only the array slice is
8957 mapped, but not the pointer to it. */
8958 remove = true;
8959 break;
8960 default:
8961 break;
8962 }
8963 /* For Fortran, not only the pointer to the data is mapped but also
8964 the address of the pointer, the array descriptor etc.; for
8965 'exit data' - and in particular for 'delete:' - having an 'alloc:'
8966 does not make sense. Likewise, for 'update' only transferring the
8967 data itself is needed as the rest has been handled in previous
8968 directives. However, for 'exit data', the array descriptor needs
8969 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
8970
8971 NOTE: Generally, it is not safe to perform "enter data" operations
8972 on arrays where the data *or the descriptor* may go out of scope
8973 before a corresponding "exit data" operation -- and such a
8974 descriptor may be synthesized temporarily, e.g. to pass an
8975 explicit-shape array to a function expecting an assumed-shape
8976 argument. Performing "enter data" inside the called function
8977 would thus be problematic. */
8978 if (code == OMP_TARGET_EXIT_DATA
8979 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
8980 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8981 == GOMP_MAP_DELETE
8982 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
8983 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8984 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8985 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8986 remove = true;
8987
8988 if (remove)
8989 break;
8990 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8991 {
8992 struct gimplify_omp_ctx *octx;
8993 for (octx = outer_ctx; octx; octx = octx->outer_context)
8994 {
8995 if (octx->region_type != ORT_ACC_HOST_DATA)
8996 break;
8997 splay_tree_node n2
8998 = splay_tree_lookup (octx->variables,
8999 (splay_tree_key) decl);
9000 if (n2)
9001 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
9002 "declared in enclosing %<host_data%> region",
9003 DECL_NAME (decl));
9004 }
9005 }
9006 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9007 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9008 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9009 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9010 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9011 {
9012 remove = true;
9013 break;
9014 }
9015 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9016 || (OMP_CLAUSE_MAP_KIND (c)
9017 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9018 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9019 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
9020 {
9021 OMP_CLAUSE_SIZE (c)
9022 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
9023 false);
9024 if ((region_type & ORT_TARGET) != 0)
9025 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
9026 GOVD_FIRSTPRIVATE | GOVD_SEEN);
9027 }
9028
9029 if (!DECL_P (decl))
9030 {
9031 tree d = decl, *pd;
9032 if (TREE_CODE (d) == ARRAY_REF)
9033 {
9034 while (TREE_CODE (d) == ARRAY_REF)
9035 d = TREE_OPERAND (d, 0);
9036 if (TREE_CODE (d) == COMPONENT_REF
9037 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
9038 decl = d;
9039 }
9040 pd = &OMP_CLAUSE_DECL (c);
9041 if (d == decl
9042 && TREE_CODE (decl) == INDIRECT_REF
9043 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9044 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9045 == REFERENCE_TYPE))
9046 {
9047 pd = &TREE_OPERAND (decl, 0);
9048 decl = TREE_OPERAND (decl, 0);
9049 }
9050 bool indir_p = false;
9051 tree orig_decl = decl;
9052 tree decl_ref = NULL_TREE;
9053 if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
9054 && TREE_CODE (*pd) == COMPONENT_REF
9055 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
9056 && code != OACC_UPDATE)
9057 {
9058 while (TREE_CODE (decl) == COMPONENT_REF)
9059 {
9060 decl = TREE_OPERAND (decl, 0);
9061 if (((TREE_CODE (decl) == MEM_REF
9062 && integer_zerop (TREE_OPERAND (decl, 1)))
9063 || INDIRECT_REF_P (decl))
9064 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9065 == POINTER_TYPE))
9066 {
9067 indir_p = true;
9068 decl = TREE_OPERAND (decl, 0);
9069 }
9070 if (TREE_CODE (decl) == INDIRECT_REF
9071 && DECL_P (TREE_OPERAND (decl, 0))
9072 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9073 == REFERENCE_TYPE))
9074 {
9075 decl_ref = decl;
9076 decl = TREE_OPERAND (decl, 0);
9077 }
9078 }
9079 }
9080 else if (TREE_CODE (decl) == COMPONENT_REF)
9081 {
9082 while (TREE_CODE (decl) == COMPONENT_REF)
9083 decl = TREE_OPERAND (decl, 0);
9084 if (TREE_CODE (decl) == INDIRECT_REF
9085 && DECL_P (TREE_OPERAND (decl, 0))
9086 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9087 == REFERENCE_TYPE))
9088 decl = TREE_OPERAND (decl, 0);
9089 }
9090 if (decl != orig_decl && DECL_P (decl) && indir_p)
9091 {
9092 gomp_map_kind k
9093 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9094 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9095 /* We have a dereference of a struct member. Make this an
9096 attach/detach operation, and ensure the base pointer is
9097 mapped as a FIRSTPRIVATE_POINTER. */
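/* E.g. when mapping p->a[0:n], where P is a pointer to a struct, the
   attach/detach operates on the dereferenced struct, and P itself is
   mapped through the clauses synthesized below.  */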
9098 OMP_CLAUSE_SET_MAP_KIND (c, k);
9099 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
9100 tree next_clause = OMP_CLAUSE_CHAIN (c);
9101 if (k == GOMP_MAP_ATTACH
9102 && code != OACC_ENTER_DATA
9103 && code != OMP_TARGET_ENTER_DATA
9104 && (!next_clause
9105 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
9106 || (OMP_CLAUSE_MAP_KIND (next_clause)
9107 != GOMP_MAP_POINTER)
9108 || OMP_CLAUSE_DECL (next_clause) != decl)
9109 && (!struct_deref_set
9110 || !struct_deref_set->contains (decl)))
9111 {
9112 if (!struct_deref_set)
9113 struct_deref_set = new hash_set<tree> ();
9114 /* As well as the attach, we also need a
9115 FIRSTPRIVATE_POINTER clause to properly map the
9116 pointer to the struct base. */
9117 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9118 OMP_CLAUSE_MAP);
9119 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
9120 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
9121 = 1;
9122 tree charptr_zero
9123 = build_int_cst (build_pointer_type (char_type_node),
9124 0);
9125 OMP_CLAUSE_DECL (c2)
9126 = build2 (MEM_REF, char_type_node,
9127 decl_ref ? decl_ref : decl, charptr_zero);
9128 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9129 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9130 OMP_CLAUSE_MAP);
9131 OMP_CLAUSE_SET_MAP_KIND (c3,
9132 GOMP_MAP_FIRSTPRIVATE_POINTER);
9133 OMP_CLAUSE_DECL (c3) = decl;
9134 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9135 tree mapgrp = *prev_list_p;
9136 *prev_list_p = c2;
9137 OMP_CLAUSE_CHAIN (c3) = mapgrp;
9138 OMP_CLAUSE_CHAIN (c2) = c3;
9139
9140 struct_deref_set->add (decl);
9141 }
9142 goto do_add_decl;
9143 }
9144 /* An "attach/detach" operation on an update directive should
9145 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9146 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9147 depends on the previous mapping. */
9148 if (code == OACC_UPDATE
9149 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9150 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9151 if (DECL_P (decl)
9152 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9153 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9154 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9155 && code != OACC_UPDATE
9156 && code != OMP_TARGET_UPDATE)
9157 {
9158 if (error_operand_p (decl))
9159 {
9160 remove = true;
9161 break;
9162 }
9163
9164 tree stype = TREE_TYPE (decl);
9165 if (TREE_CODE (stype) == REFERENCE_TYPE)
9166 stype = TREE_TYPE (stype);
9167 if (TYPE_SIZE_UNIT (stype) == NULL
9168 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9169 {
9170 error_at (OMP_CLAUSE_LOCATION (c),
9171 "mapping field %qE of variable length "
9172 "structure", OMP_CLAUSE_DECL (c));
9173 remove = true;
9174 break;
9175 }
9176
9177 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9178 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9179 {
9180 /* Error recovery. */
9181 if (prev_list_p == NULL)
9182 {
9183 remove = true;
9184 break;
9185 }
9186 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9187 {
9188 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9189 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9190 {
9191 remove = true;
9192 break;
9193 }
9194 }
9195 }
9196
9197 poly_offset_int offset1;
9198 poly_int64 bitpos1;
9199 tree base_ref;
9200
9201 tree base
9202 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9203 &bitpos1, &offset1);
9204
9205 gcc_assert (base == decl);
9206
9207 splay_tree_node n
9208 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9209 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9210 == GOMP_MAP_ALWAYS_POINTER);
9211 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9212 == GOMP_MAP_ATTACH_DETACH);
9213 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9214 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9215 bool has_attachments = false;
9216 /* For OpenACC, pointers in structs should trigger an
9217 attach action. */
9218 if (attach_detach
9219 && ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
9220 || code == OMP_TARGET_ENTER_DATA
9221 || code == OMP_TARGET_EXIT_DATA))
9222
9223 {
9224 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9225 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9226 have detected a case that needs a GOMP_MAP_STRUCT
9227 mapping added. */
9228 gomp_map_kind k
9229 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9230 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9231 OMP_CLAUSE_SET_MAP_KIND (c, k);
9232 has_attachments = true;
9233 }
9234 if (n == NULL || (n->value & GOVD_MAP) == 0)
9235 {
9236 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9237 OMP_CLAUSE_MAP);
9238 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9239 : GOMP_MAP_STRUCT;
9240
9241 OMP_CLAUSE_SET_MAP_KIND (l, k);
9242 if (base_ref)
9243 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9244 else
9245 OMP_CLAUSE_DECL (l) = decl;
9246 OMP_CLAUSE_SIZE (l)
9247 = (!attach
9248 ? size_int (1)
9249 : DECL_P (OMP_CLAUSE_DECL (l))
9250 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9251 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9252 if (struct_map_to_clause == NULL)
9253 struct_map_to_clause = new hash_map<tree, tree>;
9254 struct_map_to_clause->put (decl, l);
9255 if (ptr || attach_detach)
9256 {
9257 insert_struct_comp_map (code, c, l, *prev_list_p,
9258 NULL);
9259 *prev_list_p = l;
9260 prev_list_p = NULL;
9261 }
9262 else
9263 {
9264 OMP_CLAUSE_CHAIN (l) = c;
9265 *list_p = l;
9266 list_p = &OMP_CLAUSE_CHAIN (l);
9267 }
9268 if (base_ref && code == OMP_TARGET)
9269 {
9270 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9271 OMP_CLAUSE_MAP);
9272 enum gomp_map_kind mkind
9273 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9274 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9275 OMP_CLAUSE_DECL (c2) = decl;
9276 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9277 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9278 OMP_CLAUSE_CHAIN (l) = c2;
9279 }
9280 flags = GOVD_MAP | GOVD_EXPLICIT;
9281 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9282 || ptr
9283 || attach_detach)
9284 flags |= GOVD_SEEN;
9285 if (has_attachments)
9286 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9287 goto do_add_decl;
9288 }
9289 else if (struct_map_to_clause)
9290 {
9291 tree *osc = struct_map_to_clause->get (decl);
9292 tree *sc = NULL, *scp = NULL;
9293 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9294 || ptr
9295 || attach_detach)
9296 n->value |= GOVD_SEEN;
9297 sc = &OMP_CLAUSE_CHAIN (*osc);
9298 if (*sc != c
9299 && (OMP_CLAUSE_MAP_KIND (*sc)
9300 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9301 sc = &OMP_CLAUSE_CHAIN (*sc);
9302 /* Here "prev_list_p" is the end of the inserted
9303 alloc/release nodes after the struct node, OSC. */
9304 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9305 if ((ptr || attach_detach) && sc == prev_list_p)
9306 break;
9307 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9308 != COMPONENT_REF
9309 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9310 != INDIRECT_REF)
9311 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9312 != ARRAY_REF))
9313 break;
9314 else
9315 {
9316 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9317 poly_offset_int offsetn;
9318 poly_int64 bitposn;
9319 tree base
9320 = extract_base_bit_offset (sc_decl, NULL,
9321 &bitposn, &offsetn);
9322 if (base != decl)
9323 break;
9324 if (scp)
9325 continue;
9326 if ((region_type & ORT_ACC) != 0)
9327 {
9328 /* This duplicate checking code is currently only
9329 enabled for OpenACC. */
9330 tree d1 = OMP_CLAUSE_DECL (*sc);
9331 tree d2 = OMP_CLAUSE_DECL (c);
9332 while (TREE_CODE (d1) == ARRAY_REF)
9333 d1 = TREE_OPERAND (d1, 0);
9334 while (TREE_CODE (d2) == ARRAY_REF)
9335 d2 = TREE_OPERAND (d2, 0);
9336 if (TREE_CODE (d1) == INDIRECT_REF)
9337 d1 = TREE_OPERAND (d1, 0);
9338 if (TREE_CODE (d2) == INDIRECT_REF)
9339 d2 = TREE_OPERAND (d2, 0);
9340 while (TREE_CODE (d1) == COMPONENT_REF)
9341 if (TREE_CODE (d2) == COMPONENT_REF
9342 && TREE_OPERAND (d1, 1)
9343 == TREE_OPERAND (d2, 1))
9344 {
9345 d1 = TREE_OPERAND (d1, 0);
9346 d2 = TREE_OPERAND (d2, 0);
9347 }
9348 else
9349 break;
9350 if (d1 == d2)
9351 {
9352 error_at (OMP_CLAUSE_LOCATION (c),
9353 "%qE appears more than once in map "
9354 "clauses", OMP_CLAUSE_DECL (c));
9355 remove = true;
9356 break;
9357 }
9358 }
9359 if (maybe_lt (offset1, offsetn)
9360 || (known_eq (offset1, offsetn)
9361 && maybe_lt (bitpos1, bitposn)))
9362 {
9363 if (ptr || attach_detach)
9364 scp = sc;
9365 else
9366 break;
9367 }
9368 }
9369 if (remove)
9370 break;
9371 if (!attach)
9372 OMP_CLAUSE_SIZE (*osc)
9373 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9374 size_one_node);
9375 if (ptr || attach_detach)
9376 {
9377 tree cl = insert_struct_comp_map (code, c, NULL,
9378 *prev_list_p, scp);
9379 if (sc == prev_list_p)
9380 {
9381 *sc = cl;
9382 prev_list_p = NULL;
9383 }
9384 else
9385 {
9386 *prev_list_p = OMP_CLAUSE_CHAIN (c);
9387 list_p = prev_list_p;
9388 prev_list_p = NULL;
9389 OMP_CLAUSE_CHAIN (c) = *sc;
9390 *sc = cl;
9391 continue;
9392 }
9393 }
9394 else if (*sc != c)
9395 {
9396 *list_p = OMP_CLAUSE_CHAIN (c);
9397 OMP_CLAUSE_CHAIN (c) = *sc;
9398 *sc = c;
9399 continue;
9400 }
9401 }
9402 }
9403
9404 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
9405 == GS_ERROR)
9406 {
9407 remove = true;
9408 break;
9409 }
9410
9411 if (!remove
9412 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9413 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9414 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9415 && OMP_CLAUSE_CHAIN (c)
9416 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9417 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9418 == GOMP_MAP_ALWAYS_POINTER)
9419 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9420 == GOMP_MAP_ATTACH_DETACH)
9421 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9422 == GOMP_MAP_TO_PSET)))
9423 prev_list_p = list_p;
9424
9425 break;
9426 }
9427 else
9428 {
9429 /* DECL_P (decl) == true */
9430 tree *sc;
9431 if (struct_map_to_clause
9432 && (sc = struct_map_to_clause->get (decl)) != NULL
9433 && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
9434 && decl == OMP_CLAUSE_DECL (*sc))
9435 {
9436 /* We have found a map of the whole structure after a
9437 leading GOMP_MAP_STRUCT has been created, so refill the
9438 leading clause into a map of the whole structure
9439 variable, and remove the current one.
9440 TODO: we should be able to remove some maps of the
9441 following structure element maps if they are of
9442 compatible TO/FROM/ALLOC type. */
9443 OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
9444 OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
9445 remove = true;
9446 break;
9447 }
9448 }
9449 flags = GOVD_MAP | GOVD_EXPLICIT;
9450 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9451 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9452 flags |= GOVD_MAP_ALWAYS_TO;
9453
9454 if ((code == OMP_TARGET
9455 || code == OMP_TARGET_DATA
9456 || code == OMP_TARGET_ENTER_DATA
9457 || code == OMP_TARGET_EXIT_DATA)
9458 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9459 {
9460 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
9461 octx = octx->outer_context)
9462 {
9463 splay_tree_node n
9464 = splay_tree_lookup (octx->variables,
9465 (splay_tree_key) OMP_CLAUSE_DECL (c));
9466 /* If this is contained in an outer OpenMP region as a
9467 firstprivate value, remove the attach/detach. */
9468 if (n && (n->value & GOVD_FIRSTPRIVATE))
9469 {
9470 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
9471 goto do_add;
9472 }
9473 }
9474
9475 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
9476 ? GOMP_MAP_DETACH
9477 : GOMP_MAP_ATTACH);
9478 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
9479 }
9480
9481 goto do_add;
9482
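/* For a depend clause, e.g.

     #pragma omp task depend (inout: x)

   the runtime needs the address of the dependence object, so the
   clause decl is replaced by its address below; the doacross
   sink/source forms are handled separately first.  */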
9483 case OMP_CLAUSE_DEPEND:
9484 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9485 {
9486 tree deps = OMP_CLAUSE_DECL (c);
9487 while (deps && TREE_CODE (deps) == TREE_LIST)
9488 {
9489 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9490 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9491 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9492 pre_p, NULL, is_gimple_val, fb_rvalue);
9493 deps = TREE_CHAIN (deps);
9494 }
9495 break;
9496 }
9497 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9498 break;
9499 if (handled_depend_iterators == -1)
9500 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9501 if (handled_depend_iterators)
9502 {
9503 if (handled_depend_iterators == 2)
9504 remove = true;
9505 break;
9506 }
9507 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9508 {
9509 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9510 NULL, is_gimple_val, fb_rvalue);
9511 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9512 }
9513 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9514 {
9515 remove = true;
9516 break;
9517 }
9518 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9519 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9520 is_gimple_val, fb_rvalue) == GS_ERROR)
9521 {
9522 remove = true;
9523 break;
9524 }
9525 if (code == OMP_TASK)
9526 ctx->has_depend = true;
9527 break;
9528
9529 case OMP_CLAUSE_TO:
9530 case OMP_CLAUSE_FROM:
9531 case OMP_CLAUSE__CACHE_:
9532 decl = OMP_CLAUSE_DECL (c);
9533 if (error_operand_p (decl))
9534 {
9535 remove = true;
9536 break;
9537 }
9538 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9539 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9540 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9541 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9542 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9543 {
9544 remove = true;
9545 break;
9546 }
9547 if (!DECL_P (decl))
9548 {
9549 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9550 NULL, is_gimple_lvalue, fb_lvalue)
9551 == GS_ERROR)
9552 {
9553 remove = true;
9554 break;
9555 }
9556 break;
9557 }
9558 goto do_notice;
9559
9560 case OMP_CLAUSE_USE_DEVICE_PTR:
9561 case OMP_CLAUSE_USE_DEVICE_ADDR:
9562 flags = GOVD_EXPLICIT;
9563 goto do_add;
9564
9565 case OMP_CLAUSE_IS_DEVICE_PTR:
9566 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9567 goto do_add;
9568
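/* Shared tail for most clauses above: record DECL with FLAGS in the
   current context, then pre-gimplify any reduction init/merge,
   lastprivate or linear statements attached to the clause.  */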
9569 do_add:
9570 decl = OMP_CLAUSE_DECL (c);
9571 do_add_decl:
9572 if (error_operand_p (decl))
9573 {
9574 remove = true;
9575 break;
9576 }
9577 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9578 {
9579 tree t = omp_member_access_dummy_var (decl);
9580 if (t)
9581 {
9582 tree v = DECL_VALUE_EXPR (decl);
9583 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9584 if (outer_ctx)
9585 omp_notice_variable (outer_ctx, t, true);
9586 }
9587 }
9588 if (code == OACC_DATA
9589 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9590 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9591 flags |= GOVD_MAP_0LEN_ARRAY;
9592 omp_add_variable (ctx, decl, flags);
9593 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9594 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9595 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9596 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9597 {
9598 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9599 GOVD_LOCAL | GOVD_SEEN);
9600 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9601 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9602 find_decl_expr,
9603 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9604 NULL) == NULL_TREE)
9605 omp_add_variable (ctx,
9606 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9607 GOVD_LOCAL | GOVD_SEEN);
9608 gimplify_omp_ctxp = ctx;
9609 push_gimplify_context ();
9610
9611 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9612 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9613
9614 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9615 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9616 pop_gimplify_context
9617 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9618 push_gimplify_context ();
9619 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9620 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9621 pop_gimplify_context
9622 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9623 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9624 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9625
9626 gimplify_omp_ctxp = outer_ctx;
9627 }
9628 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9629 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9630 {
9631 gimplify_omp_ctxp = ctx;
9632 push_gimplify_context ();
9633 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9634 {
9635 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9636 NULL, NULL);
9637 TREE_SIDE_EFFECTS (bind) = 1;
9638 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9639 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9640 }
9641 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9642 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9643 pop_gimplify_context
9644 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9645 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9646
9647 gimplify_omp_ctxp = outer_ctx;
9648 }
9649 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9650 && OMP_CLAUSE_LINEAR_STMT (c))
9651 {
9652 gimplify_omp_ctxp = ctx;
9653 push_gimplify_context ();
9654 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9655 {
9656 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9657 NULL, NULL);
9658 TREE_SIDE_EFFECTS (bind) = 1;
9659 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9660 OMP_CLAUSE_LINEAR_STMT (c) = bind;
9661 }
9662 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9663 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9664 pop_gimplify_context
9665 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9666 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9667
9668 gimplify_omp_ctxp = outer_ctx;
9669 }
9670 if (notice_outer)
9671 goto do_notice;
9672 break;
9673
9674 case OMP_CLAUSE_COPYIN:
9675 case OMP_CLAUSE_COPYPRIVATE:
9676 decl = OMP_CLAUSE_DECL (c);
9677 if (error_operand_p (decl))
9678 {
9679 remove = true;
9680 break;
9681 }
9682 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9683 && !remove
9684 && !omp_check_private (ctx, decl, true))
9685 {
9686 remove = true;
9687 if (is_global_var (decl))
9688 {
9689 if (DECL_THREAD_LOCAL_P (decl))
9690 remove = false;
9691 else if (DECL_HAS_VALUE_EXPR_P (decl))
9692 {
9693 tree value = get_base_address (DECL_VALUE_EXPR (decl));
9694
9695 if (value
9696 && DECL_P (value)
9697 && DECL_THREAD_LOCAL_P (value))
9698 remove = false;
9699 }
9700 }
9701 if (remove)
9702 error_at (OMP_CLAUSE_LOCATION (c),
9703 "copyprivate variable %qE is not threadprivate"
9704 " or private in outer context", DECL_NAME (decl));
9705 }
9706 do_notice:
9707 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9708 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9709 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9710 && outer_ctx
9711 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9712 || (region_type == ORT_WORKSHARE
9713 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9714 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9715 || code == OMP_LOOP)))
9716 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9717 || (code == OMP_LOOP
9718 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9719 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9720 == ORT_COMBINED_TEAMS))))
9721 {
9722 splay_tree_node on
9723 = splay_tree_lookup (outer_ctx->variables,
9724 (splay_tree_key)decl);
9725 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9726 {
9727 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9728 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9729 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9730 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9731 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9732 == POINTER_TYPE))))
9733 omp_firstprivatize_variable (outer_ctx, decl);
9734 else
9735 {
9736 omp_add_variable (outer_ctx, decl,
9737 GOVD_SEEN | GOVD_SHARED);
9738 if (outer_ctx->outer_context)
9739 omp_notice_variable (outer_ctx->outer_context, decl,
9740 true);
9741 }
9742 }
9743 }
9744 if (outer_ctx)
9745 omp_notice_variable (outer_ctx, decl, true);
9746 if (check_non_private
9747 && region_type == ORT_WORKSHARE
9748 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9749 || decl == OMP_CLAUSE_DECL (c)
9750 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9751 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9752 == ADDR_EXPR
9753 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9754 == POINTER_PLUS_EXPR
9755 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9756 (OMP_CLAUSE_DECL (c), 0), 0))
9757 == ADDR_EXPR)))))
9758 && omp_check_private (ctx, decl, false))
9759 {
9760 error ("%s variable %qE is private in outer context",
9761 check_non_private, DECL_NAME (decl));
9762 remove = true;
9763 }
9764 break;
9765
9766 case OMP_CLAUSE_DETACH:
9767 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
9768 goto do_add;
9769
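/* An if clause with a directive-name modifier, e.g.

     #pragma omp target parallel if (target: expr)

   must, once the combined construct has been split, name the construct
   the clause ends up on; mismatched modifiers are diagnosed below.  */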
9770 case OMP_CLAUSE_IF:
9771 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9772 && OMP_CLAUSE_IF_MODIFIER (c) != code)
9773 {
9774 const char *p[2];
9775 for (int i = 0; i < 2; i++)
9776 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9777 {
9778 case VOID_CST: p[i] = "cancel"; break;
9779 case OMP_PARALLEL: p[i] = "parallel"; break;
9780 case OMP_SIMD: p[i] = "simd"; break;
9781 case OMP_TASK: p[i] = "task"; break;
9782 case OMP_TASKLOOP: p[i] = "taskloop"; break;
9783 case OMP_TARGET_DATA: p[i] = "target data"; break;
9784 case OMP_TARGET: p[i] = "target"; break;
9785 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9786 case OMP_TARGET_ENTER_DATA:
9787 p[i] = "target enter data"; break;
9788 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9789 default: gcc_unreachable ();
9790 }
9791 error_at (OMP_CLAUSE_LOCATION (c),
9792 "expected %qs %<if%> clause modifier rather than %qs",
9793 p[0], p[1]);
9794 remove = true;
9795 }
9796 /* Fall through. */
9797
9798 case OMP_CLAUSE_FINAL:
9799 OMP_CLAUSE_OPERAND (c, 0)
9800 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9801 /* Fall through. */
9802
9803 case OMP_CLAUSE_SCHEDULE:
9804 case OMP_CLAUSE_NUM_THREADS:
9805 case OMP_CLAUSE_NUM_TEAMS:
9806 case OMP_CLAUSE_THREAD_LIMIT:
9807 case OMP_CLAUSE_DIST_SCHEDULE:
9808 case OMP_CLAUSE_DEVICE:
9809 case OMP_CLAUSE_PRIORITY:
9810 case OMP_CLAUSE_GRAINSIZE:
9811 case OMP_CLAUSE_NUM_TASKS:
9812 case OMP_CLAUSE_HINT:
9813 case OMP_CLAUSE_ASYNC:
9814 case OMP_CLAUSE_WAIT:
9815 case OMP_CLAUSE_NUM_GANGS:
9816 case OMP_CLAUSE_NUM_WORKERS:
9817 case OMP_CLAUSE_VECTOR_LENGTH:
9818 case OMP_CLAUSE_WORKER:
9819 case OMP_CLAUSE_VECTOR:
9820 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9821 is_gimple_val, fb_rvalue) == GS_ERROR)
9822 remove = true;
9823 break;
9824
9825 case OMP_CLAUSE_GANG:
9826 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9827 is_gimple_val, fb_rvalue) == GS_ERROR)
9828 remove = true;
9829 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9830 is_gimple_val, fb_rvalue) == GS_ERROR)
9831 remove = true;
9832 break;
9833
9834 case OMP_CLAUSE_NOWAIT:
9835 nowait = 1;
9836 break;
9837
9838 case OMP_CLAUSE_ORDERED:
9839 case OMP_CLAUSE_UNTIED:
9840 case OMP_CLAUSE_COLLAPSE:
9841 case OMP_CLAUSE_TILE:
9842 case OMP_CLAUSE_AUTO:
9843 case OMP_CLAUSE_SEQ:
9844 case OMP_CLAUSE_INDEPENDENT:
9845 case OMP_CLAUSE_MERGEABLE:
9846 case OMP_CLAUSE_PROC_BIND:
9847 case OMP_CLAUSE_SAFELEN:
9848 case OMP_CLAUSE_SIMDLEN:
9849 case OMP_CLAUSE_NOGROUP:
9850 case OMP_CLAUSE_THREADS:
9851 case OMP_CLAUSE_SIMD:
9852 case OMP_CLAUSE_BIND:
9853 case OMP_CLAUSE_IF_PRESENT:
9854 case OMP_CLAUSE_FINALIZE:
9855 break;
9856
9857 case OMP_CLAUSE_ORDER:
9858 ctx->order_concurrent = true;
9859 break;
9860
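/* E.g. defaultmap (firstprivate: scalar) changes the implicit
   data-mapping rule for scalar variables only; when no category is
   given, all of scalar, aggregate, allocatable and pointer are
   affected.  */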
9861 case OMP_CLAUSE_DEFAULTMAP:
9862 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9863 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9864 {
9865 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9866 gdmkmin = GDMK_SCALAR;
9867 gdmkmax = GDMK_POINTER;
9868 break;
9869 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9870 gdmkmin = gdmkmax = GDMK_SCALAR;
9871 break;
9872 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9873 gdmkmin = gdmkmax = GDMK_AGGREGATE;
9874 break;
9875 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9876 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9877 break;
9878 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9879 gdmkmin = gdmkmax = GDMK_POINTER;
9880 break;
9881 default:
9882 gcc_unreachable ();
9883 }
9884 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9885 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9886 {
9887 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9888 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9889 break;
9890 case OMP_CLAUSE_DEFAULTMAP_TO:
9891 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9892 break;
9893 case OMP_CLAUSE_DEFAULTMAP_FROM:
9894 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9895 break;
9896 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9897 ctx->defaultmap[gdmk] = GOVD_MAP;
9898 break;
9899 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9900 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9901 break;
9902 case OMP_CLAUSE_DEFAULTMAP_NONE:
9903 ctx->defaultmap[gdmk] = 0;
9904 break;
9905 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9906 switch (gdmk)
9907 {
9908 case GDMK_SCALAR:
9909 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9910 break;
9911 case GDMK_AGGREGATE:
9912 case GDMK_ALLOCATABLE:
9913 ctx->defaultmap[gdmk] = GOVD_MAP;
9914 break;
9915 case GDMK_POINTER:
9916 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9917 break;
9918 default:
9919 gcc_unreachable ();
9920 }
9921 break;
9922 default:
9923 gcc_unreachable ();
9924 }
9925 break;
9926
9927 case OMP_CLAUSE_ALIGNED:
9928 decl = OMP_CLAUSE_DECL (c);
9929 if (error_operand_p (decl))
9930 {
9931 remove = true;
9932 break;
9933 }
9934 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9935 is_gimple_val, fb_rvalue) == GS_ERROR)
9936 {
9937 remove = true;
9938 break;
9939 }
9940 if (!is_global_var (decl)
9941 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9942 omp_add_variable (ctx, decl, GOVD_ALIGNED);
9943 break;
9944
9945 case OMP_CLAUSE_NONTEMPORAL:
9946 decl = OMP_CLAUSE_DECL (c);
9947 if (error_operand_p (decl))
9948 {
9949 remove = true;
9950 break;
9951 }
9952 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9953 break;
9954
9955 case OMP_CLAUSE_ALLOCATE:
9956 decl = OMP_CLAUSE_DECL (c);
9957 if (error_operand_p (decl))
9958 {
9959 remove = true;
9960 break;
9961 }
9962 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
9963 is_gimple_val, fb_rvalue) == GS_ERROR)
9964 {
9965 remove = true;
9966 break;
9967 }
9968 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
9969 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
9970 == INTEGER_CST))
9971 ;
9972 else if (code == OMP_TASKLOOP
9973 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
9974 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
9975 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
9976 pre_p, NULL, false);
9977 break;
9978
9979 case OMP_CLAUSE_DEFAULT:
9980 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9981 break;
9982
9983 case OMP_CLAUSE_INCLUSIVE:
9984 case OMP_CLAUSE_EXCLUSIVE:
9985 decl = OMP_CLAUSE_DECL (c);
9986 {
9987 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9988 (splay_tree_key) decl);
9989 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9990 {
9991 error_at (OMP_CLAUSE_LOCATION (c),
9992 "%qD specified in %qs clause but not in %<inscan%> "
9993 "%<reduction%> clause on the containing construct",
9994 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9995 remove = true;
9996 }
9997 else
9998 {
9999 n->value |= GOVD_REDUCTION_INSCAN;
10000 if (outer_ctx->region_type == ORT_SIMD
10001 && outer_ctx->outer_context
10002 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
10003 {
10004 n = splay_tree_lookup (outer_ctx->outer_context->variables,
10005 (splay_tree_key) decl);
10006 if (n && (n->value & GOVD_REDUCTION) != 0)
10007 n->value |= GOVD_REDUCTION_INSCAN;
10008 }
10009 }
10010 }
10011 break;
10012
10013 default:
10014 gcc_unreachable ();
10015 }
10016
10017 if (code == OACC_DATA
10018 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10019 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10020 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10021 remove = true;
10022 if (remove)
10023 *list_p = OMP_CLAUSE_CHAIN (c);
10024 else
10025 list_p = &OMP_CLAUSE_CHAIN (c);
10026 }
10027
10028 ctx->clauses = *orig_list_p;
10029 gimplify_omp_ctxp = ctx;
10030 if (struct_map_to_clause)
10031 delete struct_map_to_clause;
10032 if (struct_deref_set)
10033 delete struct_deref_set;
10034 }
10035
10036 /* Return true if DECL is a candidate for shared to firstprivate
10037 optimization. We only consider non-addressable scalars that are
10038 not too big and are not references. */
10039
10040 static bool
10041 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
10042 {
10043 if (TREE_ADDRESSABLE (decl))
10044 return false;
10045 tree type = TREE_TYPE (decl);
10046 if (!is_gimple_reg_type (type)
10047 || TREE_CODE (type) == REFERENCE_TYPE
10048 || TREE_ADDRESSABLE (type))
10049 return false;
10050 /* Don't optimize too large decls, as each thread/task will have
10051 its own. */
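/* E.g. with 64-bit pointers the cutoff is 4 * 8 = 32 bytes.  */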
10052 HOST_WIDE_INT len = int_size_in_bytes (type);
10053 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
10054 return false;
10055 if (lang_hooks.decls.omp_privatize_by_reference (decl))
10056 return false;
10057 return true;
10058 }
10059
10060 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10061 For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
10062 mark it as GOVD_WRITTEN in outer contexts. */
10063
10064 static void
10065 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
10066 {
10067 for (; ctx; ctx = ctx->outer_context)
10068 {
10069 splay_tree_node n = splay_tree_lookup (ctx->variables,
10070 (splay_tree_key) decl);
10071 if (n == NULL)
10072 continue;
10073 else if (n->value & GOVD_SHARED)
10074 {
10075 n->value |= GOVD_WRITTEN;
10076 return;
10077 }
10078 else if (n->value & GOVD_DATA_SHARE_CLASS)
10079 return;
10080 }
10081 }
10082
10083 /* Helper callback for walk_gimple_seq to discover possible stores
10084 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10085 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
10086 context. */
10087
10088 static tree
10089 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
10090 {
10091 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10092
10093 *walk_subtrees = 0;
10094 if (!wi->is_lhs)
10095 return NULL_TREE;
10096
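/* Peel off handled components and *&DECL-style MEM_REFs so that a
   store to e.g. d.a[i] is attributed to the underlying decl D.  */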
10097 tree op = *tp;
10098 do
10099 {
10100 if (handled_component_p (op))
10101 op = TREE_OPERAND (op, 0);
10102 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
10103 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
10104 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
10105 else
10106 break;
10107 }
10108 while (1);
10109 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
10110 return NULL_TREE;
10111
10112 omp_mark_stores (gimplify_omp_ctxp, op);
10113 return NULL_TREE;
10114 }
10115
10116 /* Helper callback for walk_gimple_seq to discover possible stores
10117 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
10118 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
10119 context. */
10120
10121 static tree
10122 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
10123 bool *handled_ops_p,
10124 struct walk_stmt_info *wi)
10125 {
10126 gimple *stmt = gsi_stmt (*gsi_p);
10127 switch (gimple_code (stmt))
10128 {
10129 /* Don't recurse on OpenMP constructs for which
10130 gimplify_adjust_omp_clauses already handled the bodies,
10131 except handle gimple_omp_for_pre_body. */
10132 case GIMPLE_OMP_FOR:
10133 *handled_ops_p = true;
10134 if (gimple_omp_for_pre_body (stmt))
10135 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10136 omp_find_stores_stmt, omp_find_stores_op, wi);
10137 break;
10138 case GIMPLE_OMP_PARALLEL:
10139 case GIMPLE_OMP_TASK:
10140 case GIMPLE_OMP_SECTIONS:
10141 case GIMPLE_OMP_SINGLE:
10142 case GIMPLE_OMP_TARGET:
10143 case GIMPLE_OMP_TEAMS:
10144 case GIMPLE_OMP_CRITICAL:
10145 *handled_ops_p = true;
10146 break;
10147 default:
10148 break;
10149 }
10150 return NULL_TREE;
10151 }
10152
10153 struct gimplify_adjust_omp_clauses_data
10154 {
10155 tree *list_p;
10156 gimple_seq *pre_p;
10157 };
10158
10159 /* For all variables that were not actually used within the context,
10160 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
10161
10162 static int
10163 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
10164 {
10165 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
10166 gimple_seq *pre_p
10167 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
10168 tree decl = (tree) n->key;
10169 unsigned flags = n->value;
10170 enum omp_clause_code code;
10171 tree clause;
10172 bool private_debug;
10173
10174 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10175 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
10176 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
10177 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
10178 return 0;
10179 if ((flags & GOVD_SEEN) == 0)
10180 return 0;
10181 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
10182 return 0;
10183 if (flags & GOVD_DEBUG_PRIVATE)
10184 {
10185 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
10186 private_debug = true;
10187 }
10188 else if (flags & GOVD_MAP)
10189 private_debug = false;
10190 else
10191 private_debug
10192 = lang_hooks.decls.omp_private_debug_clause (decl,
10193 !!(flags & GOVD_SHARED));
10194 if (private_debug)
10195 code = OMP_CLAUSE_PRIVATE;
10196 else if (flags & GOVD_MAP)
10197 {
10198 code = OMP_CLAUSE_MAP;
10199 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10200 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10201 {
10202 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
10203 return 0;
10204 }
10205 if (VAR_P (decl)
10206 && DECL_IN_CONSTANT_POOL (decl)
10207 && !lookup_attribute ("omp declare target",
10208 DECL_ATTRIBUTES (decl)))
10209 {
10210 tree id = get_identifier ("omp declare target");
10211 DECL_ATTRIBUTES (decl)
10212 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
10213 varpool_node *node = varpool_node::get (decl);
10214 if (node)
10215 {
10216 node->offloadable = 1;
10217 if (ENABLE_OFFLOADING)
10218 g->have_offload = true;
10219 }
10220 }
10221 }
10222 else if (flags & GOVD_SHARED)
10223 {
10224 if (is_global_var (decl))
10225 {
10226 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10227 while (ctx != NULL)
10228 {
10229 splay_tree_node on
10230 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10231 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
10232 | GOVD_PRIVATE | GOVD_REDUCTION
10233 | GOVD_LINEAR | GOVD_MAP)) != 0)
10234 break;
10235 ctx = ctx->outer_context;
10236 }
10237 if (ctx == NULL)
10238 return 0;
10239 }
10240 code = OMP_CLAUSE_SHARED;
10241 /* Don't optimize shared into firstprivate for read-only vars
10242 on tasks with a depend clause; we shouldn't try to copy them
10243 until the dependencies are satisfied. */
10244 if (gimplify_omp_ctxp->has_depend)
10245 flags |= GOVD_WRITTEN;
10246 }
10247 else if (flags & GOVD_PRIVATE)
10248 code = OMP_CLAUSE_PRIVATE;
10249 else if (flags & GOVD_FIRSTPRIVATE)
10250 {
10251 code = OMP_CLAUSE_FIRSTPRIVATE;
10252 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
10253 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
10254 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
10255 {
10256 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
10257 "%<target%> construct", decl);
10258 return 0;
10259 }
10260 }
10261 else if (flags & GOVD_LASTPRIVATE)
10262 code = OMP_CLAUSE_LASTPRIVATE;
10263 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
10264 return 0;
10265 else if (flags & GOVD_CONDTEMP)
10266 {
10267 code = OMP_CLAUSE__CONDTEMP_;
10268 gimple_add_tmp_var (decl);
10269 }
10270 else
10271 gcc_unreachable ();
10272
10273 if (((flags & GOVD_LASTPRIVATE)
10274 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
10275 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10276 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10277
10278 tree chain = *list_p;
10279 clause = build_omp_clause (input_location, code);
10280 OMP_CLAUSE_DECL (clause) = decl;
10281 OMP_CLAUSE_CHAIN (clause) = chain;
10282 if (private_debug)
10283 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
10284 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
10285 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
10286 else if (code == OMP_CLAUSE_SHARED
10287 && (flags & GOVD_WRITTEN) == 0
10288 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10289 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
10290 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
10291 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
10292 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
10293 {
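/* Emit a zero-sized GOMP_MAP_ALLOC of the pointed-to data together
   with a GOMP_MAP_FIRSTPRIVATE_POINTER of DECL itself, marking the
   former as a maybe-zero-length array section.  */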
10294 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
10295 OMP_CLAUSE_DECL (nc) = decl;
10296 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10297 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
10298 OMP_CLAUSE_DECL (clause)
10299 = build_simple_mem_ref_loc (input_location, decl);
10300 OMP_CLAUSE_DECL (clause)
10301 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
10302 build_int_cst (build_pointer_type (char_type_node), 0));
10303 OMP_CLAUSE_SIZE (clause) = size_zero_node;
10304 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10305 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
10306 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
10307 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10308 OMP_CLAUSE_CHAIN (nc) = chain;
10309 OMP_CLAUSE_CHAIN (clause) = nc;
10310 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10311 gimplify_omp_ctxp = ctx->outer_context;
10312 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
10313 pre_p, NULL, is_gimple_val, fb_rvalue);
10314 gimplify_omp_ctxp = ctx;
10315 }
10316 else if (code == OMP_CLAUSE_MAP)
10317 {
10318 int kind;
10319 /* Not all combinations of these GOVD_MAP flags are actually valid. */
10320 switch (flags & (GOVD_MAP_TO_ONLY
10321 | GOVD_MAP_FORCE
10322 | GOVD_MAP_FORCE_PRESENT
10323 | GOVD_MAP_ALLOC_ONLY
10324 | GOVD_MAP_FROM_ONLY))
10325 {
10326 case 0:
10327 kind = GOMP_MAP_TOFROM;
10328 break;
10329 case GOVD_MAP_FORCE:
10330 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
10331 break;
10332 case GOVD_MAP_TO_ONLY:
10333 kind = GOMP_MAP_TO;
10334 break;
10335 case GOVD_MAP_FROM_ONLY:
10336 kind = GOMP_MAP_FROM;
10337 break;
10338 case GOVD_MAP_ALLOC_ONLY:
10339 kind = GOMP_MAP_ALLOC;
10340 break;
10341 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
10342 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
10343 break;
10344 case GOVD_MAP_FORCE_PRESENT:
10345 kind = GOMP_MAP_FORCE_PRESENT;
10346 break;
10347 default:
10348 gcc_unreachable ();
10349 }
10350 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
10351 if (DECL_SIZE (decl)
10352 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10353 {
10354 tree decl2 = DECL_VALUE_EXPR (decl);
10355 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10356 decl2 = TREE_OPERAND (decl2, 0);
10357 gcc_assert (DECL_P (decl2));
10358 tree mem = build_simple_mem_ref (decl2);
10359 OMP_CLAUSE_DECL (clause) = mem;
10360 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10361 if (gimplify_omp_ctxp->outer_context)
10362 {
10363 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
10364 omp_notice_variable (ctx, decl2, true);
10365 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
10366 }
10367 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10368 OMP_CLAUSE_MAP);
10369 OMP_CLAUSE_DECL (nc) = decl;
10370 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10371 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
10372 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
10373 else
10374 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10375 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10376 OMP_CLAUSE_CHAIN (clause) = nc;
10377 }
10378 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
10379 && lang_hooks.decls.omp_privatize_by_reference (decl))
10380 {
10381 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
10382 OMP_CLAUSE_SIZE (clause)
10383 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
10384 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10385 gimplify_omp_ctxp = ctx->outer_context;
10386 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
10387 pre_p, NULL, is_gimple_val, fb_rvalue);
10388 gimplify_omp_ctxp = ctx;
10389 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
10390 OMP_CLAUSE_MAP);
10391 OMP_CLAUSE_DECL (nc) = decl;
10392 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10393 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
10394 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
10395 OMP_CLAUSE_CHAIN (clause) = nc;
10396 }
10397 else
10398 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
10399 }
10400 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
10401 {
10402 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
10403 OMP_CLAUSE_DECL (nc) = decl;
10404 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
10405 OMP_CLAUSE_CHAIN (nc) = chain;
10406 OMP_CLAUSE_CHAIN (clause) = nc;
10407 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10408 gimplify_omp_ctxp = ctx->outer_context;
10409 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10410 (ctx->region_type & ORT_ACC) != 0);
10411 gimplify_omp_ctxp = ctx;
10412 }
10413 *list_p = clause;
10414 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10415 gimplify_omp_ctxp = ctx->outer_context;
10416 lang_hooks.decls.omp_finish_clause (clause, pre_p,
10417 (ctx->region_type & ORT_ACC) != 0);
10418 if (gimplify_omp_ctxp)
10419 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
10420 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
10421 && DECL_P (OMP_CLAUSE_SIZE (clause)))
10422 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
10423 true);
10424 gimplify_omp_ctxp = ctx;
10425 return 0;
10426 }
10427
10428 static void
10429 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10430 enum tree_code code)
10431 {
10432 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10433 tree *orig_list_p = list_p;
10434 tree c, decl;
10435 bool has_inscan_reductions = false;
10436
10437 if (body)
10438 {
10439 struct gimplify_omp_ctx *octx;
10440 for (octx = ctx; octx; octx = octx->outer_context)
10441 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10442 break;
10443 if (octx)
10444 {
10445 struct walk_stmt_info wi;
10446 memset (&wi, 0, sizeof (wi));
10447 walk_gimple_seq (body, omp_find_stores_stmt,
10448 omp_find_stores_op, &wi);
10449 }
10450 }
10451
10452 if (ctx->add_safelen1)
10453 {
10454 /* If there are VLAs in the body of the simd loop, prevent
10455 vectorization. */
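/* This is done by adding an artificial safelen (1) clause, which
   restricts the simd construct to a single SIMD lane.  */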
10456 gcc_assert (ctx->region_type == ORT_SIMD);
10457 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10458 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10459 OMP_CLAUSE_CHAIN (c) = *list_p;
10460 *list_p = c;
10461 list_p = &OMP_CLAUSE_CHAIN (c);
10462 }
10463
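/* For a worksharing construct combined with parallel, e.g.

     #pragma omp parallel for lastprivate (conditional: x)

   copy the conditional lastprivate clauses from the enclosing parallel
   onto the worksharing construct, adding a firstprivate clause when
   the variable was firstprivatized there.  */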
10464 if (ctx->region_type == ORT_WORKSHARE
10465 && ctx->outer_context
10466 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10467 {
10468 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10470 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10471 {
10472 decl = OMP_CLAUSE_DECL (c);
10473 splay_tree_node n
10474 = splay_tree_lookup (ctx->outer_context->variables,
10475 (splay_tree_key) decl);
10476 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10477 (splay_tree_key) decl));
10478 omp_add_variable (ctx, decl, n->value);
10479 tree c2 = copy_node (c);
10480 OMP_CLAUSE_CHAIN (c2) = *list_p;
10481 *list_p = c2;
10482 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10483 continue;
10484 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10485 OMP_CLAUSE_FIRSTPRIVATE);
10486 OMP_CLAUSE_DECL (c2) = decl;
10487 OMP_CLAUSE_CHAIN (c2) = *list_p;
10488 *list_p = c2;
10489 }
10490 }
10491 while ((c = *list_p) != NULL)
10492 {
10493 splay_tree_node n;
10494 bool remove = false;
10495
10496 switch (OMP_CLAUSE_CODE (c))
10497 {
10498 case OMP_CLAUSE_FIRSTPRIVATE:
10499 if ((ctx->region_type & ORT_TARGET)
10500 && (ctx->region_type & ORT_ACC) == 0
10501 && TYPE_ATOMIC (strip_array_types
10502 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10503 {
10504 error_at (OMP_CLAUSE_LOCATION (c),
10505 "%<_Atomic%> %qD in %<firstprivate%> clause on "
10506 "%<target%> construct", OMP_CLAUSE_DECL (c));
10507 remove = true;
10508 break;
10509 }
10510 /* FALLTHRU */
10511 case OMP_CLAUSE_PRIVATE:
10512 case OMP_CLAUSE_SHARED:
10513 case OMP_CLAUSE_LINEAR:
10514 decl = OMP_CLAUSE_DECL (c);
10515 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10516 remove = !(n->value & GOVD_SEEN);
10517 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10518 && code == OMP_PARALLEL
10519 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10520 remove = true;
10521 if (! remove)
10522 {
10523 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10524 if ((n->value & GOVD_DEBUG_PRIVATE)
10525 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10526 {
10527 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10528 || ((n->value & GOVD_DATA_SHARE_CLASS)
10529 == GOVD_SHARED));
10530 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10531 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10532 }
10533 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10534 && ctx->has_depend
10535 && DECL_P (decl))
10536 n->value |= GOVD_WRITTEN;
10537 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10538 && (n->value & GOVD_WRITTEN) == 0
10539 && DECL_P (decl)
10540 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10541 OMP_CLAUSE_SHARED_READONLY (c) = 1;
10542 else if (DECL_P (decl)
10543 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10544 && (n->value & GOVD_WRITTEN) != 0)
10545 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10546 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10547 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10548 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10549 }
10550 else
10551 n->value &= ~GOVD_EXPLICIT;
10552 break;
10553
10554 case OMP_CLAUSE_LASTPRIVATE:
10555 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10556 accurately reflect the presence of a FIRSTPRIVATE clause. */
10557 decl = OMP_CLAUSE_DECL (c);
10558 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10559 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10560 = (n->value & GOVD_FIRSTPRIVATE) != 0;
10561 if (code == OMP_DISTRIBUTE
10562 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10563 {
10564 remove = true;
10565 error_at (OMP_CLAUSE_LOCATION (c),
10566 "same variable used in %<firstprivate%> and "
10567 "%<lastprivate%> clauses on %<distribute%> "
10568 "construct");
10569 }
10570 if (!remove
10571 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10572 && DECL_P (decl)
10573 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10574 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10575 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10576 remove = true;
10577 break;
10578
10579 case OMP_CLAUSE_ALIGNED:
10580 decl = OMP_CLAUSE_DECL (c);
10581 if (!is_global_var (decl))
10582 {
10583 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10584 remove = n == NULL || !(n->value & GOVD_SEEN);
10585 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10586 {
10587 struct gimplify_omp_ctx *octx;
10588 if (n != NULL
10589 && (n->value & (GOVD_DATA_SHARE_CLASS
10590 & ~GOVD_FIRSTPRIVATE)))
10591 remove = true;
10592 else
10593 for (octx = ctx->outer_context; octx;
10594 octx = octx->outer_context)
10595 {
10596 n = splay_tree_lookup (octx->variables,
10597 (splay_tree_key) decl);
10598 if (n == NULL)
10599 continue;
10600 if (n->value & GOVD_LOCAL)
10601 break;
10602 /* We have to avoid assigning a shared variable
10603 to itself when trying to add
10604 __builtin_assume_aligned. */
10605 if (n->value & GOVD_SHARED)
10606 {
10607 remove = true;
10608 break;
10609 }
10610 }
10611 }
10612 }
10613 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10614 {
10615 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10616 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10617 remove = true;
10618 }
10619 break;
10620
10621 case OMP_CLAUSE_NONTEMPORAL:
10622 decl = OMP_CLAUSE_DECL (c);
10623 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10624 remove = n == NULL || !(n->value & GOVD_SEEN);
10625 break;
10626
10627 case OMP_CLAUSE_MAP:
10628 if (code == OMP_TARGET_EXIT_DATA
10629 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10630 {
10631 remove = true;
10632 break;
10633 }
10634 decl = OMP_CLAUSE_DECL (c);
10635 /* Data clauses associated with reductions must be
10636 compatible with present_or_copy. Warn and adjust the clause
10637 if that is not the case. */
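 /* For example, combining 'copyin(x)' with 'reduction(+:x)' on an
 OpenACC parallel construct gets promoted to 'copy' below, because
 the reduction result has to be copied back out. */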
10638 if (ctx->region_type == ORT_ACC_PARALLEL
10639 || ctx->region_type == ORT_ACC_SERIAL)
10640 {
10641 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10642 n = NULL;
10643
10644 if (DECL_P (t))
10645 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10646
10647 if (n && (n->value & GOVD_REDUCTION))
10648 {
10649 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10650
10651 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10652 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10653 && kind != GOMP_MAP_FORCE_PRESENT
10654 && kind != GOMP_MAP_POINTER)
10655 {
10656 warning_at (OMP_CLAUSE_LOCATION (c), 0,
10657 "incompatible data clause with reduction "
10658 "on %qE; promoting to %<present_or_copy%>",
10659 DECL_NAME (t));
10660 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10661 }
10662 }
10663 }
10664 if (!DECL_P (decl))
10665 {
10666 if ((ctx->region_type & ORT_TARGET) != 0
10667 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10668 {
10669 if (TREE_CODE (decl) == INDIRECT_REF
10670 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10671 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10672 == REFERENCE_TYPE))
10673 decl = TREE_OPERAND (decl, 0);
10674 if (TREE_CODE (decl) == COMPONENT_REF)
10675 {
10676 while (TREE_CODE (decl) == COMPONENT_REF)
10677 decl = TREE_OPERAND (decl, 0);
10678 if (DECL_P (decl))
10679 {
10680 n = splay_tree_lookup (ctx->variables,
10681 (splay_tree_key) decl);
10682 if (!(n->value & GOVD_SEEN))
10683 remove = true;
10684 }
10685 }
10686 }
10687 break;
10688 }
10689 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10690 if ((ctx->region_type & ORT_TARGET) != 0
10691 && !(n->value & GOVD_SEEN)
10692 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10693 && (!is_global_var (decl)
10694 || !lookup_attribute ("omp declare target link",
10695 DECL_ATTRIBUTES (decl))))
10696 {
10697 remove = true;
 10698 /* For struct element mapping, if the struct is never referenced
 10699 in the target block and none of the mappings has an always
 10700 modifier, remove all the struct element mappings, which
 10701 immediately follow the GOMP_MAP_STRUCT map clause. */
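 /* Note that OMP_CLAUSE_SIZE of a GOMP_MAP_STRUCT clause holds the
 number of element mappings chained after it, which is why the loop
 below can simply unlink that many following clauses. */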
10702 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10703 {
10704 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10705 while (cnt--)
10706 OMP_CLAUSE_CHAIN (c)
10707 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10708 }
10709 }
10710 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10711 && (code == OMP_TARGET_EXIT_DATA
10712 || code == OACC_EXIT_DATA))
10713 remove = true;
10714 else if (DECL_SIZE (decl)
10715 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10716 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10717 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10718 && (OMP_CLAUSE_MAP_KIND (c)
10719 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10720 {
10721 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10722 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10723 INTEGER_CST. */
10724 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10725
10726 tree decl2 = DECL_VALUE_EXPR (decl);
10727 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10728 decl2 = TREE_OPERAND (decl2, 0);
10729 gcc_assert (DECL_P (decl2));
10730 tree mem = build_simple_mem_ref (decl2);
10731 OMP_CLAUSE_DECL (c) = mem;
10732 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10733 if (ctx->outer_context)
10734 {
10735 omp_notice_variable (ctx->outer_context, decl2, true);
10736 omp_notice_variable (ctx->outer_context,
10737 OMP_CLAUSE_SIZE (c), true);
10738 }
10739 if (((ctx->region_type & ORT_TARGET) != 0
10740 || !ctx->target_firstprivatize_array_bases)
10741 && ((n->value & GOVD_SEEN) == 0
10742 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10743 {
10744 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10745 OMP_CLAUSE_MAP);
10746 OMP_CLAUSE_DECL (nc) = decl;
10747 OMP_CLAUSE_SIZE (nc) = size_zero_node;
10748 if (ctx->target_firstprivatize_array_bases)
10749 OMP_CLAUSE_SET_MAP_KIND (nc,
10750 GOMP_MAP_FIRSTPRIVATE_POINTER);
10751 else
10752 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10753 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10754 OMP_CLAUSE_CHAIN (c) = nc;
10755 c = nc;
10756 }
10757 }
10758 else
10759 {
10760 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10761 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10762 gcc_assert ((n->value & GOVD_SEEN) == 0
10763 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10764 == 0));
10765 }
10766 break;
10767
10768 case OMP_CLAUSE_TO:
10769 case OMP_CLAUSE_FROM:
10770 case OMP_CLAUSE__CACHE_:
10771 decl = OMP_CLAUSE_DECL (c);
10772 if (!DECL_P (decl))
10773 break;
10774 if (DECL_SIZE (decl)
10775 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10776 {
10777 tree decl2 = DECL_VALUE_EXPR (decl);
10778 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10779 decl2 = TREE_OPERAND (decl2, 0);
10780 gcc_assert (DECL_P (decl2));
10781 tree mem = build_simple_mem_ref (decl2);
10782 OMP_CLAUSE_DECL (c) = mem;
10783 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10784 if (ctx->outer_context)
10785 {
10786 omp_notice_variable (ctx->outer_context, decl2, true);
10787 omp_notice_variable (ctx->outer_context,
10788 OMP_CLAUSE_SIZE (c), true);
10789 }
10790 }
10791 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10792 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10793 break;
10794
10795 case OMP_CLAUSE_REDUCTION:
10796 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10797 {
10798 decl = OMP_CLAUSE_DECL (c);
10799 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10800 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10801 {
10802 remove = true;
10803 error_at (OMP_CLAUSE_LOCATION (c),
10804 "%qD specified in %<inscan%> %<reduction%> clause "
10805 "but not in %<scan%> directive clause", decl);
10806 break;
10807 }
10808 has_inscan_reductions = true;
10809 }
10810 /* FALLTHRU */
10811 case OMP_CLAUSE_IN_REDUCTION:
10812 case OMP_CLAUSE_TASK_REDUCTION:
10813 decl = OMP_CLAUSE_DECL (c);
10814 /* OpenACC reductions need a present_or_copy data clause.
 10815 Add one if necessary. Emit an error when the reduction is private. */
10816 if (ctx->region_type == ORT_ACC_PARALLEL
10817 || ctx->region_type == ORT_ACC_SERIAL)
10818 {
10819 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10820 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10821 {
10822 remove = true;
10823 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10824 "reduction on %qE", DECL_NAME (decl));
10825 }
10826 else if ((n->value & GOVD_MAP) == 0)
10827 {
10828 tree next = OMP_CLAUSE_CHAIN (c);
10829 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10830 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10831 OMP_CLAUSE_DECL (nc) = decl;
10832 OMP_CLAUSE_CHAIN (c) = nc;
10833 lang_hooks.decls.omp_finish_clause (nc, pre_p,
10834 (ctx->region_type
10835 & ORT_ACC) != 0);
10836 while (1)
10837 {
10838 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10839 if (OMP_CLAUSE_CHAIN (nc) == NULL)
10840 break;
10841 nc = OMP_CLAUSE_CHAIN (nc);
10842 }
10843 OMP_CLAUSE_CHAIN (nc) = next;
10844 n->value |= GOVD_MAP;
10845 }
10846 }
10847 if (DECL_P (decl)
10848 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10849 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10850 break;
10851
10852 case OMP_CLAUSE_ALLOCATE:
10853 decl = OMP_CLAUSE_DECL (c);
10854 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10855 if (n != NULL && !(n->value & GOVD_SEEN))
10856 {
10857 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
10858 != 0
10859 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
10860 remove = true;
10861 }
10862 if (!remove
10863 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
10864 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
10865 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
10866 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
10867 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
10868 {
10869 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
10870 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
10871 if (n == NULL)
10872 {
10873 enum omp_clause_default_kind default_kind
10874 = ctx->default_kind;
10875 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
10876 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10877 true);
10878 ctx->default_kind = default_kind;
10879 }
10880 else
10881 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10882 true);
10883 }
10884 break;
10885
10886 case OMP_CLAUSE_COPYIN:
10887 case OMP_CLAUSE_COPYPRIVATE:
10888 case OMP_CLAUSE_IF:
10889 case OMP_CLAUSE_NUM_THREADS:
10890 case OMP_CLAUSE_NUM_TEAMS:
10891 case OMP_CLAUSE_THREAD_LIMIT:
10892 case OMP_CLAUSE_DIST_SCHEDULE:
10893 case OMP_CLAUSE_DEVICE:
10894 case OMP_CLAUSE_SCHEDULE:
10895 case OMP_CLAUSE_NOWAIT:
10896 case OMP_CLAUSE_ORDERED:
10897 case OMP_CLAUSE_DEFAULT:
10898 case OMP_CLAUSE_UNTIED:
10899 case OMP_CLAUSE_COLLAPSE:
10900 case OMP_CLAUSE_FINAL:
10901 case OMP_CLAUSE_MERGEABLE:
10902 case OMP_CLAUSE_PROC_BIND:
10903 case OMP_CLAUSE_SAFELEN:
10904 case OMP_CLAUSE_SIMDLEN:
10905 case OMP_CLAUSE_DEPEND:
10906 case OMP_CLAUSE_PRIORITY:
10907 case OMP_CLAUSE_GRAINSIZE:
10908 case OMP_CLAUSE_NUM_TASKS:
10909 case OMP_CLAUSE_NOGROUP:
10910 case OMP_CLAUSE_THREADS:
10911 case OMP_CLAUSE_SIMD:
10912 case OMP_CLAUSE_HINT:
10913 case OMP_CLAUSE_DEFAULTMAP:
10914 case OMP_CLAUSE_ORDER:
10915 case OMP_CLAUSE_BIND:
10916 case OMP_CLAUSE_DETACH:
10917 case OMP_CLAUSE_USE_DEVICE_PTR:
10918 case OMP_CLAUSE_USE_DEVICE_ADDR:
10919 case OMP_CLAUSE_IS_DEVICE_PTR:
10920 case OMP_CLAUSE_ASYNC:
10921 case OMP_CLAUSE_WAIT:
10922 case OMP_CLAUSE_INDEPENDENT:
10923 case OMP_CLAUSE_NUM_GANGS:
10924 case OMP_CLAUSE_NUM_WORKERS:
10925 case OMP_CLAUSE_VECTOR_LENGTH:
10926 case OMP_CLAUSE_GANG:
10927 case OMP_CLAUSE_WORKER:
10928 case OMP_CLAUSE_VECTOR:
10929 case OMP_CLAUSE_AUTO:
10930 case OMP_CLAUSE_SEQ:
10931 case OMP_CLAUSE_TILE:
10932 case OMP_CLAUSE_IF_PRESENT:
10933 case OMP_CLAUSE_FINALIZE:
10934 case OMP_CLAUSE_INCLUSIVE:
10935 case OMP_CLAUSE_EXCLUSIVE:
10936 break;
10937
10938 default:
10939 gcc_unreachable ();
10940 }
10941
10942 if (remove)
10943 *list_p = OMP_CLAUSE_CHAIN (c);
10944 else
10945 list_p = &OMP_CLAUSE_CHAIN (c);
10946 }
10947
10948 /* Add in any implicit data sharing. */
10949 struct gimplify_adjust_omp_clauses_data data;
10950 data.list_p = list_p;
10951 data.pre_p = pre_p;
10952 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10953
10954 if (has_inscan_reductions)
10955 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10956 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10957 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10958 {
10959 error_at (OMP_CLAUSE_LOCATION (c),
10960 "%<inscan%> %<reduction%> clause used together with "
10961 "%<linear%> clause for a variable other than loop "
10962 "iterator");
10963 break;
10964 }
10965
10966 gimplify_omp_ctxp = ctx->outer_context;
10967 delete_omp_context (ctx);
10968 }
10969
 10970 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
 10971 -1 if it is unknown yet (simd is involved; it won't be known until
 10972 vectorization) and 1 if they do match. If SCORES is non-NULL, it
 10973 should point to an array of at least 2*NCONSTRUCTS+2 ints, which will
 10974 be filled with the positions of the CONSTRUCTS (position -1 if one
 10975 will never match) followed by the number of constructs in the OpenMP
 10976 context construct trait. If the score depends on whether the code
 10977 will be in a declare simd clone or not, the function returns 2 and
 10978 there will be two sets of scores: the first for the case that it is
 10979 not in a declare simd clone, the second for the case that it is. */
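 /* As a sketch of the matching rules: in a context nested as
 #pragma omp target
 #pragma omp parallel
 #pragma omp for
 the selector construct={target,parallel,for} matches (return 1),
 while construct={parallel,target} does not (return 0), because the
 constructs must appear in the context trait in that order. */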
10980
10981 int
10982 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
10983 int *scores)
10984 {
10985 int matched = 0, cnt = 0;
10986 bool simd_seen = false;
10987 bool target_seen = false;
10988 int declare_simd_cnt = -1;
10989 auto_vec<enum tree_code, 16> codes;
10990 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
10991 {
10992 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
10993 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
10994 == ORT_TARGET && ctx->code == OMP_TARGET)
10995 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
10996 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
10997 || (ctx->region_type == ORT_SIMD
10998 && ctx->code == OMP_SIMD
10999 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
11000 {
11001 ++cnt;
11002 if (scores)
11003 codes.safe_push (ctx->code);
11004 else if (matched < nconstructs && ctx->code == constructs[matched])
11005 {
11006 if (ctx->code == OMP_SIMD)
11007 {
11008 if (matched)
11009 return 0;
11010 simd_seen = true;
11011 }
11012 ++matched;
11013 }
11014 if (ctx->code == OMP_TARGET)
11015 {
11016 if (scores == NULL)
11017 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
11018 target_seen = true;
11019 break;
11020 }
11021 }
11022 else if (ctx->region_type == ORT_WORKSHARE
11023 && ctx->code == OMP_LOOP
11024 && ctx->outer_context
11025 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
11026 && ctx->outer_context->outer_context
11027 && ctx->outer_context->outer_context->code == OMP_LOOP
11028 && ctx->outer_context->outer_context->distribute)
11029 ctx = ctx->outer_context->outer_context;
11030 ctx = ctx->outer_context;
11031 }
11032 if (!target_seen
11033 && lookup_attribute ("omp declare simd",
11034 DECL_ATTRIBUTES (current_function_decl)))
11035 {
 11036 /* Declare simd is a maybe case: it is supposed to be added only to
 11037 the clones created by omp-simd-clone.c, not to the base function. */
11038 declare_simd_cnt = cnt++;
11039 if (scores)
11040 codes.safe_push (OMP_SIMD);
11041 else if (cnt == 0
11042 && constructs[0] == OMP_SIMD)
11043 {
11044 gcc_assert (matched == 0);
11045 simd_seen = true;
11046 if (++matched == nconstructs)
11047 return -1;
11048 }
11049 }
11050 if (tree attr = lookup_attribute ("omp declare variant variant",
11051 DECL_ATTRIBUTES (current_function_decl)))
11052 {
11053 enum tree_code variant_constructs[5];
11054 int variant_nconstructs = 0;
11055 if (!target_seen)
11056 variant_nconstructs
11057 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
11058 variant_constructs);
11059 for (int i = 0; i < variant_nconstructs; i++)
11060 {
11061 ++cnt;
11062 if (scores)
11063 codes.safe_push (variant_constructs[i]);
11064 else if (matched < nconstructs
11065 && variant_constructs[i] == constructs[matched])
11066 {
11067 if (variant_constructs[i] == OMP_SIMD)
11068 {
11069 if (matched)
11070 return 0;
11071 simd_seen = true;
11072 }
11073 ++matched;
11074 }
11075 }
11076 }
11077 if (!target_seen
11078 && lookup_attribute ("omp declare target block",
11079 DECL_ATTRIBUTES (current_function_decl)))
11080 {
11081 if (scores)
11082 codes.safe_push (OMP_TARGET);
11083 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
11084 ++matched;
11085 }
11086 if (scores)
11087 {
11088 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
11089 {
11090 int j = codes.length () - 1;
11091 for (int i = nconstructs - 1; i >= 0; i--)
11092 {
11093 while (j >= 0
11094 && (pass != 0 || declare_simd_cnt != j)
11095 && constructs[i] != codes[j])
11096 --j;
11097 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
11098 *scores++ = j - 1;
11099 else
11100 *scores++ = j;
11101 }
11102 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
11103 ? codes.length () - 1 : codes.length ());
11104 }
11105 return declare_simd_cnt == -1 ? 1 : 2;
11106 }
11107 if (matched == nconstructs)
11108 return simd_seen ? -1 : 1;
11109 return 0;
11110 }
11111
11112 /* Gimplify OACC_CACHE. */
11113
11114 static void
11115 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
11116 {
11117 tree expr = *expr_p;
11118
11119 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
11120 OACC_CACHE);
11121 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
11122 OACC_CACHE);
11123
11124 /* TODO: Do something sensible with this information. */
11125
11126 *expr_p = NULL_TREE;
11127 }
11128
 11129 /* Helper function of gimplify_oacc_declare. Its purpose is to translate,
 11130 if required, the 'kind' in CLAUSE into an 'entry' kind plus an 'exit'
 11131 kind. The entry kind replaces the one in CLAUSE, while the exit kind
 11132 is used in a new omp_clause that is returned to the caller. */
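 /* For example, GOMP_MAP_TOFROM (an OpenACC 'copy' on 'declare')
 becomes GOMP_MAP_TO for the entry, and a new GOMP_MAP_FROM clause
 is returned for the exit; GOMP_MAP_ALLOC ('create') keeps its entry
 kind and returns a GOMP_MAP_RELEASE clause for the exit. */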
11133
11134 static tree
11135 gimplify_oacc_declare_1 (tree clause)
11136 {
11137 HOST_WIDE_INT kind, new_op;
11138 bool ret = false;
11139 tree c = NULL;
11140
11141 kind = OMP_CLAUSE_MAP_KIND (clause);
11142
11143 switch (kind)
11144 {
11145 case GOMP_MAP_ALLOC:
11146 new_op = GOMP_MAP_RELEASE;
11147 ret = true;
11148 break;
11149
11150 case GOMP_MAP_FROM:
11151 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
11152 new_op = GOMP_MAP_FROM;
11153 ret = true;
11154 break;
11155
11156 case GOMP_MAP_TOFROM:
11157 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
11158 new_op = GOMP_MAP_FROM;
11159 ret = true;
11160 break;
11161
11162 case GOMP_MAP_DEVICE_RESIDENT:
11163 case GOMP_MAP_FORCE_DEVICEPTR:
11164 case GOMP_MAP_FORCE_PRESENT:
11165 case GOMP_MAP_LINK:
11166 case GOMP_MAP_POINTER:
11167 case GOMP_MAP_TO:
11168 break;
11169
11170 default:
11171 gcc_unreachable ();
11172 break;
11173 }
11174
11175 if (ret)
11176 {
11177 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
11178 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
11179 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
11180 }
11181
11182 return c;
11183 }
11184
11185 /* Gimplify OACC_DECLARE. */
11186
11187 static void
11188 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
11189 {
11190 tree expr = *expr_p;
11191 gomp_target *stmt;
11192 tree clauses, t, decl;
11193
11194 clauses = OACC_DECLARE_CLAUSES (expr);
11195
11196 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
11197 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
11198
11199 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
11200 {
11201 decl = OMP_CLAUSE_DECL (t);
11202
11203 if (TREE_CODE (decl) == MEM_REF)
11204 decl = TREE_OPERAND (decl, 0);
11205
11206 if (VAR_P (decl) && !is_oacc_declared (decl))
11207 {
11208 tree attr = get_identifier ("oacc declare target");
11209 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
11210 DECL_ATTRIBUTES (decl));
11211 }
11212
11213 if (VAR_P (decl)
11214 && !is_global_var (decl)
11215 && DECL_CONTEXT (decl) == current_function_decl)
11216 {
11217 tree c = gimplify_oacc_declare_1 (t);
11218 if (c)
11219 {
11220 if (oacc_declare_returns == NULL)
11221 oacc_declare_returns = new hash_map<tree, tree>;
11222
11223 oacc_declare_returns->put (decl, c);
11224 }
11225 }
11226
11227 if (gimplify_omp_ctxp)
11228 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
11229 }
11230
11231 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
11232 clauses);
11233
11234 gimplify_seq_add_stmt (pre_p, stmt);
11235
11236 *expr_p = NULL_TREE;
11237 }
11238
11239 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
11240 gimplification of the body, as well as scanning the body for used
11241 variables. We need to do this scan now, because variable-sized
11242 decls will be decomposed during gimplification. */
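 /* E.g. a VLA 'int a[n]' is decomposed during gimplification into
 accesses through a pointer recorded in its DECL_VALUE_EXPR, so the
 used-variable scan has to run before that happens. */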
11243
11244 static void
11245 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
11246 {
11247 tree expr = *expr_p;
11248 gimple *g;
11249 gimple_seq body = NULL;
11250
11251 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
11252 OMP_PARALLEL_COMBINED (expr)
11253 ? ORT_COMBINED_PARALLEL
11254 : ORT_PARALLEL, OMP_PARALLEL);
11255
11256 push_gimplify_context ();
11257
11258 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
11259 if (gimple_code (g) == GIMPLE_BIND)
11260 pop_gimplify_context (g);
11261 else
11262 pop_gimplify_context (NULL);
11263
11264 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
11265 OMP_PARALLEL);
11266
11267 g = gimple_build_omp_parallel (body,
11268 OMP_PARALLEL_CLAUSES (expr),
11269 NULL_TREE, NULL_TREE);
11270 if (OMP_PARALLEL_COMBINED (expr))
11271 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
11272 gimplify_seq_add_stmt (pre_p, g);
11273 *expr_p = NULL_TREE;
11274 }
11275
11276 /* Gimplify the contents of an OMP_TASK statement. This involves
11277 gimplification of the body, as well as scanning the body for used
11278 variables. We need to do this scan now, because variable-sized
11279 decls will be decomposed during gimplification. */
11280
11281 static void
11282 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
11283 {
11284 tree expr = *expr_p;
11285 gimple *g;
11286 gimple_seq body = NULL;
11287
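 /* An OMP_TASK with a NULL body represents a '#pragma omp taskwait'
 with depend clause(s); the mutexinoutset depend kind is invalid
 there, which is diagnosed below. */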
11288 if (OMP_TASK_BODY (expr) == NULL_TREE)
11289 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
11290 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
11291 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
11292 {
11293 error_at (OMP_CLAUSE_LOCATION (c),
11294 "%<mutexinoutset%> kind in %<depend%> clause on a "
11295 "%<taskwait%> construct");
11296 break;
11297 }
11298
11299 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
11300 omp_find_clause (OMP_TASK_CLAUSES (expr),
11301 OMP_CLAUSE_UNTIED)
11302 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
11303
11304 if (OMP_TASK_BODY (expr))
11305 {
11306 push_gimplify_context ();
11307
11308 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
11309 if (gimple_code (g) == GIMPLE_BIND)
11310 pop_gimplify_context (g);
11311 else
11312 pop_gimplify_context (NULL);
11313 }
11314
11315 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
11316 OMP_TASK);
11317
11318 g = gimple_build_omp_task (body,
11319 OMP_TASK_CLAUSES (expr),
11320 NULL_TREE, NULL_TREE,
11321 NULL_TREE, NULL_TREE, NULL_TREE);
11322 if (OMP_TASK_BODY (expr) == NULL_TREE)
11323 gimple_omp_task_set_taskwait_p (g, true);
11324 gimplify_seq_add_stmt (pre_p, g);
11325 *expr_p = NULL_TREE;
11326 }
11327
 11328 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
 11329 force it into a temporary initialized in PRE_P and add a firstprivate
 11330 clause for that temporary to ORIG_FOR_STMT. */
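 /* For instance, for
 #pragma omp taskloop
 for (i = 0; i < f (); i++)
 the call f () is evaluated once into a temporary before the taskloop
 and that temporary is then firstprivatized on the construct. */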
11331
11332 static void
11333 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
11334 tree orig_for_stmt)
11335 {
11336 if (*tp == NULL || is_gimple_constant (*tp))
11337 return;
11338
11339 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
 11340 /* A reference-to-pointer conversion is considered useless,
 11341 but it is significant for the firstprivate clause. Force
 11342 it here. */
11343 if (type
11344 && TREE_CODE (type) == POINTER_TYPE
11345 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
11346 {
11347 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11348 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
11349 gimplify_and_add (m, pre_p);
11350 *tp = v;
11351 }
11352
11353 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
11354 OMP_CLAUSE_DECL (c) = *tp;
11355 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11356 OMP_FOR_CLAUSES (orig_for_stmt) = c;
11357 }
11358
11359 /* Gimplify the gross structure of an OMP_FOR statement. */
11360
11361 static enum gimplify_status
11362 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11363 {
11364 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11365 enum gimplify_status ret = GS_ALL_DONE;
11366 enum gimplify_status tret;
11367 gomp_for *gfor;
11368 gimple_seq for_body, for_pre_body;
11369 int i;
11370 bitmap has_decl_expr = NULL;
11371 enum omp_region_type ort = ORT_WORKSHARE;
11372 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
11373
11374 orig_for_stmt = for_stmt = *expr_p;
11375
11376 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11377 != NULL_TREE);
11378 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11379 {
11380 tree *data[4] = { NULL, NULL, NULL, NULL };
11381 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11382 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11383 find_combined_omp_for, data, NULL);
11384 if (inner_for_stmt == NULL_TREE)
11385 {
11386 gcc_assert (seen_error ());
11387 *expr_p = NULL_TREE;
11388 return GS_ERROR;
11389 }
11390 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11391 {
11392 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11393 &OMP_FOR_PRE_BODY (for_stmt));
11394 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11395 }
11396 if (OMP_FOR_PRE_BODY (inner_for_stmt))
11397 {
11398 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11399 &OMP_FOR_PRE_BODY (for_stmt));
11400 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11401 }
11402
11403 if (data[0])
11404 {
 11405 /* We have some statements or variable declarations in between
 11406 the composite construct directives. Move them so that they
 11407 wrap around the inner_for_stmt. */
11408 data[0] = expr_p;
11409 for (i = 0; i < 3; i++)
11410 if (data[i])
11411 {
11412 tree t = *data[i];
11413 if (i < 2 && data[i + 1] == &OMP_BODY (t))
11414 data[i + 1] = data[i];
11415 *data[i] = OMP_BODY (t);
11416 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11417 NULL_TREE, make_node (BLOCK));
11418 OMP_BODY (t) = body;
11419 append_to_statement_list_force (inner_for_stmt,
11420 &BIND_EXPR_BODY (body));
11421 *data[3] = t;
11422 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11423 gcc_assert (*data[3] == inner_for_stmt);
11424 }
11425 return GS_OK;
11426 }
11427
11428 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11429 if (!loop_p
11430 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11431 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11432 i)) == TREE_LIST
11433 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11434 i)))
11435 {
11436 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11437 /* Class iterators aren't allowed on OMP_SIMD, so the only
11438 case we need to solve is distribute parallel for. They are
11439 allowed on the loop construct, but that is already handled
11440 in gimplify_omp_loop. */
11441 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11442 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11443 && data[1]);
11444 tree orig_decl = TREE_PURPOSE (orig);
11445 tree last = TREE_VALUE (orig);
11446 tree *pc;
11447 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11448 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11449 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11450 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11451 && OMP_CLAUSE_DECL (*pc) == orig_decl)
11452 break;
11453 if (*pc == NULL_TREE)
11454 {
11455 tree *spc;
11456 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11457 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11458 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11459 && OMP_CLAUSE_DECL (*spc) == orig_decl)
11460 break;
11461 if (*spc)
11462 {
11463 tree c = *spc;
11464 *spc = OMP_CLAUSE_CHAIN (c);
11465 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11466 *pc = c;
11467 }
11468 }
11469 if (*pc == NULL_TREE)
11470 ;
11471 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11472 {
 11473 /* A private clause will appear only on inner_for_stmt.
 11474 Change it into firstprivate, and add a private clause
 11475 on for_stmt. */
11476 tree c = copy_node (*pc);
11477 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11478 OMP_FOR_CLAUSES (for_stmt) = c;
11479 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11480 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11481 }
11482 else
11483 {
 11484 /* A lastprivate clause will appear on both inner_for_stmt
 11485 and for_stmt. Add a firstprivate clause to
 11486 inner_for_stmt. */
11487 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11488 OMP_CLAUSE_FIRSTPRIVATE);
11489 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11490 OMP_CLAUSE_CHAIN (c) = *pc;
11491 *pc = c;
11492 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
11493 }
11494 tree c = build_omp_clause (UNKNOWN_LOCATION,
11495 OMP_CLAUSE_FIRSTPRIVATE);
11496 OMP_CLAUSE_DECL (c) = last;
11497 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11498 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11499 c = build_omp_clause (UNKNOWN_LOCATION,
11500 *pc ? OMP_CLAUSE_SHARED
11501 : OMP_CLAUSE_FIRSTPRIVATE);
11502 OMP_CLAUSE_DECL (c) = orig_decl;
11503 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11504 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11505 }
 11506 /* Similarly, take care of C++ range-for temporaries; those should
 11507 be firstprivate on the OMP_PARALLEL, if any. */
11508 if (data[1])
11509 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11510 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11511 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11512 i)) == TREE_LIST
11513 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11514 i)))
11515 {
11516 tree orig
11517 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11518 tree v = TREE_CHAIN (orig);
11519 tree c = build_omp_clause (UNKNOWN_LOCATION,
11520 OMP_CLAUSE_FIRSTPRIVATE);
 11521 /* First add a firstprivate clause for the __for_end artificial
 11522 decl. */
11523 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11524 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11525 == REFERENCE_TYPE)
11526 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11527 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11528 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11529 if (TREE_VEC_ELT (v, 0))
11530 {
11531 /* And now the same for __for_range artificial decl if it
11532 exists. */
11533 c = build_omp_clause (UNKNOWN_LOCATION,
11534 OMP_CLAUSE_FIRSTPRIVATE);
11535 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11536 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11537 == REFERENCE_TYPE)
11538 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11539 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11540 OMP_PARALLEL_CLAUSES (*data[1]) = c;
11541 }
11542 }
11543 }
11544
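 /* A loop nest is non-rectangular when an inner loop's bounds depend
 on an outer loop's iteration variable, e.g.
 for (i = 0; i < n; i++)
 for (j = i; j < n; j++)
 OpenMP disallows schedule, ordered and dist_schedule clauses on such
 nests, which is diagnosed below. */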
11545 switch (TREE_CODE (for_stmt))
11546 {
11547 case OMP_FOR:
11548 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
11549 {
11550 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11551 OMP_CLAUSE_SCHEDULE))
11552 error_at (EXPR_LOCATION (for_stmt),
11553 "%qs clause may not appear on non-rectangular %qs",
11554 "schedule", "for");
11555 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
11556 error_at (EXPR_LOCATION (for_stmt),
11557 "%qs clause may not appear on non-rectangular %qs",
11558 "ordered", "for");
11559 }
11560 break;
11561 case OMP_DISTRIBUTE:
11562 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
11563 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11564 OMP_CLAUSE_DIST_SCHEDULE))
11565 error_at (EXPR_LOCATION (for_stmt),
11566 "%qs clause may not appear on non-rectangular %qs",
11567 "dist_schedule", "distribute");
11568 break;
11569 case OACC_LOOP:
11570 ort = ORT_ACC;
11571 break;
11572 case OMP_TASKLOOP:
11573 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11574 ort = ORT_UNTIED_TASKLOOP;
11575 else
11576 ort = ORT_TASKLOOP;
11577 break;
11578 case OMP_SIMD:
11579 ort = ORT_SIMD;
11580 break;
11581 default:
11582 gcc_unreachable ();
11583 }
11584
11585 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11586 clause for the IV. */
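 /* E.g. on '#pragma omp simd linear(i)' where i is the sole loop
 iterator, copying the value of i in at the start would be pointless:
 the loop initialization assigns it anyway. */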
11587 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11588 {
11589 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11590 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11591 decl = TREE_OPERAND (t, 0);
11592 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11593 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11594 && OMP_CLAUSE_DECL (c) == decl)
11595 {
11596 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11597 break;
11598 }
11599 }
11600
11601 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11602 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11603 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11604 ? OMP_LOOP : TREE_CODE (for_stmt));
11605
11606 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11607 gimplify_omp_ctxp->distribute = true;
11608
11609 /* Handle OMP_FOR_INIT. */
11610 for_pre_body = NULL;
11611 if ((ort == ORT_SIMD
11612 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11613 && OMP_FOR_PRE_BODY (for_stmt))
11614 {
11615 has_decl_expr = BITMAP_ALLOC (NULL);
11616 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11617 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11618 == VAR_DECL)
11619 {
11620 t = OMP_FOR_PRE_BODY (for_stmt);
11621 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11622 }
11623 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11624 {
11625 tree_stmt_iterator si;
11626 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11627 tsi_next (&si))
11628 {
11629 t = tsi_stmt (si);
11630 if (TREE_CODE (t) == DECL_EXPR
11631 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11632 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11633 }
11634 }
11635 }
11636 if (OMP_FOR_PRE_BODY (for_stmt))
11637 {
11638 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11639 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11640 else
11641 {
11642 struct gimplify_omp_ctx ctx;
11643 memset (&ctx, 0, sizeof (ctx));
11644 ctx.region_type = ORT_NONE;
11645 gimplify_omp_ctxp = &ctx;
11646 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11647 gimplify_omp_ctxp = NULL;
11648 }
11649 }
11650 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11651
11652 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11653 for_stmt = inner_for_stmt;
11654
 11655 /* For taskloop, we need to gimplify the start, end and step expressions
 11656 before the taskloop, outside of the taskloop omp context. */
11657 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11658 {
11659 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11660 {
11661 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11662 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
11663 ? pre_p : &for_pre_body);
11664 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11665 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11666 {
11667 tree v = TREE_OPERAND (t, 1);
11668 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11669 for_pre_p, orig_for_stmt);
11670 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11671 for_pre_p, orig_for_stmt);
11672 }
11673 else
11674 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11675 orig_for_stmt);
11676
11677 /* Handle OMP_FOR_COND. */
11678 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11679 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
11680 {
11681 tree v = TREE_OPERAND (t, 1);
11682 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
11683 for_pre_p, orig_for_stmt);
11684 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
11685 for_pre_p, orig_for_stmt);
11686 }
11687 else
11688 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
11689 orig_for_stmt);
11690
11691 /* Handle OMP_FOR_INCR. */
11692 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11693 if (TREE_CODE (t) == MODIFY_EXPR)
11694 {
11695 decl = TREE_OPERAND (t, 0);
11696 t = TREE_OPERAND (t, 1);
11697 tree *tp = &TREE_OPERAND (t, 1);
11698 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11699 tp = &TREE_OPERAND (t, 0);
11700
11701 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
11702 orig_for_stmt);
11703 }
11704 }
11705
11706 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11707 OMP_TASKLOOP);
11708 }
11709
11710 if (orig_for_stmt != for_stmt)
11711 gimplify_omp_ctxp->combined_loop = true;
11712
11713 for_body = NULL;
11714 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11715 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11716 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11717 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11718
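 /* An ordered clause with a parameter, e.g. '#pragma omp for ordered(2)',
 marks a doacross loop; record the iteration variables of each collapsed
 dimension so that later 'depend(sink: ...)' clauses can refer to them. */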
11719 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11720 bool is_doacross = false;
11721 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11722 {
11723 is_doacross = true;
11724 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11725 (OMP_FOR_INIT (for_stmt))
11726 * 2);
11727 }
11728 int collapse = 1, tile = 0;
11729 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11730 if (c)
11731 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11732 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11733 if (c)
11734 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11735 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
11736 hash_set<tree> *allocate_uids = NULL;
11737 if (c)
11738 {
11739 allocate_uids = new hash_set<tree>;
11740 for (; c; c = OMP_CLAUSE_CHAIN (c))
11741 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
11742 allocate_uids->add (OMP_CLAUSE_DECL (c));
11743 }
11744 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11745 {
11746 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11747 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11748 decl = TREE_OPERAND (t, 0);
11749 gcc_assert (DECL_P (decl));
11750 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11751 || POINTER_TYPE_P (TREE_TYPE (decl)));
11752 if (is_doacross)
11753 {
11754 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11755 {
11756 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11757 if (TREE_CODE (orig_decl) == TREE_LIST)
11758 {
11759 orig_decl = TREE_PURPOSE (orig_decl);
11760 if (!orig_decl)
11761 orig_decl = decl;
11762 }
11763 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11764 }
11765 else
11766 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11767 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11768 }
11769
11770 /* Make sure the iteration variable is private. */
11771 tree c = NULL_TREE;
11772 tree c2 = NULL_TREE;
11773 if (orig_for_stmt != for_stmt)
11774 {
11775 /* Preserve this information until we gimplify the inner simd. */
11776 if (has_decl_expr
11777 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11778 TREE_PRIVATE (t) = 1;
11779 }
11780 else if (ort == ORT_SIMD)
11781 {
11782 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11783 (splay_tree_key) decl);
11784 omp_is_private (gimplify_omp_ctxp, decl,
11785 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11786 != 1));
11787 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11788 {
11789 omp_notice_variable (gimplify_omp_ctxp, decl, true);
11790 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11791 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11792 OMP_CLAUSE_LASTPRIVATE);
11793 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11794 OMP_CLAUSE_LASTPRIVATE))
11795 if (OMP_CLAUSE_DECL (c3) == decl)
11796 {
11797 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11798 "conditional %<lastprivate%> on loop "
11799 "iterator %qD ignored", decl);
11800 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11801 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11802 }
11803 }
11804 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11805 {
11806 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11807 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11808 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11809 if ((has_decl_expr
11810 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11811 || TREE_PRIVATE (t))
11812 {
11813 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11814 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11815 }
11816 struct gimplify_omp_ctx *outer
11817 = gimplify_omp_ctxp->outer_context;
11818 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11819 {
11820 if (outer->region_type == ORT_WORKSHARE
11821 && outer->combined_loop)
11822 {
11823 n = splay_tree_lookup (outer->variables,
11824 (splay_tree_key)decl);
11825 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11826 {
11827 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11828 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11829 }
11830 else
11831 {
11832 struct gimplify_omp_ctx *octx = outer->outer_context;
11833 if (octx
11834 && octx->region_type == ORT_COMBINED_PARALLEL
11835 && octx->outer_context
11836 && (octx->outer_context->region_type
11837 == ORT_WORKSHARE)
11838 && octx->outer_context->combined_loop)
11839 {
11840 octx = octx->outer_context;
11841 n = splay_tree_lookup (octx->variables,
11842 (splay_tree_key)decl);
11843 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11844 {
11845 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11846 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11847 }
11848 }
11849 }
11850 }
11851 }
11852
11853 OMP_CLAUSE_DECL (c) = decl;
11854 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11855 OMP_FOR_CLAUSES (for_stmt) = c;
11856 omp_add_variable (gimplify_omp_ctxp, decl, flags);
11857 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11858 {
11859 if (outer->region_type == ORT_WORKSHARE
11860 && outer->combined_loop)
11861 {
11862 if (outer->outer_context
11863 && (outer->outer_context->region_type
11864 == ORT_COMBINED_PARALLEL))
11865 outer = outer->outer_context;
11866 else if (omp_check_private (outer, decl, false))
11867 outer = NULL;
11868 }
11869 else if (((outer->region_type & ORT_TASKLOOP)
11870 == ORT_TASKLOOP)
11871 && outer->combined_loop
11872 && !omp_check_private (gimplify_omp_ctxp,
11873 decl, false))
11874 ;
11875 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11876 {
11877 omp_notice_variable (outer, decl, true);
11878 outer = NULL;
11879 }
11880 if (outer)
11881 {
11882 n = splay_tree_lookup (outer->variables,
11883 (splay_tree_key)decl);
11884 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11885 {
11886 omp_add_variable (outer, decl,
11887 GOVD_LASTPRIVATE | GOVD_SEEN);
11888 if (outer->region_type == ORT_COMBINED_PARALLEL
11889 && outer->outer_context
11890 && (outer->outer_context->region_type
11891 == ORT_WORKSHARE)
11892 && outer->outer_context->combined_loop)
11893 {
11894 outer = outer->outer_context;
11895 n = splay_tree_lookup (outer->variables,
11896 (splay_tree_key)decl);
11897 if (omp_check_private (outer, decl, false))
11898 outer = NULL;
11899 else if (n == NULL
11900 || ((n->value & GOVD_DATA_SHARE_CLASS)
11901 == 0))
11902 omp_add_variable (outer, decl,
11903 GOVD_LASTPRIVATE
11904 | GOVD_SEEN);
11905 else
11906 outer = NULL;
11907 }
11908 if (outer && outer->outer_context
11909 && ((outer->outer_context->region_type
11910 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11911 || (((outer->region_type & ORT_TASKLOOP)
11912 == ORT_TASKLOOP)
11913 && (outer->outer_context->region_type
11914 == ORT_COMBINED_PARALLEL))))
11915 {
11916 outer = outer->outer_context;
11917 n = splay_tree_lookup (outer->variables,
11918 (splay_tree_key)decl);
11919 if (n == NULL
11920 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11921 omp_add_variable (outer, decl,
11922 GOVD_SHARED | GOVD_SEEN);
11923 else
11924 outer = NULL;
11925 }
11926 if (outer && outer->outer_context)
11927 omp_notice_variable (outer->outer_context, decl,
11928 true);
11929 }
11930 }
11931 }
11932 }
11933 else
11934 {
11935 bool lastprivate
11936 = (!has_decl_expr
11937 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11938 if (TREE_PRIVATE (t))
11939 lastprivate = false;
11940 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11941 {
11942 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11943 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11944 lastprivate = false;
11945 }
11946
11947 struct gimplify_omp_ctx *outer
11948 = gimplify_omp_ctxp->outer_context;
11949 if (outer && lastprivate)
11950 {
11951 if (outer->region_type == ORT_WORKSHARE
11952 && outer->combined_loop)
11953 {
11954 n = splay_tree_lookup (outer->variables,
11955 (splay_tree_key)decl);
11956 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11957 {
11958 lastprivate = false;
11959 outer = NULL;
11960 }
11961 else if (outer->outer_context
11962 && (outer->outer_context->region_type
11963 == ORT_COMBINED_PARALLEL))
11964 outer = outer->outer_context;
11965 else if (omp_check_private (outer, decl, false))
11966 outer = NULL;
11967 }
11968 else if (((outer->region_type & ORT_TASKLOOP)
11969 == ORT_TASKLOOP)
11970 && outer->combined_loop
11971 && !omp_check_private (gimplify_omp_ctxp,
11972 decl, false))
11973 ;
11974 else if (outer->region_type != ORT_COMBINED_PARALLEL)
11975 {
11976 omp_notice_variable (outer, decl, true);
11977 outer = NULL;
11978 }
11979 if (outer)
11980 {
11981 n = splay_tree_lookup (outer->variables,
11982 (splay_tree_key)decl);
11983 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11984 {
11985 omp_add_variable (outer, decl,
11986 GOVD_LASTPRIVATE | GOVD_SEEN);
11987 if (outer->region_type == ORT_COMBINED_PARALLEL
11988 && outer->outer_context
11989 && (outer->outer_context->region_type
11990 == ORT_WORKSHARE)
11991 && outer->outer_context->combined_loop)
11992 {
11993 outer = outer->outer_context;
11994 n = splay_tree_lookup (outer->variables,
11995 (splay_tree_key)decl);
11996 if (omp_check_private (outer, decl, false))
11997 outer = NULL;
11998 else if (n == NULL
11999 || ((n->value & GOVD_DATA_SHARE_CLASS)
12000 == 0))
12001 omp_add_variable (outer, decl,
12002 GOVD_LASTPRIVATE
12003 | GOVD_SEEN);
12004 else
12005 outer = NULL;
12006 }
12007 if (outer && outer->outer_context
12008 && ((outer->outer_context->region_type
12009 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
12010 || (((outer->region_type & ORT_TASKLOOP)
12011 == ORT_TASKLOOP)
12012 && (outer->outer_context->region_type
12013 == ORT_COMBINED_PARALLEL))))
12014 {
12015 outer = outer->outer_context;
12016 n = splay_tree_lookup (outer->variables,
12017 (splay_tree_key)decl);
12018 if (n == NULL
12019 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
12020 omp_add_variable (outer, decl,
12021 GOVD_SHARED | GOVD_SEEN);
12022 else
12023 outer = NULL;
12024 }
12025 if (outer && outer->outer_context)
12026 omp_notice_variable (outer->outer_context, decl,
12027 true);
12028 }
12029 }
12030 }
12031
12032 c = build_omp_clause (input_location,
12033 lastprivate ? OMP_CLAUSE_LASTPRIVATE
12034 : OMP_CLAUSE_PRIVATE);
12035 OMP_CLAUSE_DECL (c) = decl;
12036 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12037 OMP_FOR_CLAUSES (for_stmt) = c;
12038 omp_add_variable (gimplify_omp_ctxp, decl,
12039 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
12040 | GOVD_EXPLICIT | GOVD_SEEN);
12041 c = NULL_TREE;
12042 }
12043 }
12044 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
12045 {
12046 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12047 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12048 (splay_tree_key) decl);
12049 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
12050 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12051 OMP_CLAUSE_LASTPRIVATE);
12052 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12053 OMP_CLAUSE_LASTPRIVATE))
12054 if (OMP_CLAUSE_DECL (c3) == decl)
12055 {
12056 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12057 "conditional %<lastprivate%> on loop "
12058 "iterator %qD ignored", decl);
12059 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12060 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12061 }
12062 }
12063 else
12064 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
12065
 12066 /* If DECL is not a gimple register, create a temporary variable to act
 12067 as an iteration counter. This is valid, since DECL cannot be
 12068 modified in the body of the loop. Similarly for any iteration vars
 12069 in a simd with collapse > 1, where the iterator vars must be
 12070 lastprivate. And similarly for vars mentioned in allocate clauses. */
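 /* In these cases the fresh temporary VAR drives the loop and DECL is
 assigned from VAR at the top of the loop body; see the
 gimple_build_assign below. */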
12071 if (orig_for_stmt != for_stmt)
12072 var = decl;
12073 else if (!is_gimple_reg (decl)
12074 || (ort == ORT_SIMD
12075 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
12076 || (allocate_uids && allocate_uids->contains (decl)))
12077 {
12078 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12079 /* Make sure omp_add_variable is not called on it prematurely.
12080 We call it ourselves a few lines later. */
12081 gimplify_omp_ctxp = NULL;
12082 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12083 gimplify_omp_ctxp = ctx;
12084 TREE_OPERAND (t, 0) = var;
12085
12086 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
12087
12088 if (ort == ORT_SIMD
12089 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12090 {
12091 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12092 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
12093 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
12094 OMP_CLAUSE_DECL (c2) = var;
12095 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
12096 OMP_FOR_CLAUSES (for_stmt) = c2;
12097 omp_add_variable (gimplify_omp_ctxp, var,
12098 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
12099 if (c == NULL_TREE)
12100 {
12101 c = c2;
12102 c2 = NULL_TREE;
12103 }
12104 }
12105 else
12106 omp_add_variable (gimplify_omp_ctxp, var,
12107 GOVD_PRIVATE | GOVD_SEEN);
12108 }
12109 else
12110 var = decl;
12111
12112 gimplify_omp_ctxp->in_for_exprs = true;
12113 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12114 {
12115 tree lb = TREE_OPERAND (t, 1);
12116 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
12117 is_gimple_val, fb_rvalue, false);
12118 ret = MIN (ret, tret);
12119 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
12120 is_gimple_val, fb_rvalue, false);
12121 }
12122 else
12123 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12124 is_gimple_val, fb_rvalue, false);
12125 gimplify_omp_ctxp->in_for_exprs = false;
12126 ret = MIN (ret, tret);
12127 if (ret == GS_ERROR)
12128 return ret;
12129
12130 /* Handle OMP_FOR_COND. */
12131 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12132 gcc_assert (COMPARISON_CLASS_P (t));
12133 gcc_assert (TREE_OPERAND (t, 0) == decl);
12134
12135 gimplify_omp_ctxp->in_for_exprs = true;
12136 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12137 {
12138 tree ub = TREE_OPERAND (t, 1);
12139 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
12140 is_gimple_val, fb_rvalue, false);
12141 ret = MIN (ret, tret);
12142 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
12143 is_gimple_val, fb_rvalue, false);
12144 }
12145 else
12146 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12147 is_gimple_val, fb_rvalue, false);
12148 gimplify_omp_ctxp->in_for_exprs = false;
12149 ret = MIN (ret, tret);
12150
12151 /* Handle OMP_FOR_INCR. */
12152 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12153 switch (TREE_CODE (t))
12154 {
12155 case PREINCREMENT_EXPR:
12156 case POSTINCREMENT_EXPR:
12157 {
12158 tree decl = TREE_OPERAND (t, 0);
12159 /* c_omp_for_incr_canonicalize_ptr() should have been
12160 called to massage things appropriately. */
12161 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12162
12163 if (orig_for_stmt != for_stmt)
12164 break;
12165 t = build_int_cst (TREE_TYPE (decl), 1);
12166 if (c)
12167 OMP_CLAUSE_LINEAR_STEP (c) = t;
12168 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12169 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12170 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12171 break;
12172 }
12173
12174 case PREDECREMENT_EXPR:
12175 case POSTDECREMENT_EXPR:
12176 /* c_omp_for_incr_canonicalize_ptr() should have been
12177 called to massage things appropriately. */
12178 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12179 if (orig_for_stmt != for_stmt)
12180 break;
12181 t = build_int_cst (TREE_TYPE (decl), -1);
12182 if (c)
12183 OMP_CLAUSE_LINEAR_STEP (c) = t;
12184 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12185 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12186 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12187 break;
12188
12189 case MODIFY_EXPR:
12190 gcc_assert (TREE_OPERAND (t, 0) == decl);
12191 TREE_OPERAND (t, 0) = var;
12192
12193 t = TREE_OPERAND (t, 1);
12194 switch (TREE_CODE (t))
12195 {
12196 case PLUS_EXPR:
12197 if (TREE_OPERAND (t, 1) == decl)
12198 {
12199 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
12200 TREE_OPERAND (t, 0) = var;
12201 break;
12202 }
12203
12204 /* Fallthru. */
12205 case MINUS_EXPR:
12206 case POINTER_PLUS_EXPR:
12207 gcc_assert (TREE_OPERAND (t, 0) == decl);
12208 TREE_OPERAND (t, 0) = var;
12209 break;
12210 default:
12211 gcc_unreachable ();
12212 }
12213
12214 gimplify_omp_ctxp->in_for_exprs = true;
12215 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12216 is_gimple_val, fb_rvalue, false);
12217 ret = MIN (ret, tret);
12218 if (c)
12219 {
12220 tree step = TREE_OPERAND (t, 1);
12221 tree stept = TREE_TYPE (decl);
12222 if (POINTER_TYPE_P (stept))
12223 stept = sizetype;
12224 step = fold_convert (stept, step);
12225 if (TREE_CODE (t) == MINUS_EXPR)
12226 step = fold_build1 (NEGATE_EXPR, stept, step);
12227 OMP_CLAUSE_LINEAR_STEP (c) = step;
12228 if (step != TREE_OPERAND (t, 1))
12229 {
12230 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
12231 &for_pre_body, NULL,
12232 is_gimple_val, fb_rvalue, false);
12233 ret = MIN (ret, tret);
12234 }
12235 }
12236 gimplify_omp_ctxp->in_for_exprs = false;
12237 break;
12238
12239 default:
12240 gcc_unreachable ();
12241 }
12242
12243 if (c2)
12244 {
12245 gcc_assert (c);
12246 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
12247 }
12248
12249 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
12250 {
12251 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
12252 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12253 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
12254 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12255 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
12256 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
12257 && OMP_CLAUSE_DECL (c) == decl)
12258 {
12259 if (is_doacross && (collapse == 1 || i >= collapse))
12260 t = var;
12261 else
12262 {
12263 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12264 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12265 gcc_assert (TREE_OPERAND (t, 0) == var);
12266 t = TREE_OPERAND (t, 1);
12267 gcc_assert (TREE_CODE (t) == PLUS_EXPR
12268 || TREE_CODE (t) == MINUS_EXPR
12269 || TREE_CODE (t) == POINTER_PLUS_EXPR);
12270 gcc_assert (TREE_OPERAND (t, 0) == var);
12271 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
12272 is_doacross ? var : decl,
12273 TREE_OPERAND (t, 1));
12274 }
12275 gimple_seq *seq;
12276 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
12277 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
12278 else
12279 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
12280 push_gimplify_context ();
12281 gimplify_assign (decl, t, seq);
12282 gimple *bind = NULL;
12283 if (gimplify_ctxp->temps)
12284 {
12285 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
12286 *seq = NULL;
12287 gimplify_seq_add_stmt (seq, bind);
12288 }
12289 pop_gimplify_context (bind);
12290 }
12291 }
12292 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
12293 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12294 {
12295 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12296 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12297 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12298 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12299 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12300 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12301 gcc_assert (COMPARISON_CLASS_P (t));
12302 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12303 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12304 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12305 }
12306 }
12307
12308 BITMAP_FREE (has_decl_expr);
12309 delete allocate_uids;
12310
12311 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12312 || (loop_p && orig_for_stmt == for_stmt))
12313 {
12314 push_gimplify_context ();
12315 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
12316 {
12317 OMP_FOR_BODY (orig_for_stmt)
12318 = build3 (BIND_EXPR, void_type_node, NULL,
12319 OMP_FOR_BODY (orig_for_stmt), NULL);
12320 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
12321 }
12322 }
12323
12324 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
12325 &for_body);
12326
12327 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
12328 || (loop_p && orig_for_stmt == for_stmt))
12329 {
12330 if (gimple_code (g) == GIMPLE_BIND)
12331 pop_gimplify_context (g);
12332 else
12333 pop_gimplify_context (NULL);
12334 }
12335
12336 if (orig_for_stmt != for_stmt)
12337 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12338 {
12339 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12340 decl = TREE_OPERAND (t, 0);
12341 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12342 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12343 gimplify_omp_ctxp = ctx->outer_context;
12344 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12345 gimplify_omp_ctxp = ctx;
12346 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
12347 TREE_OPERAND (t, 0) = var;
12348 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12349 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12350 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
12351 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12352 for (int j = i + 1;
12353 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
12354 {
12355 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
12356 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12357 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12358 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12359 {
12360 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12361 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12362 }
12363 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
12364 gcc_assert (COMPARISON_CLASS_P (t));
12365 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
12366 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
12367 {
12368 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12369 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
12370 }
12371 }
12372 }
12373
12374 gimplify_adjust_omp_clauses (pre_p, for_body,
12375 &OMP_FOR_CLAUSES (orig_for_stmt),
12376 TREE_CODE (orig_for_stmt));
12377
12378 int kind;
12379 switch (TREE_CODE (orig_for_stmt))
12380 {
12381 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12382 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12383 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12384 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12385 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12386 default:
12387 gcc_unreachable ();
12388 }
12389 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12390 {
12391 gimplify_seq_add_seq (pre_p, for_pre_body);
12392 for_pre_body = NULL;
12393 }
12394 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12395 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12396 for_pre_body);
12397 if (orig_for_stmt != for_stmt)
12398 gimple_omp_for_set_combined_p (gfor, true);
12399 if (gimplify_omp_ctxp
12400 && (gimplify_omp_ctxp->combined_loop
12401 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12402 && gimplify_omp_ctxp->outer_context
12403 && gimplify_omp_ctxp->outer_context->combined_loop)))
12404 {
12405 gimple_omp_for_set_combined_into_p (gfor, true);
12406 if (gimplify_omp_ctxp->combined_loop)
12407 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12408 else
12409 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12410 }
12411
12412 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12413 {
12414 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12415 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12416 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12417 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12418 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12419 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12420 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12421 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12422 }
12423
12424 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12425 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
12426 The outer taskloop computes the number of iterations and the
12427 counts for collapsed loops, and holds the taskloop-specific clauses.
12428 The task construct stands for the effect of data sharing on the
12429 explicit task it creates, and the inner taskloop stands for the
12430 expansion of the loop inside that explicit task construct. */
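/* For illustration only, a sketch of the result (grainsize(g) and
   firstprivate(x) are hypothetical clauses; exact placement is decided
   in the clause-splitting loop below):

     #pragma omp taskloop grainsize(g) firstprivate(x)
     for (i = 0; i < n; i++) ...

   becomes approximately

     GIMPLE_OMP_FOR (kind=taskloop, grainsize(g))   <-- outer
       GIMPLE_OMP_TASK (firstprivate(x))            <-- data sharing
         GIMPLE_OMP_FOR (kind=taskloop)             <-- inner
           loop body  */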
12431 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12432 {
12433 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12434 tree task_clauses = NULL_TREE;
12435 tree c = *gfor_clauses_ptr;
12436 tree *gtask_clauses_ptr = &task_clauses;
12437 tree outer_for_clauses = NULL_TREE;
12438 tree *gforo_clauses_ptr = &outer_for_clauses;
12439 bitmap lastprivate_uids = NULL;
12440 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
12441 {
12442 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
12443 if (c)
12444 {
12445 lastprivate_uids = BITMAP_ALLOC (NULL);
12446 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
12447 OMP_CLAUSE_LASTPRIVATE))
12448 bitmap_set_bit (lastprivate_uids,
12449 DECL_UID (OMP_CLAUSE_DECL (c)));
12450 }
12451 c = *gfor_clauses_ptr;
12452 }
12453 for (; c; c = OMP_CLAUSE_CHAIN (c))
12454 switch (OMP_CLAUSE_CODE (c))
12455 {
12456 /* These clauses are allowed on the task construct; move them there. */
12457 case OMP_CLAUSE_SHARED:
12458 case OMP_CLAUSE_FIRSTPRIVATE:
12459 case OMP_CLAUSE_DEFAULT:
12460 case OMP_CLAUSE_IF:
12461 case OMP_CLAUSE_UNTIED:
12462 case OMP_CLAUSE_FINAL:
12463 case OMP_CLAUSE_MERGEABLE:
12464 case OMP_CLAUSE_PRIORITY:
12465 case OMP_CLAUSE_REDUCTION:
12466 case OMP_CLAUSE_IN_REDUCTION:
12467 *gtask_clauses_ptr = c;
12468 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12469 break;
12470 case OMP_CLAUSE_PRIVATE:
12471 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12472 {
12473 /* We want private on outer for and firstprivate
12474 on task. */
12475 *gtask_clauses_ptr
12476 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12477 OMP_CLAUSE_FIRSTPRIVATE);
12478 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12479 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12480 openacc);
12481 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12482 *gforo_clauses_ptr = c;
12483 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12484 }
12485 else
12486 {
12487 *gtask_clauses_ptr = c;
12488 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12489 }
12490 break;
12491 /* These clauses go into outer taskloop clauses. */
12492 case OMP_CLAUSE_GRAINSIZE:
12493 case OMP_CLAUSE_NUM_TASKS:
12494 case OMP_CLAUSE_NOGROUP:
12495 *gforo_clauses_ptr = c;
12496 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12497 break;
12498 /* The collapse clause is duplicated on both taskloops. */
12499 case OMP_CLAUSE_COLLAPSE:
12500 *gfor_clauses_ptr = c;
12501 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12502 *gforo_clauses_ptr = copy_node (c);
12503 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12504 break;
12505 /* For lastprivate, keep the clause on the inner taskloop, and add
12506 a shared clause on the task. If the same decl is also firstprivate,
12507 also add a firstprivate clause on the inner taskloop. */
12508 case OMP_CLAUSE_LASTPRIVATE:
12509 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12510 {
12511 /* For taskloop C++ lastprivate IVs, we want:
12512 1) private on outer taskloop
12513 2) firstprivate and shared on task
12514 3) lastprivate on inner taskloop */
12515 *gtask_clauses_ptr
12516 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12517 OMP_CLAUSE_FIRSTPRIVATE);
12518 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12519 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
12520 openacc);
12521 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12522 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12523 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12524 OMP_CLAUSE_PRIVATE);
12525 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12526 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12527 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12528 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12529 }
12530 *gfor_clauses_ptr = c;
12531 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12532 *gtask_clauses_ptr
12533 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12534 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12535 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12536 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12537 gtask_clauses_ptr
12538 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12539 break;
12540 /* The allocate clause is duplicated on the task and inner taskloop
12541 if the decl is lastprivate; otherwise it is put just on the task. */
12542 case OMP_CLAUSE_ALLOCATE:
12543 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12544 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12545 {
12546 /* Additionally, put a firstprivate clause on the task
12547 for the allocator if it is not constant. */
12548 *gtask_clauses_ptr
12549 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12550 OMP_CLAUSE_FIRSTPRIVATE);
12551 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
12552 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
12553 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12554 }
12555 if (lastprivate_uids
12556 && bitmap_bit_p (lastprivate_uids,
12557 DECL_UID (OMP_CLAUSE_DECL (c))))
12558 {
12559 *gfor_clauses_ptr = c;
12560 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12561 *gtask_clauses_ptr = copy_node (c);
12562 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12563 }
12564 else
12565 {
12566 *gtask_clauses_ptr = c;
12567 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12568 }
12569 break;
12570 default:
12571 gcc_unreachable ();
12572 }
12573 *gfor_clauses_ptr = NULL_TREE;
12574 *gtask_clauses_ptr = NULL_TREE;
12575 *gforo_clauses_ptr = NULL_TREE;
12576 BITMAP_FREE (lastprivate_uids);
12577 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12578 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12579 NULL_TREE, NULL_TREE, NULL_TREE);
12580 gimple_omp_task_set_taskloop_p (g, true);
12581 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12582 gomp_for *gforo
12583 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12584 gimple_omp_for_collapse (gfor),
12585 gimple_omp_for_pre_body (gfor));
12586 gimple_omp_for_set_pre_body (gfor, NULL);
12587 gimple_omp_for_set_combined_p (gforo, true);
12588 gimple_omp_for_set_combined_into_p (gfor, true);
12589 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12590 {
12591 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12592 tree v = create_tmp_var (type);
12593 gimple_omp_for_set_index (gforo, i, v);
12594 t = unshare_expr (gimple_omp_for_initial (gfor, i));
12595 gimple_omp_for_set_initial (gforo, i, t);
12596 gimple_omp_for_set_cond (gforo, i,
12597 gimple_omp_for_cond (gfor, i));
12598 t = unshare_expr (gimple_omp_for_final (gfor, i));
12599 gimple_omp_for_set_final (gforo, i, t);
12600 t = unshare_expr (gimple_omp_for_incr (gfor, i));
12601 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12602 TREE_OPERAND (t, 0) = v;
12603 gimple_omp_for_set_incr (gforo, i, t);
12604 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12605 OMP_CLAUSE_DECL (t) = v;
12606 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12607 gimple_omp_for_set_clauses (gforo, t);
12608 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
12609 {
12610 tree *p1 = NULL, *p2 = NULL;
12611 t = gimple_omp_for_initial (gforo, i);
12612 if (TREE_CODE (t) == TREE_VEC)
12613 p1 = &TREE_VEC_ELT (t, 0);
12614 t = gimple_omp_for_final (gforo, i);
12615 if (TREE_CODE (t) == TREE_VEC)
12616 {
12617 if (p1)
12618 p2 = &TREE_VEC_ELT (t, 0);
12619 else
12620 p1 = &TREE_VEC_ELT (t, 0);
12621 }
12622 if (p1)
12623 {
12624 int j;
12625 for (j = 0; j < i; j++)
12626 if (*p1 == gimple_omp_for_index (gfor, j))
12627 {
12628 *p1 = gimple_omp_for_index (gforo, j);
12629 if (p2)
12630 *p2 = *p1;
12631 break;
12632 }
12633 gcc_assert (j < i);
12634 }
12635 }
12636 }
12637 gimplify_seq_add_stmt (pre_p, gforo);
12638 }
12639 else
12640 gimplify_seq_add_stmt (pre_p, gfor);
12641
12642 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12643 {
12644 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12645 unsigned lastprivate_conditional = 0;
12646 while (ctx
12647 && (ctx->region_type == ORT_TARGET_DATA
12648 || ctx->region_type == ORT_TASKGROUP))
12649 ctx = ctx->outer_context;
12650 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12651 for (tree c = gimple_omp_for_clauses (gfor);
12652 c; c = OMP_CLAUSE_CHAIN (c))
12653 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12654 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12655 ++lastprivate_conditional;
12656 if (lastprivate_conditional)
12657 {
12658 struct omp_for_data fd;
12659 omp_extract_for_data (gfor, &fd, NULL);
12660 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12661 lastprivate_conditional);
12662 tree var = create_tmp_var_raw (type);
12663 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12664 OMP_CLAUSE_DECL (c) = var;
12665 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12666 gimple_omp_for_set_clauses (gfor, c);
12667 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12668 }
12669 }
12670 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12671 {
12672 unsigned lastprivate_conditional = 0;
12673 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12674 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12675 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12676 ++lastprivate_conditional;
12677 if (lastprivate_conditional)
12678 {
12679 struct omp_for_data fd;
12680 omp_extract_for_data (gfor, &fd, NULL);
12681 tree type = unsigned_type_for (fd.iter_type);
12682 while (lastprivate_conditional--)
12683 {
12684 tree c = build_omp_clause (UNKNOWN_LOCATION,
12685 OMP_CLAUSE__CONDTEMP_);
12686 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12687 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12688 gimple_omp_for_set_clauses (gfor, c);
12689 }
12690 }
12691 }
12692
12693 if (ret != GS_ALL_DONE)
12694 return GS_ERROR;
12695 *expr_p = NULL_TREE;
12696 return GS_ALL_DONE;
12697 }
12698
12699 /* Helper for gimplify_omp_loop, called through walk_tree. */
12700
12701 static tree
12702 replace_reduction_placeholders (tree *tp, int *walk_subtrees, void *data)
12703 {
12704 if (DECL_P (*tp))
12705 {
12706 tree *d = (tree *) data;
12707 if (*tp == OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[0]))
12708 {
12709 *tp = OMP_CLAUSE_REDUCTION_PLACEHOLDER (d[1]);
12710 *walk_subtrees = 0;
12711 }
12712 else if (*tp == OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[0]))
12713 {
12714 *tp = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (d[1]);
12715 *walk_subtrees = 0;
12716 }
12717 }
12718 return NULL_TREE;
12719 }
12720
12721 /* Gimplify the gross structure of an OMP_LOOP statement. */
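/* For illustration, a sketch of the resulting nesting (the bind kind is
   resolved below, and clause distribution is handled in the pass loop
   further down):

     #pragma omp loop bind(thread)    ->  simd
     #pragma omp loop bind(parallel)  ->  for
                                            simd
     #pragma omp loop bind(teams)     ->  distribute
                                            parallel
                                              for
                                                simd  */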
12722
12723 static enum gimplify_status
12724 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12725 {
12726 tree for_stmt = *expr_p;
12727 tree clauses = OMP_FOR_CLAUSES (for_stmt);
12728 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12729 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12730 int i;
12731
12732 /* If order is not present, the behavior is as if order(concurrent)
12733 appeared. */
12734 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12735 if (order == NULL_TREE)
12736 {
12737 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12738 OMP_CLAUSE_CHAIN (order) = clauses;
12739 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12740 }
12741
12742 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12743 if (bind == NULL_TREE)
12744 {
12745 if (!flag_openmp) /* flag_openmp_simd */
12746 ;
12747 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12748 kind = OMP_CLAUSE_BIND_TEAMS;
12749 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12750 kind = OMP_CLAUSE_BIND_PARALLEL;
12751 else
12752 {
12753 for (; octx; octx = octx->outer_context)
12754 {
12755 if ((octx->region_type & ORT_ACC) != 0
12756 || octx->region_type == ORT_NONE
12757 || octx->region_type == ORT_IMPLICIT_TARGET)
12758 continue;
12759 break;
12760 }
12761 if (octx == NULL && !in_omp_construct)
12762 error_at (EXPR_LOCATION (for_stmt),
12763 "%<bind%> clause not specified on a %<loop%> "
12764 "construct not nested inside another OpenMP construct");
12765 }
12766 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12767 OMP_CLAUSE_CHAIN (bind) = clauses;
12768 OMP_CLAUSE_BIND_KIND (bind) = kind;
12769 OMP_FOR_CLAUSES (for_stmt) = bind;
12770 }
12771 else
12772 switch (OMP_CLAUSE_BIND_KIND (bind))
12773 {
12774 case OMP_CLAUSE_BIND_THREAD:
12775 break;
12776 case OMP_CLAUSE_BIND_PARALLEL:
12777 if (!flag_openmp) /* flag_openmp_simd */
12778 {
12779 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12780 break;
12781 }
12782 for (; octx; octx = octx->outer_context)
12783 if (octx->region_type == ORT_SIMD
12784 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12785 {
12786 error_at (EXPR_LOCATION (for_stmt),
12787 "%<bind(parallel)%> on a %<loop%> construct nested "
12788 "inside %<simd%> construct");
12789 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12790 break;
12791 }
12792 kind = OMP_CLAUSE_BIND_PARALLEL;
12793 break;
12794 case OMP_CLAUSE_BIND_TEAMS:
12795 if (!flag_openmp) /* flag_openmp_simd */
12796 {
12797 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12798 break;
12799 }
12800 if ((octx
12801 && octx->region_type != ORT_IMPLICIT_TARGET
12802 && octx->region_type != ORT_NONE
12803 && (octx->region_type & ORT_TEAMS) == 0)
12804 || in_omp_construct)
12805 {
12806 error_at (EXPR_LOCATION (for_stmt),
12807 "%<bind(teams)%> on a %<loop%> region not strictly "
12808 "nested inside of a %<teams%> region");
12809 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12810 break;
12811 }
12812 kind = OMP_CLAUSE_BIND_TEAMS;
12813 break;
12814 default:
12815 gcc_unreachable ();
12816 }
12817
12818 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12819 switch (OMP_CLAUSE_CODE (*pc))
12820 {
12821 case OMP_CLAUSE_REDUCTION:
12822 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12823 {
12824 error_at (OMP_CLAUSE_LOCATION (*pc),
12825 "%<inscan%> %<reduction%> clause on "
12826 "%qs construct", "loop");
12827 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12828 }
12829 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12830 {
12831 error_at (OMP_CLAUSE_LOCATION (*pc),
12832 "invalid %<task%> reduction modifier on construct "
12833 "other than %<parallel%>, %qs or %<sections%>",
12834 lang_GNU_Fortran () ? "do" : "for");
12835 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12836 }
12837 pc = &OMP_CLAUSE_CHAIN (*pc);
12838 break;
12839 case OMP_CLAUSE_LASTPRIVATE:
12840 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12841 {
12842 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12843 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12844 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12845 break;
12846 if (OMP_FOR_ORIG_DECLS (for_stmt)
12847 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12848 i)) == TREE_LIST
12849 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12850 i)))
12851 {
12852 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12853 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12854 break;
12855 }
12856 }
12857 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12858 {
12859 error_at (OMP_CLAUSE_LOCATION (*pc),
12860 "%<lastprivate%> clause on a %<loop%> construct refers "
12861 "to a variable %qD which is not the loop iterator",
12862 OMP_CLAUSE_DECL (*pc));
12863 *pc = OMP_CLAUSE_CHAIN (*pc);
12864 break;
12865 }
12866 pc = &OMP_CLAUSE_CHAIN (*pc);
12867 break;
12868 default:
12869 pc = &OMP_CLAUSE_CHAIN (*pc);
12870 break;
12871 }
12872
12873 TREE_SET_CODE (for_stmt, OMP_SIMD);
12874
12875 int last;
12876 switch (kind)
12877 {
12878 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12879 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12880 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12881 }
12882 for (int pass = 1; pass <= last; pass++)
12883 {
12884 if (pass == 2)
12885 {
12886 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
12887 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12888 *expr_p = make_node (OMP_PARALLEL);
12889 TREE_TYPE (*expr_p) = void_type_node;
12890 OMP_PARALLEL_BODY (*expr_p) = bind;
12891 OMP_PARALLEL_COMBINED (*expr_p) = 1;
12892 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12893 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12894 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12895 if (OMP_FOR_ORIG_DECLS (for_stmt)
12896 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12897 == TREE_LIST))
12898 {
12899 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12900 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12901 {
12902 *pc = build_omp_clause (UNKNOWN_LOCATION,
12903 OMP_CLAUSE_FIRSTPRIVATE);
12904 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12905 pc = &OMP_CLAUSE_CHAIN (*pc);
12906 }
12907 }
12908 }
12909 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12910 tree *pc = &OMP_FOR_CLAUSES (t);
12911 TREE_TYPE (t) = void_type_node;
12912 OMP_FOR_BODY (t) = *expr_p;
12913 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12914 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12915 switch (OMP_CLAUSE_CODE (c))
12916 {
12917 case OMP_CLAUSE_BIND:
12918 case OMP_CLAUSE_ORDER:
12919 case OMP_CLAUSE_COLLAPSE:
12920 *pc = copy_node (c);
12921 pc = &OMP_CLAUSE_CHAIN (*pc);
12922 break;
12923 case OMP_CLAUSE_PRIVATE:
12924 case OMP_CLAUSE_FIRSTPRIVATE:
12925 /* Only needed on innermost. */
12926 break;
12927 case OMP_CLAUSE_LASTPRIVATE:
12928 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12929 {
12930 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12931 OMP_CLAUSE_FIRSTPRIVATE);
12932 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12933 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
12934 pc = &OMP_CLAUSE_CHAIN (*pc);
12935 }
12936 *pc = copy_node (c);
12937 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12938 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12939 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12940 {
12941 if (pass != last)
12942 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12943 else
12944 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
12945 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12946 }
12947 pc = &OMP_CLAUSE_CHAIN (*pc);
12948 break;
12949 case OMP_CLAUSE_REDUCTION:
12950 *pc = copy_node (c);
12951 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12952 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12953 OMP_CLAUSE_REDUCTION_INIT (*pc)
12954 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12955 OMP_CLAUSE_REDUCTION_MERGE (*pc)
12956 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12957 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12958 {
12959 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12960 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12961 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12962 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12963 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12964 tree nc = *pc;
12965 tree data[2] = { c, nc };
12966 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (nc),
12967 replace_reduction_placeholders,
12968 data);
12969 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (nc),
12970 replace_reduction_placeholders,
12971 data);
12972 }
12973 pc = &OMP_CLAUSE_CHAIN (*pc);
12974 break;
12975 default:
12976 gcc_unreachable ();
12977 }
12978 *pc = NULL_TREE;
12979 *expr_p = t;
12980 }
12981 return gimplify_omp_for (expr_p, pre_p);
12982 }
12983
12984
12985 /* Helper function of optimize_target_teams; find OMP_TEAMS inside
12986 of OMP_TARGET's body. */
12987
12988 static tree
12989 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12990 {
12991 *walk_subtrees = 0;
12992 switch (TREE_CODE (*tp))
12993 {
12994 case OMP_TEAMS:
12995 return *tp;
12996 case BIND_EXPR:
12997 case STATEMENT_LIST:
12998 *walk_subtrees = 1;
12999 break;
13000 default:
13001 break;
13002 }
13003 return NULL_TREE;
13004 }
13005
13006 /* Helper function of optimize_target_teams; determine if the expression
13007 can be computed safely on the host before the target construct. */
13008
13009 static tree
13010 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
13011 {
13012 splay_tree_node n;
13013
13014 if (TYPE_P (*tp))
13015 {
13016 *walk_subtrees = 0;
13017 return NULL_TREE;
13018 }
13019 switch (TREE_CODE (*tp))
13020 {
13021 case VAR_DECL:
13022 case PARM_DECL:
13023 case RESULT_DECL:
13024 *walk_subtrees = 0;
13025 if (error_operand_p (*tp)
13026 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
13027 || DECL_HAS_VALUE_EXPR_P (*tp)
13028 || DECL_THREAD_LOCAL_P (*tp)
13029 || TREE_SIDE_EFFECTS (*tp)
13030 || TREE_THIS_VOLATILE (*tp))
13031 return *tp;
13032 if (is_global_var (*tp)
13033 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
13034 || lookup_attribute ("omp declare target link",
13035 DECL_ATTRIBUTES (*tp))))
13036 return *tp;
13037 if (VAR_P (*tp)
13038 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
13039 && !is_global_var (*tp)
13040 && decl_function_context (*tp) == current_function_decl)
13041 return *tp;
13042 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
13043 (splay_tree_key) *tp);
13044 if (n == NULL)
13045 {
13046 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
13047 return NULL_TREE;
13048 return *tp;
13049 }
13050 else if (n->value & GOVD_LOCAL)
13051 return *tp;
13052 else if (n->value & GOVD_FIRSTPRIVATE)
13053 return NULL_TREE;
13054 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13055 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13056 return NULL_TREE;
13057 return *tp;
13058 case INTEGER_CST:
13059 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13060 return *tp;
13061 return NULL_TREE;
13062 case TARGET_EXPR:
13063 if (TARGET_EXPR_INITIAL (*tp)
13064 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
13065 return *tp;
13066 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
13067 walk_subtrees, NULL);
13068 /* Allow some reasonable subset of integral arithmetic. */
13069 case PLUS_EXPR:
13070 case MINUS_EXPR:
13071 case MULT_EXPR:
13072 case TRUNC_DIV_EXPR:
13073 case CEIL_DIV_EXPR:
13074 case FLOOR_DIV_EXPR:
13075 case ROUND_DIV_EXPR:
13076 case TRUNC_MOD_EXPR:
13077 case CEIL_MOD_EXPR:
13078 case FLOOR_MOD_EXPR:
13079 case ROUND_MOD_EXPR:
13080 case RDIV_EXPR:
13081 case EXACT_DIV_EXPR:
13082 case MIN_EXPR:
13083 case MAX_EXPR:
13084 case LSHIFT_EXPR:
13085 case RSHIFT_EXPR:
13086 case BIT_IOR_EXPR:
13087 case BIT_XOR_EXPR:
13088 case BIT_AND_EXPR:
13089 case NEGATE_EXPR:
13090 case ABS_EXPR:
13091 case BIT_NOT_EXPR:
13092 case NON_LVALUE_EXPR:
13093 CASE_CONVERT:
13094 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13095 return *tp;
13096 return NULL_TREE;
13097 /* And disallow anything else, except for comparisons. */
13098 default:
13099 if (COMPARISON_CLASS_P (*tp))
13100 return NULL_TREE;
13101 return *tp;
13102 }
13103 }
13104
13105 /* Try to determine if the num_teams and/or thread_limit expressions
13106 can have their values determined already before entering the
13107 target construct.
13108 INTEGER_CSTs trivially can; so can integral decls that are
13109 firstprivate (explicitly or implicitly) or explicitly mapped with
13110 map(always, to:) or map(always, tofrom:) on the target region,
13111 and expressions involving simple arithmetic on those. Function
13112 calls are not OK, nor is dereferencing something, etc.
13113 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13114 EXPR based on what we find:
13115 0 stands for a clause not specified at all; use the implementation
13116 default. -1 stands for a value that can't be determined easily
13117 before entering the target construct.
13118 If the teams construct is not present at all, use 1 for num_teams
13119 and 0 for thread_limit (only one team is involved, and the thread
13120 limit is implementation defined). */
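/* For illustration (a sketch; foo, bar and n are hypothetical):

     int n = foo ();
     #pragma omp target firstprivate(n)
     #pragma omp teams num_teams(n)
       ...

   n is firstprivate on the target region, so its value can be computed
   on the host, and a NUM_TEAMS clause with that value is added to the
   OMP_TARGET; num_teams(bar ()) would instead yield -1.  */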
13121
13122 static void
13123 optimize_target_teams (tree target, gimple_seq *pre_p)
13124 {
13125 tree body = OMP_BODY (target);
13126 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
13127 tree num_teams = integer_zero_node;
13128 tree thread_limit = integer_zero_node;
13129 location_t num_teams_loc = EXPR_LOCATION (target);
13130 location_t thread_limit_loc = EXPR_LOCATION (target);
13131 tree c, *p, expr;
13132 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
13133
13134 if (teams == NULL_TREE)
13135 num_teams = integer_one_node;
13136 else
13137 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
13138 {
13139 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
13140 {
13141 p = &num_teams;
13142 num_teams_loc = OMP_CLAUSE_LOCATION (c);
13143 }
13144 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
13145 {
13146 p = &thread_limit;
13147 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
13148 }
13149 else
13150 continue;
13151 expr = OMP_CLAUSE_OPERAND (c, 0);
13152 if (TREE_CODE (expr) == INTEGER_CST)
13153 {
13154 *p = expr;
13155 continue;
13156 }
13157 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
13158 {
13159 *p = integer_minus_one_node;
13160 continue;
13161 }
13162 *p = expr;
13163 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
13164 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
13165 == GS_ERROR)
13166 {
13167 gimplify_omp_ctxp = target_ctx;
13168 *p = integer_minus_one_node;
13169 continue;
13170 }
13171 gimplify_omp_ctxp = target_ctx;
13172 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
13173 OMP_CLAUSE_OPERAND (c, 0) = *p;
13174 }
13175 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
13176 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
13177 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13178 OMP_TARGET_CLAUSES (target) = c;
13179 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
13180 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
13181 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
13182 OMP_TARGET_CLAUSES (target) = c;
13183 }
13184
13185 /* Gimplify the gross structure of several OMP constructs. */
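/* For example, "#pragma omp target data map(...)" is lowered below so
   that the unmapping happens even on abnormal exit, roughly (a sketch):

     GIMPLE_OMP_TARGET (kind=data, map(...))
       try
         ... gimplified body ...
       finally
         GOMP_target_end_data ();  */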
13186
13187 static void
13188 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
13189 {
13190 tree expr = *expr_p;
13191 gimple *stmt;
13192 gimple_seq body = NULL;
13193 enum omp_region_type ort;
13194
13195 switch (TREE_CODE (expr))
13196 {
13197 case OMP_SECTIONS:
13198 case OMP_SINGLE:
13199 ort = ORT_WORKSHARE;
13200 break;
13201 case OMP_TARGET:
13202 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
13203 break;
13204 case OACC_KERNELS:
13205 ort = ORT_ACC_KERNELS;
13206 break;
13207 case OACC_PARALLEL:
13208 ort = ORT_ACC_PARALLEL;
13209 break;
13210 case OACC_SERIAL:
13211 ort = ORT_ACC_SERIAL;
13212 break;
13213 case OACC_DATA:
13214 ort = ORT_ACC_DATA;
13215 break;
13216 case OMP_TARGET_DATA:
13217 ort = ORT_TARGET_DATA;
13218 break;
13219 case OMP_TEAMS:
13220 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
13221 if (gimplify_omp_ctxp == NULL
13222 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
13223 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
13224 break;
13225 case OACC_HOST_DATA:
13226 ort = ORT_ACC_HOST_DATA;
13227 break;
13228 default:
13229 gcc_unreachable ();
13230 }
13231
13232 bool save_in_omp_construct = in_omp_construct;
13233 if ((ort & ORT_ACC) == 0)
13234 in_omp_construct = false;
13235 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
13236 TREE_CODE (expr));
13237 if (TREE_CODE (expr) == OMP_TARGET)
13238 optimize_target_teams (expr, pre_p);
13239 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
13240 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13241 {
13242 push_gimplify_context ();
13243 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
13244 if (gimple_code (g) == GIMPLE_BIND)
13245 pop_gimplify_context (g);
13246 else
13247 pop_gimplify_context (NULL);
13248 if ((ort & ORT_TARGET_DATA) != 0)
13249 {
13250 enum built_in_function end_ix;
13251 switch (TREE_CODE (expr))
13252 {
13253 case OACC_DATA:
13254 case OACC_HOST_DATA:
13255 end_ix = BUILT_IN_GOACC_DATA_END;
13256 break;
13257 case OMP_TARGET_DATA:
13258 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
13259 break;
13260 default:
13261 gcc_unreachable ();
13262 }
13263 tree fn = builtin_decl_explicit (end_ix);
13264 g = gimple_build_call (fn, 0);
13265 gimple_seq cleanup = NULL;
13266 gimple_seq_add_stmt (&cleanup, g);
13267 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
13268 body = NULL;
13269 gimple_seq_add_stmt (&body, g);
13270 }
13271 }
13272 else
13273 gimplify_and_add (OMP_BODY (expr), &body);
13274 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
13275 TREE_CODE (expr));
13276 in_omp_construct = save_in_omp_construct;
13277
13278 switch (TREE_CODE (expr))
13279 {
13280 case OACC_DATA:
13281 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
13282 OMP_CLAUSES (expr));
13283 break;
13284 case OACC_HOST_DATA:
13285 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
13286 {
13287 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
13289 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
13290 }
13291
13292 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
13293 OMP_CLAUSES (expr));
13294 break;
13295 case OACC_KERNELS:
13296 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
13297 OMP_CLAUSES (expr));
13298 break;
13299 case OACC_PARALLEL:
13300 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
13301 OMP_CLAUSES (expr));
13302 break;
13303 case OACC_SERIAL:
13304 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
13305 OMP_CLAUSES (expr));
13306 break;
13307 case OMP_SECTIONS:
13308 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
13309 break;
13310 case OMP_SINGLE:
13311 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
13312 break;
13313 case OMP_TARGET:
13314 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
13315 OMP_CLAUSES (expr));
13316 break;
13317 case OMP_TARGET_DATA:
13318 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
13319 to be evaluated before the use_device_{ptr,addr} clauses if they
13320 refer to the same variables. */
13321 {
13322 tree use_device_clauses;
13323 tree *pc, *uc = &use_device_clauses;
13324 for (pc = &OMP_CLAUSES (expr); *pc; )
13325 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
13326 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
13327 {
13328 *uc = *pc;
13329 *pc = OMP_CLAUSE_CHAIN (*pc);
13330 uc = &OMP_CLAUSE_CHAIN (*uc);
13331 }
13332 else
13333 pc = &OMP_CLAUSE_CHAIN (*pc);
13334 *uc = NULL_TREE;
13335 *pc = use_device_clauses;
13336 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
13337 OMP_CLAUSES (expr));
13338 }
13339 break;
13340 case OMP_TEAMS:
13341 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
13342 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
13343 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
13344 break;
13345 default:
13346 gcc_unreachable ();
13347 }
13348
13349 gimplify_seq_add_stmt (pre_p, stmt);
13350 *expr_p = NULL_TREE;
13351 }
13352
13353 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
13354 target update constructs. */
13355
13356 static void
13357 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
13358 {
13359 tree expr = *expr_p;
13360 int kind;
13361 gomp_target *stmt;
13362 enum omp_region_type ort = ORT_WORKSHARE;
13363
13364 switch (TREE_CODE (expr))
13365 {
13366 case OACC_ENTER_DATA:
13367 case OACC_EXIT_DATA:
13368 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
13369 ort = ORT_ACC;
13370 break;
13371 case OACC_UPDATE:
13372 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
13373 ort = ORT_ACC;
13374 break;
13375 case OMP_TARGET_UPDATE:
13376 kind = GF_OMP_TARGET_KIND_UPDATE;
13377 break;
13378 case OMP_TARGET_ENTER_DATA:
13379 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
13380 break;
13381 case OMP_TARGET_EXIT_DATA:
13382 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
13383 break;
13384 default:
13385 gcc_unreachable ();
13386 }
13387 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
13388 ort, TREE_CODE (expr));
13389 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
13390 TREE_CODE (expr));
13391 if (TREE_CODE (expr) == OACC_UPDATE
13392 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13393 OMP_CLAUSE_IF_PRESENT))
13394 {
13395 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
13396 clause. */
13397 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13398 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13399 switch (OMP_CLAUSE_MAP_KIND (c))
13400 {
13401 case GOMP_MAP_FORCE_TO:
13402 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
13403 break;
13404 case GOMP_MAP_FORCE_FROM:
13405 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
13406 break;
13407 default:
13408 break;
13409 }
13410 }
13411 else if (TREE_CODE (expr) == OACC_EXIT_DATA
13412 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
13413 OMP_CLAUSE_FINALIZE))
13414 {
13415 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
13416 semantics. */
13417 bool have_clause = false;
13418 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13419 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
13420 switch (OMP_CLAUSE_MAP_KIND (c))
13421 {
13422 case GOMP_MAP_FROM:
13423 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
13424 have_clause = true;
13425 break;
13426 case GOMP_MAP_RELEASE:
13427 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
13428 have_clause = true;
13429 break;
13430 case GOMP_MAP_TO_PSET:
13431 /* Fortran arrays with descriptors must map that descriptor when
13432 doing standalone "attach" operations (in OpenACC). In that
13433 case GOMP_MAP_TO_PSET appears by itself with no preceding
13434 clause (see trans-openmp.c:gfc_trans_omp_clauses). */
13435 break;
13436 case GOMP_MAP_POINTER:
13437 /* TODO PR92929: we may see these here, but they'll always follow
13438 one of the clauses above, and will be handled by libgomp as
13439 one group, so no handling required here. */
13440 gcc_assert (have_clause);
13441 break;
13442 case GOMP_MAP_DETACH:
13443 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
13444 have_clause = false;
13445 break;
13446 case GOMP_MAP_STRUCT:
13447 have_clause = false;
13448 break;
13449 default:
13450 gcc_unreachable ();
13451 }
13452 }
13453 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
13454
13455 gimplify_seq_add_stmt (pre_p, stmt);
13456 *expr_p = NULL_TREE;
13457 }
13458
13459 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
13460 stabilized the lhs of the atomic operation as *ADDR. Return true if
13461 EXPR is this stabilized form. */
13462
13463 static bool
13464 goa_lhs_expr_p (tree expr, tree addr)
13465 {
13466 /* Also include casts to other type variants. The C front end is fond
13467 of adding these for e.g. volatile variables. This is like
13468 STRIP_TYPE_NOPS but includes the main variant lookup. */
13469 STRIP_USELESS_TYPE_CONVERSION (expr);
13470
13471 if (TREE_CODE (expr) == INDIRECT_REF)
13472 {
13473 expr = TREE_OPERAND (expr, 0);
13474 while (expr != addr
13475 && (CONVERT_EXPR_P (expr)
13476 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13477 && TREE_CODE (expr) == TREE_CODE (addr)
13478 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13479 {
13480 expr = TREE_OPERAND (expr, 0);
13481 addr = TREE_OPERAND (addr, 0);
13482 }
13483 if (expr == addr)
13484 return true;
13485 return (TREE_CODE (addr) == ADDR_EXPR
13486 && TREE_CODE (expr) == ADDR_EXPR
13487 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13488 }
13489 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13490 return true;
13491 return false;
13492 }
13493
13494 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
13495 expression does not involve the lhs, evaluate it into a temporary.
13496 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13497 or -1 if an error was encountered. */
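/* For instance, given "#pragma omp atomic" applied to "x = x + foo ()"
   (a sketch; foo is hypothetical), foo () does not involve the lhs x,
   so it is evaluated into a temporary in *PRE_P, and only the addition
   on LHS_VAR remains between the atomic load and store.  */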
13498
13499 static int
13500 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
13501 tree lhs_var)
13502 {
13503 tree expr = *expr_p;
13504 int saw_lhs;
13505
13506 if (goa_lhs_expr_p (expr, lhs_addr))
13507 {
13508 *expr_p = lhs_var;
13509 return 1;
13510 }
13511 if (is_gimple_val (expr))
13512 return 0;
13513
13514 saw_lhs = 0;
13515 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
13516 {
13517 case tcc_binary:
13518 case tcc_comparison:
13519 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
13520 lhs_var);
13521 /* FALLTHRU */
13522 case tcc_unary:
13523 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
13524 lhs_var);
13525 break;
13526 case tcc_expression:
13527 switch (TREE_CODE (expr))
13528 {
13529 case TRUTH_ANDIF_EXPR:
13530 case TRUTH_ORIF_EXPR:
13531 case TRUTH_AND_EXPR:
13532 case TRUTH_OR_EXPR:
13533 case TRUTH_XOR_EXPR:
13534 case BIT_INSERT_EXPR:
13535 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
13536 lhs_addr, lhs_var);
13537 /* FALLTHRU */
13538 case TRUTH_NOT_EXPR:
13539 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13540 lhs_addr, lhs_var);
13541 break;
13542 case COMPOUND_EXPR:
13543 /* Break out any preevaluations from cp_build_modify_expr. */
13544 for (; TREE_CODE (expr) == COMPOUND_EXPR;
13545 expr = TREE_OPERAND (expr, 1))
13546 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
13547 *expr_p = expr;
13548 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
13549 default:
13550 break;
13551 }
13552 break;
13553 case tcc_reference:
13554 if (TREE_CODE (expr) == BIT_FIELD_REF)
13555 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
13556 lhs_addr, lhs_var);
13557 break;
13558 default:
13559 break;
13560 }
13561
13562 if (saw_lhs == 0)
13563 {
13564 enum gimplify_status gs;
13565 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
13566 if (gs != GS_ALL_DONE)
13567 saw_lhs = -1;
13568 }
13569
13570 return saw_lhs;
13571 }
13572
13573 /* Gimplify an OMP_ATOMIC statement. */
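/* For example (a sketch), "#pragma omp atomic capture { v = x; x = x + e; }"
   (OMP_ATOMIC_CAPTURE_OLD) is lowered roughly to:

     GIMPLE_OMP_ATOMIC_LOAD <tmp, &x>   <-- marked as needing its value
     t = tmp + e;
     GIMPLE_OMP_ATOMIC_STORE <t>

   with *EXPR_P replaced by tmp, the captured old value.  */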
13574
13575 static enum gimplify_status
13576 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
13577 {
13578 tree addr = TREE_OPERAND (*expr_p, 0);
13579 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
13580 ? NULL : TREE_OPERAND (*expr_p, 1);
13581 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
13582 tree tmp_load;
13583 gomp_atomic_load *loadstmt;
13584 gomp_atomic_store *storestmt;
13585
13586 tmp_load = create_tmp_reg (type);
13587 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
13588 return GS_ERROR;
13589
13590 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
13591 != GS_ALL_DONE)
13592 return GS_ERROR;
13593
13594 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
13595 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13596 gimplify_seq_add_stmt (pre_p, loadstmt);
13597 if (rhs)
13598 {
13599 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
13600 representatives. Use BIT_FIELD_REF on the lhs instead. */
13601 if (TREE_CODE (rhs) == BIT_INSERT_EXPR
13602 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
13603 {
13604 tree bitpos = TREE_OPERAND (rhs, 2);
13605 tree op1 = TREE_OPERAND (rhs, 1);
13606 tree bitsize;
13607 tree tmp_store = tmp_load;
13608 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
13609 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
13610 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
13611 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
13612 else
13613 bitsize = TYPE_SIZE (TREE_TYPE (op1));
13614 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
13615 tree t = build2_loc (EXPR_LOCATION (rhs),
13616 MODIFY_EXPR, void_type_node,
13617 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
13618 TREE_TYPE (op1), tmp_store, bitsize,
13619 bitpos), op1);
13620 gimplify_and_add (t, pre_p);
13621 rhs = tmp_store;
13622 }
13623 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
13624 != GS_ALL_DONE)
13625 return GS_ERROR;
13626 }
13627
13628 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
13629 rhs = tmp_load;
13630 storestmt
13631 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
13632 gimplify_seq_add_stmt (pre_p, storestmt);
13633 switch (TREE_CODE (*expr_p))
13634 {
13635 case OMP_ATOMIC_READ:
13636 case OMP_ATOMIC_CAPTURE_OLD:
13637 *expr_p = tmp_load;
13638 gimple_omp_atomic_set_need_value (loadstmt);
13639 break;
13640 case OMP_ATOMIC_CAPTURE_NEW:
13641 *expr_p = rhs;
13642 gimple_omp_atomic_set_need_value (storestmt);
13643 break;
13644 default:
13645 *expr_p = NULL;
13646 break;
13647 }
13648
13649 return GS_ALL_DONE;
13650 }
13651
13652 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
13653 body, and adding some EH bits. */
13654
13655 static enum gimplify_status
13656 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
13657 {
13658 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
13659 gimple *body_stmt;
13660 gtransaction *trans_stmt;
13661 gimple_seq body = NULL;
13662 int subcode = 0;
13663
13664 /* Wrap the transaction body in a BIND_EXPR so we have a context
13665 in which to put decls for OMP. */
13666 if (TREE_CODE (tbody) != BIND_EXPR)
13667 {
13668 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
13669 TREE_SIDE_EFFECTS (bind) = 1;
13670 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
13671 TRANSACTION_EXPR_BODY (expr) = bind;
13672 }
13673
13674 push_gimplify_context ();
13675 temp = voidify_wrapper_expr (*expr_p, NULL);
13676
13677 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
13678 pop_gimplify_context (body_stmt);
13679
13680 trans_stmt = gimple_build_transaction (body);
13681 if (TRANSACTION_EXPR_OUTER (expr))
13682 subcode = GTMA_IS_OUTER;
13683 else if (TRANSACTION_EXPR_RELAXED (expr))
13684 subcode = GTMA_IS_RELAXED;
13685 gimple_transaction_set_subcode (trans_stmt, subcode);
13686
13687 gimplify_seq_add_stmt (pre_p, trans_stmt);
13688
13689 if (temp)
13690 {
13691 *expr_p = temp;
13692 return GS_OK;
13693 }
13694
13695 *expr_p = NULL_TREE;
13696 return GS_ALL_DONE;
13697 }
13698
13699 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
13700 is the OMP_BODY of the original EXPR (which has already been
13701 gimplified so it's not present in the EXPR).
13702
13703 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
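/* For example (a sketch):

     #pragma omp for ordered(2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
         {
           #pragma omp ordered depend(sink: i-1, j)
           ...
         }

   Each variable in a sink vector must be the iteration variable of the
   corresponding outermost loop, in order, which is what the checks
   below enforce.  */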
13704
13705 static gimple *
13706 gimplify_omp_ordered (tree expr, gimple_seq body)
13707 {
13708 tree c, decls;
13709 int failures = 0;
13710 unsigned int i;
13711 tree source_c = NULL_TREE;
13712 tree sink_c = NULL_TREE;
13713
13714 if (gimplify_omp_ctxp)
13715 {
13716 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
13717 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13718 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
13719 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
13720 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
13721 {
13722 error_at (OMP_CLAUSE_LOCATION (c),
13723 "%<ordered%> construct with %<depend%> clause must be "
13724 "closely nested inside a loop with %<ordered%> clause "
13725 "with a parameter");
13726 failures++;
13727 }
13728 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13729 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
13730 {
13731 bool fail = false;
13732 for (decls = OMP_CLAUSE_DECL (c), i = 0;
13733 decls && TREE_CODE (decls) == TREE_LIST;
13734 decls = TREE_CHAIN (decls), ++i)
13735 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
13736 continue;
13737 else if (TREE_VALUE (decls)
13738 != gimplify_omp_ctxp->loop_iter_var[2 * i])
13739 {
13740 error_at (OMP_CLAUSE_LOCATION (c),
13741 "variable %qE is not an iteration "
13742 "of outermost loop %d, expected %qE",
13743 TREE_VALUE (decls), i + 1,
13744 gimplify_omp_ctxp->loop_iter_var[2 * i]);
13745 fail = true;
13746 failures++;
13747 }
13748 else
13749 TREE_VALUE (decls)
13750 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
13751 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
13752 {
13753 error_at (OMP_CLAUSE_LOCATION (c),
13754 "number of variables in %<depend%> clause with "
13755 "%<sink%> modifier does not match number of "
13756 "iteration variables");
13757 failures++;
13758 }
13759 sink_c = c;
13760 }
13761 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
13762 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
13763 {
13764 if (source_c)
13765 {
13766 error_at (OMP_CLAUSE_LOCATION (c),
13767 "more than one %<depend%> clause with %<source%> "
13768 "modifier on an %<ordered%> construct");
13769 failures++;
13770 }
13771 else
13772 source_c = c;
13773 }
13774 }
13775 if (source_c && sink_c)
13776 {
13777 error_at (OMP_CLAUSE_LOCATION (source_c),
13778 "%<depend%> clause with %<source%> modifier specified "
13779 "together with %<depend%> clauses with %<sink%> modifier "
13780 "on the same construct");
13781 failures++;
13782 }
13783
13784 if (failures)
13785 return gimple_build_nop ();
13786 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
13787 }
13788
13789 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
13790 expression produces a value to be used as an operand inside a GIMPLE
13791 statement, the value will be stored back in *EXPR_P. This value will
13792 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13793 an SSA_NAME. The corresponding sequence of GIMPLE statements is
13794 emitted in PRE_P and POST_P.
13795
13796 Additionally, this process may overwrite parts of the input
13797 expression during gimplification. Ideally, it should be
13798 possible to do non-destructive gimplification.
13799
13800 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
13801 the expression needs to evaluate to a value to be used as
13802 an operand in a GIMPLE statement, this value will be stored in
13803 *EXPR_P on exit. This happens when the caller specifies one
13804 of fb_lvalue or fb_rvalue fallback flags.
13805
13806 PRE_P will contain the sequence of GIMPLE statements corresponding
13807 to the evaluation of EXPR and all the side-effects that must
13808 be executed before the main expression. On exit, the last
13809 statement of PRE_P is the core statement being gimplified. For
13810 instance, when gimplifying 'if (++a)' the last statement in
13811 PRE_P will be 'if (t.1)' where t.1 is the result of
13812 pre-incrementing 'a'.
13813
13814 POST_P will contain the sequence of GIMPLE statements corresponding
13815 to the evaluation of all the side-effects that must be executed
13816 after the main expression. If this is NULL, the post
13817 side-effects are stored at the end of PRE_P.
13818
13819 The reason why the output is split in two is to handle post
13820 side-effects explicitly. In some cases, an expression may have
13821 inner and outer post side-effects which need to be emitted in
13822 an order different from the one given by the recursive
13823 traversal. For instance, for the expression (*p--)++ the post
13824 side-effects of '--' must actually occur *after* the post
13825 side-effects of '++'. However, gimplification will first visit
13826 the inner expression, so if a separate POST sequence was not
13827 used, the resulting sequence would be:
13828
13829 1 t.1 = *p
13830 2 p = p - 1
13831 3 t.2 = t.1 + 1
13832 4 *p = t.2
13833
13834 However, the post-decrement operation in line #2 must not be
13835 evaluated until after the store to *p at line #4, so the
13836 correct sequence should be:
13837
13838 1 t.1 = *p
13839 2 t.2 = t.1 + 1
13840 3 *p = t.2
13841 4 p = p - 1
13842
13843 So, by specifying a separate post queue, it is possible
13844 to emit the post side-effects in the correct order.
13845 If POST_P is NULL, an internal queue will be used. Before
13846 returning to the caller, the sequence POST_P is appended to
13847 the main output sequence PRE_P.
13848
13849 GIMPLE_TEST_F points to a function that takes a tree T and
13850 returns nonzero if T is in the GIMPLE form requested by the
13851 caller. The GIMPLE predicates are in gimple.c.
13852
13853 FALLBACK tells the function what sort of a temporary we want if
13854 gimplification cannot produce an expression that complies with
13855 GIMPLE_TEST_F.
13856
13857 fb_none means that no temporary should be generated
13858 fb_rvalue means that an rvalue is OK to generate
13859 fb_lvalue means that an lvalue is OK to generate
13860 fb_either means that either is OK, but an lvalue is preferable.
13861 fb_mayfail means that gimplification may fail (in which case
13862 GS_ERROR will be returned)
13863
13864 The return value is either GS_ERROR or GS_ALL_DONE, since this
13865 function iterates until EXPR is completely gimplified or an error
13866 occurs. */
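/* A typical use (a sketch): to force OP into a GIMPLE value, emitting
   any required statements into *PRE_P:

     ret = gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue);

   On success OP is a constant, a declaration or an SSA_NAME.  */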
13867
13868 enum gimplify_status
13869 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13870 bool (*gimple_test_f) (tree), fallback_t fallback)
13871 {
13872 tree tmp;
13873 gimple_seq internal_pre = NULL;
13874 gimple_seq internal_post = NULL;
13875 tree save_expr;
13876 bool is_statement;
13877 location_t saved_location;
13878 enum gimplify_status ret;
13879 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13880 tree label;
13881
13882 save_expr = *expr_p;
13883 if (save_expr == NULL_TREE)
13884 return GS_ALL_DONE;
13885
13886 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
13887 is_statement = gimple_test_f == is_gimple_stmt;
13888 if (is_statement)
13889 gcc_assert (pre_p);
13890
13891 /* Consistency checks. */
13892 if (gimple_test_f == is_gimple_reg)
13893 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13894 else if (gimple_test_f == is_gimple_val
13895 || gimple_test_f == is_gimple_call_addr
13896 || gimple_test_f == is_gimple_condexpr
13897 || gimple_test_f == is_gimple_condexpr_for_cond
13898 || gimple_test_f == is_gimple_mem_rhs
13899 || gimple_test_f == is_gimple_mem_rhs_or_call
13900 || gimple_test_f == is_gimple_reg_rhs
13901 || gimple_test_f == is_gimple_reg_rhs_or_call
13902 || gimple_test_f == is_gimple_asm_val
13903 || gimple_test_f == is_gimple_mem_ref_addr)
13904 gcc_assert (fallback & fb_rvalue);
13905 else if (gimple_test_f == is_gimple_min_lval
13906 || gimple_test_f == is_gimple_lvalue)
13907 gcc_assert (fallback & fb_lvalue);
13908 else if (gimple_test_f == is_gimple_addressable)
13909 gcc_assert (fallback & fb_either);
13910 else if (gimple_test_f == is_gimple_stmt)
13911 gcc_assert (fallback == fb_none);
13912 else
13913 {
13914 /* We should have recognized the GIMPLE_TEST_F predicate to
13915 know what kind of fallback to use in case a temporary is
13916 needed to hold the value or address of *EXPR_P. */
13917 gcc_unreachable ();
13918 }
13919
13920 /* We used to check the predicate here and return immediately if it
13921 succeeds. This is wrong; the design is for gimplification to be
13922 idempotent, and for the predicates to only test for valid forms, not
13923 whether they are fully simplified. */
13924 if (pre_p == NULL)
13925 pre_p = &internal_pre;
13926
13927 if (post_p == NULL)
13928 post_p = &internal_post;
13929
13930 /* Remember the last statements added to PRE_P and POST_P. Every
13931 new statement added by the gimplification helpers needs to be
13932 annotated with location information. To centralize the
13933 responsibility, we remember the last statement that had been
13934 added to both queues before gimplifying *EXPR_P. If
13935 gimplification produces new statements in PRE_P and POST_P, those
13936 statements will be annotated with the same location information
13937 as *EXPR_P. */
13938 pre_last_gsi = gsi_last (*pre_p);
13939 post_last_gsi = gsi_last (*post_p);
13940
13941 saved_location = input_location;
13942 if (save_expr != error_mark_node
13943 && EXPR_HAS_LOCATION (*expr_p))
13944 input_location = EXPR_LOCATION (*expr_p);
13945
13946 /* Loop over the specific gimplifiers until the toplevel node
13947 no longer changes. */
13948 do
13949 {
13950 /* Strip away as many useless type conversions as possible
13951 at the toplevel. */
13952 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13953
13954 /* Remember the expr. */
13955 save_expr = *expr_p;
13956
13957 /* Die, die, die, my darling. */
13958 if (error_operand_p (save_expr))
13959 {
13960 ret = GS_ERROR;
13961 break;
13962 }
13963
13964 /* Do any language-specific gimplification. */
13965 ret = ((enum gimplify_status)
13966 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13967 if (ret == GS_OK)
13968 {
13969 if (*expr_p == NULL_TREE)
13970 break;
13971 if (*expr_p != save_expr)
13972 continue;
13973 }
13974 else if (ret != GS_UNHANDLED)
13975 break;
13976
13977 /* Make sure that all the cases set 'ret' appropriately. */
13978 ret = GS_UNHANDLED;
13979 switch (TREE_CODE (*expr_p))
13980 {
13981 /* First deal with the special cases. */
13982
13983 case POSTINCREMENT_EXPR:
13984 case POSTDECREMENT_EXPR:
13985 case PREINCREMENT_EXPR:
13986 case PREDECREMENT_EXPR:
13987 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13988 fallback != fb_none,
13989 TREE_TYPE (*expr_p));
13990 break;
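 /* For example (illustrative, names invented): when the value of "i++"
 is needed, the increment is queued on the postqueue, so "j = i++"
 gimplifies to roughly

 j = i;
 i = i + 1;

 whereas for "++i" the addition is emitted on the prequeue instead. */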
13991
13992 case VIEW_CONVERT_EXPR:
13993 if ((fallback & fb_rvalue)
13994 && is_gimple_reg_type (TREE_TYPE (*expr_p))
13995 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13996 {
13997 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13998 post_p, is_gimple_val, fb_rvalue);
13999 recalculate_side_effects (*expr_p);
14000 break;
14001 }
14002 /* Fallthru. */
14003
14004 case ARRAY_REF:
14005 case ARRAY_RANGE_REF:
14006 case REALPART_EXPR:
14007 case IMAGPART_EXPR:
14008 case COMPONENT_REF:
14009 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
14010 fallback ? fallback : fb_rvalue);
14011 break;
14012
14013 case COND_EXPR:
14014 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
14015
14016 /* C99 code may assign to an array in a structure value of a
14017 conditional expression, and this has undefined behavior
14018 only on execution, so create a temporary if an lvalue is
14019 required. */
14020 if (fallback == fb_lvalue)
14021 {
14022 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14023 mark_addressable (*expr_p);
14024 ret = GS_OK;
14025 }
14026 break;
14027
14028 case CALL_EXPR:
14029 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
14030
14031 /* C99 code may assign to an array in a structure returned
14032 from a function, and this has undefined behavior only on
14033 execution, so create a temporary if an lvalue is
14034 required. */
14035 if (fallback == fb_lvalue)
14036 {
14037 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14038 mark_addressable (*expr_p);
14039 ret = GS_OK;
14040 }
14041 break;
14042
14043 case TREE_LIST:
14044 gcc_unreachable ();
14045
14046 case COMPOUND_EXPR:
14047 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
14048 break;
14049
14050 case COMPOUND_LITERAL_EXPR:
14051 ret = gimplify_compound_literal_expr (expr_p, pre_p,
14052 gimple_test_f, fallback);
14053 break;
14054
14055 case MODIFY_EXPR:
14056 case INIT_EXPR:
14057 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
14058 fallback != fb_none);
14059 break;
14060
14061 case TRUTH_ANDIF_EXPR:
14062 case TRUTH_ORIF_EXPR:
14063 {
14064 /* Preserve the original type of the expression and the
14065 source location of the outer expression. */
14066 tree org_type = TREE_TYPE (*expr_p);
14067 *expr_p = gimple_boolify (*expr_p);
14068 *expr_p = build3_loc (input_location, COND_EXPR,
14069 org_type, *expr_p,
14070 fold_convert_loc
14071 (input_location,
14072 org_type, boolean_true_node),
14073 fold_convert_loc
14074 (input_location,
14075 org_type, boolean_false_node));
14076 ret = GS_OK;
14077 break;
14078 }
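 /* For example, an int-typed "a && b" is rebuilt here as the COND_EXPR
 "a && b ? 1 : 0" on the boolified operands; since we return GS_OK,
 the next iteration of the loop lowers that COND_EXPR into explicit
 control flow via gimplify_cond_expr. */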
14079
14080 case TRUTH_NOT_EXPR:
14081 {
14082 tree type = TREE_TYPE (*expr_p);
14083 /* The parsers are careful to generate TRUTH_NOT_EXPR
14084 only with operands that are always zero or one.
14085 We do not fold here but handle the only interesting case
14086 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
14087 *expr_p = gimple_boolify (*expr_p);
14088 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
14089 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
14090 TREE_TYPE (*expr_p),
14091 TREE_OPERAND (*expr_p, 0));
14092 else
14093 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
14094 TREE_TYPE (*expr_p),
14095 TREE_OPERAND (*expr_p, 0),
14096 build_int_cst (TREE_TYPE (*expr_p), 1));
14097 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
14098 *expr_p = fold_convert_loc (input_location, type, *expr_p);
14099 ret = GS_OK;
14100 break;
14101 }
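 /* For example, "!b" with a boolified operand of precision 1 becomes
 "~b", while a wider boolified operand x yields "x ^ 1"; the trailing
 conversion restores the original type when it differs. */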
14102
14103 case ADDR_EXPR:
14104 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
14105 break;
14106
14107 case ANNOTATE_EXPR:
14108 {
14109 tree cond = TREE_OPERAND (*expr_p, 0);
14110 tree kind = TREE_OPERAND (*expr_p, 1);
14111 tree data = TREE_OPERAND (*expr_p, 2);
14112 tree type = TREE_TYPE (cond);
14113 if (!INTEGRAL_TYPE_P (type))
14114 {
14115 *expr_p = cond;
14116 ret = GS_OK;
14117 break;
14118 }
14119 tree tmp = create_tmp_var (type);
14120 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
14121 gcall *call
14122 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
14123 gimple_call_set_lhs (call, tmp);
14124 gimplify_seq_add_stmt (pre_p, call);
14125 *expr_p = tmp;
14126 ret = GS_ALL_DONE;
14127 break;
14128 }
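 /* For example (dump form approximate), a loop guard annotated by
 "#pragma GCC ivdep" arrives here as
 ANNOTATE_EXPR <cond, annot_expr_ivdep_kind, 0> and is lowered to

 tmp = .ANNOTATE (cond, annot_expr_ivdep_kind, 0);

 an internal call that later loop passes recognize and strip. */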
14129
14130 case VA_ARG_EXPR:
14131 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
14132 break;
14133
14134 CASE_CONVERT:
14135 if (IS_EMPTY_STMT (*expr_p))
14136 {
14137 ret = GS_ALL_DONE;
14138 break;
14139 }
14140
14141 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
14142 || fallback == fb_none)
14143 {
14144 /* Just strip a conversion to void (or in void context) and
14145 try again. */
14146 *expr_p = TREE_OPERAND (*expr_p, 0);
14147 ret = GS_OK;
14148 break;
14149 }
14150
14151 ret = gimplify_conversion (expr_p);
14152 if (ret == GS_ERROR)
14153 break;
14154 if (*expr_p != save_expr)
14155 break;
14156 /* FALLTHRU */
14157
14158 case FIX_TRUNC_EXPR:
14159 /* unary_expr: ... | '(' cast ')' val | ... */
14160 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14161 is_gimple_val, fb_rvalue);
14162 recalculate_side_effects (*expr_p);
14163 break;
14164
14165 case INDIRECT_REF:
14166 {
14167 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
14168 bool notrap = TREE_THIS_NOTRAP (*expr_p);
14169 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
14170
14171 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
14172 if (*expr_p != save_expr)
14173 {
14174 ret = GS_OK;
14175 break;
14176 }
14177
14178 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14179 is_gimple_reg, fb_rvalue);
14180 if (ret == GS_ERROR)
14181 break;
14182
14183 recalculate_side_effects (*expr_p);
14184 *expr_p = fold_build2_loc (input_location, MEM_REF,
14185 TREE_TYPE (*expr_p),
14186 TREE_OPERAND (*expr_p, 0),
14187 build_int_cst (saved_ptr_type, 0));
14188 TREE_THIS_VOLATILE (*expr_p) = volatilep;
14189 TREE_THIS_NOTRAP (*expr_p) = notrap;
14190 ret = GS_OK;
14191 break;
14192 }
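 /* For example, an INDIRECT_REF "*p" that fold_indirect_ref_loc could
 not simplify is rewritten above to the equivalent MEM_REF <p, 0>,
 with the volatile and no-trap flags saved on entry carried over. */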
14193
14194 /* We arrive here through the various re-gimplification paths. */
14195 case MEM_REF:
14196 /* First try re-folding the whole thing. */
14197 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
14198 TREE_OPERAND (*expr_p, 0),
14199 TREE_OPERAND (*expr_p, 1));
14200 if (tmp)
14201 {
14202 REF_REVERSE_STORAGE_ORDER (tmp)
14203 = REF_REVERSE_STORAGE_ORDER (*expr_p);
14204 *expr_p = tmp;
14205 recalculate_side_effects (*expr_p);
14206 ret = GS_OK;
14207 break;
14208 }
14209 /* Avoid re-gimplifying the address operand if it is already
14210 in suitable form. Re-gimplifying would mark the address
14211 operand addressable. Always gimplify when not in SSA form
14212 as we still may have to gimplify decls with value-exprs. */
14213 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
14214 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
14215 {
14216 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14217 is_gimple_mem_ref_addr, fb_rvalue);
14218 if (ret == GS_ERROR)
14219 break;
14220 }
14221 recalculate_side_effects (*expr_p);
14222 ret = GS_ALL_DONE;
14223 break;
14224
14225 /* Constants need not be gimplified. */
14226 case INTEGER_CST:
14227 case REAL_CST:
14228 case FIXED_CST:
14229 case STRING_CST:
14230 case COMPLEX_CST:
14231 case VECTOR_CST:
14232 /* Drop the overflow flag on constants; we do not want
14233 that in the GIMPLE IL. */
14234 if (TREE_OVERFLOW_P (*expr_p))
14235 *expr_p = drop_tree_overflow (*expr_p);
14236 ret = GS_ALL_DONE;
14237 break;
14238
14239 case CONST_DECL:
14240 /* If we require an lvalue, such as for ADDR_EXPR, retain the
14241 CONST_DECL node. Otherwise the decl is replaceable by its
14242 value. */
14243 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
14244 if (fallback & fb_lvalue)
14245 ret = GS_ALL_DONE;
14246 else
14247 {
14248 *expr_p = DECL_INITIAL (*expr_p);
14249 ret = GS_OK;
14250 }
14251 break;
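 /* For example, in a front end that binds named constants to
 CONST_DECLs, an rvalue use of a CONST_DECL whose DECL_INITIAL is 42
 simply becomes the INTEGER_CST 42 here; only an lvalue context
 (e.g. under ADDR_EXPR) keeps the decl itself. */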
14252
14253 case DECL_EXPR:
14254 ret = gimplify_decl_expr (expr_p, pre_p);
14255 break;
14256
14257 case BIND_EXPR:
14258 ret = gimplify_bind_expr (expr_p, pre_p);
14259 break;
14260
14261 case LOOP_EXPR:
14262 ret = gimplify_loop_expr (expr_p, pre_p);
14263 break;
14264
14265 case SWITCH_EXPR:
14266 ret = gimplify_switch_expr (expr_p, pre_p);
14267 break;
14268
14269 case EXIT_EXPR:
14270 ret = gimplify_exit_expr (expr_p);
14271 break;
14272
14273 case GOTO_EXPR:
14274 /* If the target is not a LABEL_DECL, then it is a computed jump
14275 and the target needs to be gimplified. */
14276 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
14277 {
14278 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
14279 NULL, is_gimple_val, fb_rvalue);
14280 if (ret == GS_ERROR)
14281 break;
14282 }
14283 gimplify_seq_add_stmt (pre_p,
14284 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
14285 ret = GS_ALL_DONE;
14286 break;
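 /* For example (temporary name invented), a computed "goto *p;" whose
 destination is not already a simple value gimplifies to roughly

 D.1234 = <destination>;
 goto D.1234;

 while a plain "goto lab;" emits the GIMPLE_GOTO directly. */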
14287
14288 case PREDICT_EXPR:
14289 gimplify_seq_add_stmt (pre_p,
14290 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
14291 PREDICT_EXPR_OUTCOME (*expr_p)));
14292 ret = GS_ALL_DONE;
14293 break;
14294
14295 case LABEL_EXPR:
14296 ret = gimplify_label_expr (expr_p, pre_p);
14297 label = LABEL_EXPR_LABEL (*expr_p);
14298 gcc_assert (decl_function_context (label) == current_function_decl);
14299
14300 /* If the label is used in a goto statement, or the address of the label
14301 is taken, we need to unpoison all variables that were seen so far.
14302 Doing so prevents us from reporting false positives. */
14303 if (asan_poisoned_variables
14304 && asan_used_labels != NULL
14305 && asan_used_labels->contains (label))
14306 asan_poison_variables (asan_poisoned_variables, false, pre_p);
14307 break;
14308
14309 case CASE_LABEL_EXPR:
14310 ret = gimplify_case_label_expr (expr_p, pre_p);
14311
14312 if (gimplify_ctxp->live_switch_vars)
14313 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
14314 pre_p);
14315 break;
14316
14317 case RETURN_EXPR:
14318 ret = gimplify_return_expr (*expr_p, pre_p);
14319 break;
14320
14321 case CONSTRUCTOR:
14322 /* Don't reduce this in place; let gimplify_init_constructor work its
14323 magic. But if we're just elaborating this for side effects, just
14324 gimplify any element that has side-effects. */
14325 if (fallback == fb_none)
14326 {
14327 unsigned HOST_WIDE_INT ix;
14328 tree val;
14329 tree temp = NULL_TREE;
14330 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
14331 if (TREE_SIDE_EFFECTS (val))
14332 append_to_statement_list (val, &temp);
14333
14334 *expr_p = temp;
14335 ret = temp ? GS_OK : GS_ALL_DONE;
14336 }
14337 /* C99 code may assign to an array in a constructed
14338 structure or union, and this has undefined behavior only
14339 on execution, so create a temporary if an lvalue is
14340 required. */
14341 else if (fallback == fb_lvalue)
14342 {
14343 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
14344 mark_addressable (*expr_p);
14345 ret = GS_OK;
14346 }
14347 else
14348 ret = GS_ALL_DONE;
14349 break;
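 /* For example, a CONSTRUCTOR evaluated only for effect (fallback ==
 fb_none), say { f (), 0 } in a void context, is reduced above to
 just the call to f (), its only element with side effects. */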
14350
14351 /* The following are special cases that are not handled by the
14352 original GIMPLE grammar. */
14353
14354 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
14355 eliminated. */
14356 case SAVE_EXPR:
14357 ret = gimplify_save_expr (expr_p, pre_p, post_p);
14358 break;
14359
14360 case BIT_FIELD_REF:
14361 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14362 post_p, is_gimple_lvalue, fb_either);
14363 recalculate_side_effects (*expr_p);
14364 break;
14365
14366 case TARGET_MEM_REF:
14367 {
14368 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
14369
14370 if (TMR_BASE (*expr_p))
14371 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
14372 post_p, is_gimple_mem_ref_addr, fb_either);
14373 if (TMR_INDEX (*expr_p))
14374 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
14375 post_p, is_gimple_val, fb_rvalue);
14376 if (TMR_INDEX2 (*expr_p))
14377 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
14378 post_p, is_gimple_val, fb_rvalue);
14379 /* TMR_STEP and TMR_OFFSET are always integer constants. */
14380 ret = MIN (r0, r1);
14381 }
14382 break;
14383
14384 case NON_LVALUE_EXPR:
14385 /* This should have been stripped above. */
14386 gcc_unreachable ();
14387
14388 case ASM_EXPR:
14389 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
14390 break;
14391
14392 case TRY_FINALLY_EXPR:
14393 case TRY_CATCH_EXPR:
14394 {
14395 gimple_seq eval, cleanup;
14396 gtry *try_;
14397
14398 /* Calls to destructors are generated automatically in the FINALLY/CATCH
14399 block. They should have UNKNOWN_LOCATION as their location. However,
14400 gimplify_call_expr will reset these call stmts to input_location
14401 if it finds a stmt's location is unknown. To prevent that resetting
14402 for destructors, we set input_location to unknown here.
14403 Note that this only affects the destructor calls in the FINALLY/CATCH
14404 block; input_location is automatically restored to its original value
14405 by the end of gimplify_expr. */
14406 input_location = UNKNOWN_LOCATION;
14407 eval = cleanup = NULL;
14408 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
14409 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14410 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
14411 {
14412 gimple_seq n = NULL, e = NULL;
14413 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14414 0), &n);
14415 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14416 1), &e);
14417 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
14418 {
14419 geh_else *stmt = gimple_build_eh_else (n, e);
14420 gimple_seq_add_stmt (&cleanup, stmt);
14421 }
14422 }
14423 else
14424 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
14425 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
14426 if (gimple_seq_empty_p (cleanup))
14427 {
14428 gimple_seq_add_seq (pre_p, eval);
14429 ret = GS_ALL_DONE;
14430 break;
14431 }
14432 try_ = gimple_build_try (eval, cleanup,
14433 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14434 ? GIMPLE_TRY_FINALLY
14435 : GIMPLE_TRY_CATCH);
14436 if (EXPR_HAS_LOCATION (save_expr))
14437 gimple_set_location (try_, EXPR_LOCATION (save_expr));
14438 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
14439 gimple_set_location (try_, saved_location);
14440 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
14441 gimple_try_set_catch_is_cleanup (try_,
14442 TRY_CATCH_IS_CLEANUP (*expr_p));
14443 gimplify_seq_add_stmt (pre_p, try_);
14444 ret = GS_ALL_DONE;
14445 break;
14446 }
14447
14448 case CLEANUP_POINT_EXPR:
14449 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
14450 break;
14451
14452 case TARGET_EXPR:
14453 ret = gimplify_target_expr (expr_p, pre_p, post_p);
14454 break;
14455
14456 case CATCH_EXPR:
14457 {
14458 gimple *c;
14459 gimple_seq handler = NULL;
14460 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14461 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14462 gimplify_seq_add_stmt (pre_p, c);
14463 ret = GS_ALL_DONE;
14464 break;
14465 }
14466
14467 case EH_FILTER_EXPR:
14468 {
14469 gimple *ehf;
14470 gimple_seq failure = NULL;
14471
14472 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14473 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14474 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
14475 gimplify_seq_add_stmt (pre_p, ehf);
14476 ret = GS_ALL_DONE;
14477 break;
14478 }
14479
14480 case OBJ_TYPE_REF:
14481 {
14482 enum gimplify_status r0, r1;
14483 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14484 post_p, is_gimple_val, fb_rvalue);
14485 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14486 post_p, is_gimple_val, fb_rvalue);
14487 TREE_SIDE_EFFECTS (*expr_p) = 0;
14488 ret = MIN (r0, r1);
14489 }
14490 break;
14491
14492 case LABEL_DECL:
14493 /* We get here when taking the address of a label. We mark
14494 the label as "forced", meaning it can never be removed and
14495 it is a potential target for any computed goto. */
14496 FORCED_LABEL (*expr_p) = 1;
14497 ret = GS_ALL_DONE;
14498 break;
14499
14500 case STATEMENT_LIST:
14501 ret = gimplify_statement_list (expr_p, pre_p);
14502 break;
14503
14504 case WITH_SIZE_EXPR:
14505 {
14506 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14507 post_p == &internal_post ? NULL : post_p,
14508 gimple_test_f, fallback);
14509 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14510 is_gimple_val, fb_rvalue);
14511 ret = GS_ALL_DONE;
14512 }
14513 break;
14514
14515 case VAR_DECL:
14516 case PARM_DECL:
14517 ret = gimplify_var_or_parm_decl (expr_p);
14518 break;
14519
14520 case RESULT_DECL:
14521 /* When within an OMP context, notice uses of variables. */
14522 if (gimplify_omp_ctxp)
14523 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14524 ret = GS_ALL_DONE;
14525 break;
14526
14527 case DEBUG_EXPR_DECL:
14528 gcc_unreachable ();
14529
14530 case DEBUG_BEGIN_STMT:
14531 gimplify_seq_add_stmt (pre_p,
14532 gimple_build_debug_begin_stmt
14533 (TREE_BLOCK (*expr_p),
14534 EXPR_LOCATION (*expr_p)));
14535 ret = GS_ALL_DONE;
14536 *expr_p = NULL;
14537 break;
14538
14539 case SSA_NAME:
14540 /* Allow callbacks into the gimplifier during optimization. */
14541 ret = GS_ALL_DONE;
14542 break;
14543
14544 case OMP_PARALLEL:
14545 gimplify_omp_parallel (expr_p, pre_p);
14546 ret = GS_ALL_DONE;
14547 break;
14548
14549 case OMP_TASK:
14550 gimplify_omp_task (expr_p, pre_p);
14551 ret = GS_ALL_DONE;
14552 break;
14553
14554 case OMP_FOR:
14555 case OMP_SIMD:
14556 case OMP_DISTRIBUTE:
14557 case OMP_TASKLOOP:
14558 case OACC_LOOP:
14559 ret = gimplify_omp_for (expr_p, pre_p);
14560 break;
14561
14562 case OMP_LOOP:
14563 ret = gimplify_omp_loop (expr_p, pre_p);
14564 break;
14565
14566 case OACC_CACHE:
14567 gimplify_oacc_cache (expr_p, pre_p);
14568 ret = GS_ALL_DONE;
14569 break;
14570
14571 case OACC_DECLARE:
14572 gimplify_oacc_declare (expr_p, pre_p);
14573 ret = GS_ALL_DONE;
14574 break;
14575
14576 case OACC_HOST_DATA:
14577 case OACC_DATA:
14578 case OACC_KERNELS:
14579 case OACC_PARALLEL:
14580 case OACC_SERIAL:
14581 case OMP_SECTIONS:
14582 case OMP_SINGLE:
14583 case OMP_TARGET:
14584 case OMP_TARGET_DATA:
14585 case OMP_TEAMS:
14586 gimplify_omp_workshare (expr_p, pre_p);
14587 ret = GS_ALL_DONE;
14588 break;
14589
14590 case OACC_ENTER_DATA:
14591 case OACC_EXIT_DATA:
14592 case OACC_UPDATE:
14593 case OMP_TARGET_UPDATE:
14594 case OMP_TARGET_ENTER_DATA:
14595 case OMP_TARGET_EXIT_DATA:
14596 gimplify_omp_target_update (expr_p, pre_p);
14597 ret = GS_ALL_DONE;
14598 break;
14599
14600 case OMP_SECTION:
14601 case OMP_MASTER:
14602 case OMP_ORDERED:
14603 case OMP_CRITICAL:
14604 case OMP_SCAN:
14605 {
14606 gimple_seq body = NULL;
14607 gimple *g;
14608 bool saved_in_omp_construct = in_omp_construct;
14609
14610 in_omp_construct = true;
14611 gimplify_and_add (OMP_BODY (*expr_p), &body);
14612 in_omp_construct = saved_in_omp_construct;
14613 switch (TREE_CODE (*expr_p))
14614 {
14615 case OMP_SECTION:
14616 g = gimple_build_omp_section (body);
14617 break;
14618 case OMP_MASTER:
14619 g = gimple_build_omp_master (body);
14620 break;
14621 case OMP_ORDERED:
14622 g = gimplify_omp_ordered (*expr_p, body);
14623 break;
14624 case OMP_CRITICAL:
14625 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14626 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14627 gimplify_adjust_omp_clauses (pre_p, body,
14628 &OMP_CRITICAL_CLAUSES (*expr_p),
14629 OMP_CRITICAL);
14630 g = gimple_build_omp_critical (body,
14631 OMP_CRITICAL_NAME (*expr_p),
14632 OMP_CRITICAL_CLAUSES (*expr_p));
14633 break;
14634 case OMP_SCAN:
14635 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14636 pre_p, ORT_WORKSHARE, OMP_SCAN);
14637 gimplify_adjust_omp_clauses (pre_p, body,
14638 &OMP_SCAN_CLAUSES (*expr_p),
14639 OMP_SCAN);
14640 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14641 break;
14642 default:
14643 gcc_unreachable ();
14644 }
14645 gimplify_seq_add_stmt (pre_p, g);
14646 ret = GS_ALL_DONE;
14647 break;
14648 }
14649
14650 case OMP_TASKGROUP:
14651 {
14652 gimple_seq body = NULL;
14653
14654 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14655 bool saved_in_omp_construct = in_omp_construct;
14656 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14657 OMP_TASKGROUP);
14658 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14659
14660 in_omp_construct = true;
14661 gimplify_and_add (OMP_BODY (*expr_p), &body);
14662 in_omp_construct = saved_in_omp_construct;
14663 gimple_seq cleanup = NULL;
14664 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14665 gimple *g = gimple_build_call (fn, 0);
14666 gimple_seq_add_stmt (&cleanup, g);
14667 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14668 body = NULL;
14669 gimple_seq_add_stmt (&body, g);
14670 g = gimple_build_omp_taskgroup (body, *pclauses);
14671 gimplify_seq_add_stmt (pre_p, g);
14672 ret = GS_ALL_DONE;
14673 break;
14674 }
14675
14676 case OMP_ATOMIC:
14677 case OMP_ATOMIC_READ:
14678 case OMP_ATOMIC_CAPTURE_OLD:
14679 case OMP_ATOMIC_CAPTURE_NEW:
14680 ret = gimplify_omp_atomic (expr_p, pre_p);
14681 break;
14682
14683 case TRANSACTION_EXPR:
14684 ret = gimplify_transaction (expr_p, pre_p);
14685 break;
14686
14687 case TRUTH_AND_EXPR:
14688 case TRUTH_OR_EXPR:
14689 case TRUTH_XOR_EXPR:
14690 {
14691 tree orig_type = TREE_TYPE (*expr_p);
14692 tree new_type, xop0, xop1;
14693 *expr_p = gimple_boolify (*expr_p);
14694 new_type = TREE_TYPE (*expr_p);
14695 if (!useless_type_conversion_p (orig_type, new_type))
14696 {
14697 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14698 ret = GS_OK;
14699 break;
14700 }
14701
14702 /* Boolified binary truth expressions are semantically equivalent
14703 to bitwise binary expressions. Canonicalize them to the
14704 bitwise variant. */
14705 switch (TREE_CODE (*expr_p))
14706 {
14707 case TRUTH_AND_EXPR:
14708 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14709 break;
14710 case TRUTH_OR_EXPR:
14711 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14712 break;
14713 case TRUTH_XOR_EXPR:
14714 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14715 break;
14716 default:
14717 break;
14718 }
14719 /* Now make sure that operands have compatible type to
14720 expression's new_type. */
14721 xop0 = TREE_OPERAND (*expr_p, 0);
14722 xop1 = TREE_OPERAND (*expr_p, 1);
14723 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14724 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14725 new_type,
14726 xop0);
14727 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14728 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14729 new_type,
14730 xop1);
14731 /* Continue classified as tcc_binary. */
14732 goto expr_2;
14733 }
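 /* For example, a TRUTH_AND_EXPR (the non-short-circuiting form of
 &&) whose operands have been boolified is simply re-coded as
 BIT_AND_EXPR here and then handled by the generic two-operand
 gimplification at expr_2 below. */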
14734
14735 case VEC_COND_EXPR:
14736 goto expr_3;
14737
14738 case VEC_PERM_EXPR:
14739 /* Classified as tcc_expression. */
14740 goto expr_3;
14741
14742 case BIT_INSERT_EXPR:
14743 /* Argument 3 is a constant. */
14744 goto expr_2;
14745
14746 case POINTER_PLUS_EXPR:
14747 {
14748 enum gimplify_status r0, r1;
14749 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14750 post_p, is_gimple_val, fb_rvalue);
14751 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14752 post_p, is_gimple_val, fb_rvalue);
14753 recalculate_side_effects (*expr_p);
14754 ret = MIN (r0, r1);
14755 break;
14756 }
14757
14758 default:
14759 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14760 {
14761 case tcc_comparison:
14762 /* Handle comparison of non-scalar-mode aggregate objects with a
14763 call to memcmp. It would be nice to only have to do this for
14764 variable-sized objects, but then we'd have to allow the same
14765 nest of reference nodes we allow for MODIFY_EXPR, and that's
14766 too complex.
14767
14768 Compare scalar-mode aggregates as scalar-mode values. Using
14769 memcmp for them would be very inefficient at best, and is
14770 plain wrong if bitfields are involved. */
14771 {
14772 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14773
14774 /* Vector comparisons need no boolification. */
14775 if (TREE_CODE (type) == VECTOR_TYPE)
14776 goto expr_2;
14777 else if (!AGGREGATE_TYPE_P (type))
14778 {
14779 tree org_type = TREE_TYPE (*expr_p);
14780 *expr_p = gimple_boolify (*expr_p);
14781 if (!useless_type_conversion_p (org_type,
14782 TREE_TYPE (*expr_p)))
14783 {
14784 *expr_p = fold_convert_loc (input_location,
14785 org_type, *expr_p);
14786 ret = GS_OK;
14787 }
14788 else
14789 goto expr_2;
14790 }
14791 else if (TYPE_MODE (type) != BLKmode)
14792 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14793 else
14794 ret = gimplify_variable_sized_compare (expr_p);
14795
14796 break;
14797 }
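 /* For example (approximate lowering), "s1 == s2" on a BLKmode struct
 type is turned by gimplify_variable_sized_compare into roughly
 "memcmp (&s1, &s2, sizeof (s1)) == 0", whereas a small aggregate
 with an integer mode is compared directly as a value of that
 mode. */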
14798
14799 /* If *EXPR_P does not need to be special-cased, handle it
14800 according to its class. */
14801 case tcc_unary:
14802 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14803 post_p, is_gimple_val, fb_rvalue);
14804 break;
14805
14806 case tcc_binary:
14807 expr_2:
14808 {
14809 enum gimplify_status r0, r1;
14810
14811 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14812 post_p, is_gimple_val, fb_rvalue);
14813 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14814 post_p, is_gimple_val, fb_rvalue);
14815
14816 ret = MIN (r0, r1);
14817 break;
14818 }
14819
14820 expr_3:
14821 {
14822 enum gimplify_status r0, r1, r2;
14823
14824 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14825 post_p, is_gimple_val, fb_rvalue);
14826 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14827 post_p, is_gimple_val, fb_rvalue);
14828 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14829 post_p, is_gimple_val, fb_rvalue);
14830
14831 ret = MIN (MIN (r0, r1), r2);
14832 break;
14833 }
14834
14835 case tcc_declaration:
14836 case tcc_constant:
14837 ret = GS_ALL_DONE;
14838 goto dont_recalculate;
14839
14840 default:
14841 gcc_unreachable ();
14842 }
14843
14844 recalculate_side_effects (*expr_p);
14845
14846 dont_recalculate:
14847 break;
14848 }
14849
14850 gcc_assert (*expr_p || ret != GS_OK);
14851 }
14852 while (ret == GS_OK);
14853
14854 /* If we encountered an error_mark somewhere nested inside, either
14855 stub out the statement or propagate the error back out. */
14856 if (ret == GS_ERROR)
14857 {
14858 if (is_statement)
14859 *expr_p = NULL;
14860 goto out;
14861 }
14862
14863 /* This was only valid as a return value from the langhook, which
14864 we handled. Make sure it doesn't escape from any other context. */
14865 gcc_assert (ret != GS_UNHANDLED);
14866
14867 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14868 {
14869 /* We aren't looking for a value, and we don't have a valid
14870 statement. If it doesn't have side-effects, throw it away.
14871 We can also get here with code such as "*&&L;", where L is
14872 a LABEL_DECL that is marked as FORCED_LABEL. */
14873 if (TREE_CODE (*expr_p) == LABEL_DECL
14874 || !TREE_SIDE_EFFECTS (*expr_p))
14875 *expr_p = NULL;
14876 else if (!TREE_THIS_VOLATILE (*expr_p))
14877 {
14878 /* This is probably a _REF that contains something nested that
14879 has side effects. Recurse through the operands to find it. */
14880 enum tree_code code = TREE_CODE (*expr_p);
14881
14882 switch (code)
14883 {
14884 case COMPONENT_REF:
14885 case REALPART_EXPR:
14886 case IMAGPART_EXPR:
14887 case VIEW_CONVERT_EXPR:
14888 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14889 gimple_test_f, fallback);
14890 break;
14891
14892 case ARRAY_REF:
14893 case ARRAY_RANGE_REF:
14894 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14895 gimple_test_f, fallback);
14896 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14897 gimple_test_f, fallback);
14898 break;
14899
14900 default:
14901 /* Anything else with side-effects must be converted to
14902 a valid statement before we get here. */
14903 gcc_unreachable ();
14904 }
14905
14906 *expr_p = NULL;
14907 }
14908 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14909 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14910 {
14911 /* Historically, the compiler has treated a bare reference
14912 to a non-BLKmode volatile lvalue as forcing a load. */
14913 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14914
14915 /* Normally, we do not want to create a temporary for a
14916 TREE_ADDRESSABLE type because such a type should not be
14917 copied by bitwise-assignment. However, we make an
14918 exception here, as all we are doing here is ensuring that
14919 we read the bytes that make up the type. We use
14920 create_tmp_var_raw because create_tmp_var will abort when
14921 given a TREE_ADDRESSABLE type. */
14922 tree tmp = create_tmp_var_raw (type, "vol");
14923 gimple_add_tmp_var (tmp);
14924 gimplify_assign (tmp, *expr_p, pre_p);
14925 *expr_p = NULL;
14926 }
14927 else
14928 /* We can't do anything useful with a volatile reference to
14929 an incomplete type, so just throw it away. Likewise for
14930 a BLKmode type, since any implicit inner load should
14931 already have been turned into an explicit one by the
14932 gimplification process. */
14933 *expr_p = NULL;
14934 }
14935
14936 /* If we are gimplifying at the statement level, we're done. Tack
14937 everything together and return. */
14938 if (fallback == fb_none || is_statement)
14939 {
14940 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14941 it out for GC to reclaim it. */
14942 *expr_p = NULL_TREE;
14943
14944 if (!gimple_seq_empty_p (internal_pre)
14945 || !gimple_seq_empty_p (internal_post))
14946 {
14947 gimplify_seq_add_seq (&internal_pre, internal_post);
14948 gimplify_seq_add_seq (pre_p, internal_pre);
14949 }
14950
14951 /* The result of gimplifying *EXPR_P is going to be the last few
14952 statements in *PRE_P and *POST_P. Add location information
14953 to all the statements that were added by the gimplification
14954 helpers. */
14955 if (!gimple_seq_empty_p (*pre_p))
14956 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14957
14958 if (!gimple_seq_empty_p (*post_p))
14959 annotate_all_with_location_after (*post_p, post_last_gsi,
14960 input_location);
14961
14962 goto out;
14963 }
14964
14965 #ifdef ENABLE_GIMPLE_CHECKING
14966 if (*expr_p)
14967 {
14968 enum tree_code code = TREE_CODE (*expr_p);
14969 /* These expressions should already be in gimple IR form. */
14970 gcc_assert (code != MODIFY_EXPR
14971 && code != ASM_EXPR
14972 && code != BIND_EXPR
14973 && code != CATCH_EXPR
14974 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14975 && code != EH_FILTER_EXPR
14976 && code != GOTO_EXPR
14977 && code != LABEL_EXPR
14978 && code != LOOP_EXPR
14979 && code != SWITCH_EXPR
14980 && code != TRY_FINALLY_EXPR
14981 && code != EH_ELSE_EXPR
14982 && code != OACC_PARALLEL
14983 && code != OACC_KERNELS
14984 && code != OACC_SERIAL
14985 && code != OACC_DATA
14986 && code != OACC_HOST_DATA
14987 && code != OACC_DECLARE
14988 && code != OACC_UPDATE
14989 && code != OACC_ENTER_DATA
14990 && code != OACC_EXIT_DATA
14991 && code != OACC_CACHE
14992 && code != OMP_CRITICAL
14993 && code != OMP_FOR
14994 && code != OACC_LOOP
14995 && code != OMP_MASTER
14996 && code != OMP_TASKGROUP
14997 && code != OMP_ORDERED
14998 && code != OMP_PARALLEL
14999 && code != OMP_SCAN
15000 && code != OMP_SECTIONS
15001 && code != OMP_SECTION
15002 && code != OMP_SINGLE);
15003 }
15004 #endif
15005
15006 /* Otherwise we're gimplifying a subexpression, so the resulting
15007 value is interesting. If it's a valid operand that matches
15008 GIMPLE_TEST_F, we're done. Unless we are handling some
15009 post-effects internally; if that's the case, we need to copy into
15010 a temporary before adding the post-effects to POST_P. */
15011 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
15012 goto out;
15013
15014 /* Otherwise, we need to create a new temporary for the gimplified
15015 expression. */
15016
15017 /* We can't return an lvalue if we have an internal postqueue. The
15018 object the lvalue refers to would (probably) be modified by the
15019 postqueue; we need to copy the value out first, which means an
15020 rvalue. */
15021 if ((fallback & fb_lvalue)
15022 && gimple_seq_empty_p (internal_post)
15023 && is_gimple_addressable (*expr_p))
15024 {
15025 /* An lvalue will do. Take the address of the expression, store it
15026 in a temporary, and replace the expression with an INDIRECT_REF of
15027 that temporary. */
15028 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
15029 unsigned int ref_align = get_object_alignment (*expr_p);
15030 tree ref_type = TREE_TYPE (*expr_p);
15031 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
15032 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
15033 if (TYPE_ALIGN (ref_type) != ref_align)
15034 ref_type = build_aligned_type (ref_type, ref_align);
15035 *expr_p = build2 (MEM_REF, ref_type,
15036 tmp, build_zero_cst (ref_alias_type));
15037 }
15038 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
15039 {
15040 /* An rvalue will do. Assign the gimplified expression into a
15041 new temporary TMP and replace the original expression with
15042 TMP. First, make sure that the expression has a type so that
15043 it can be assigned into a temporary. */
15044 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
15045 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
15046 }
15047 else
15048 {
15049 #ifdef ENABLE_GIMPLE_CHECKING
15050 if (!(fallback & fb_mayfail))
15051 {
15052 fprintf (stderr, "gimplification failed:\n");
15053 print_generic_expr (stderr, *expr_p);
15054 debug_tree (*expr_p);
15055 internal_error ("gimplification failed");
15056 }
15057 #endif
15058 gcc_assert (fallback & fb_mayfail);
15059
15060 /* If this is an asm statement, and the user asked for the
15061 impossible, don't die. Fail and let gimplify_asm_expr
15062 issue an error. */
15063 ret = GS_ERROR;
15064 goto out;
15065 }
15066
15067 /* Make sure the temporary matches our predicate. */
15068 gcc_assert ((*gimple_test_f) (*expr_p));
15069
15070 if (!gimple_seq_empty_p (internal_post))
15071 {
15072 annotate_all_with_location (internal_post, input_location);
15073 gimplify_seq_add_seq (pre_p, internal_post);
15074 }
15075
15076 out:
15077 input_location = saved_location;
15078 return ret;
15079 }
15080
15081 /* Like gimplify_expr but make sure the gimplified result is not itself
15082 an SSA name (a decl is used in its place). Temporaries required by
15083 evaluating *EXPR_P may still be SSA names. */
15084
15085 static enum gimplify_status
15086 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
15087 bool (*gimple_test_f) (tree), fallback_t fallback,
15088 bool allow_ssa)
15089 {
15090 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
15091 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
15092 gimple_test_f, fallback);
15093 if (! allow_ssa
15094 && TREE_CODE (*expr_p) == SSA_NAME)
15095 {
15096 tree name = *expr_p;
15097 if (was_ssa_name_p)
15098 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
15099 else
15100 {
15101 /* Avoid the extra copy if possible. */
15102 *expr_p = create_tmp_reg (TREE_TYPE (name));
15103 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
15104 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
15105 release_ssa_name (name);
15106 }
15107 }
15108 return ret;
15109 }
15110
15111 /* Look through TYPE for variable-sized objects and gimplify each such
15112 size that we find. Add to LIST_P any statements generated. */
15113
15114 void
15115 gimplify_type_sizes (tree type, gimple_seq *list_p)
15116 {
15117 tree field, t;
15118
15119 if (type == NULL || type == error_mark_node)
15120 return;
15121
15122 /* We first do the main variant, then copy into any other variants. */
15123 type = TYPE_MAIN_VARIANT (type);
15124
15125 /* Avoid infinite recursion. */
15126 if (TYPE_SIZES_GIMPLIFIED (type))
15127 return;
15128
15129 TYPE_SIZES_GIMPLIFIED (type) = 1;
15130
15131 switch (TREE_CODE (type))
15132 {
15133 case INTEGER_TYPE:
15134 case ENUMERAL_TYPE:
15135 case BOOLEAN_TYPE:
15136 case REAL_TYPE:
15137 case FIXED_POINT_TYPE:
15138 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
15139 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
15140
15141 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15142 {
15143 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
15144 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
15145 }
15146 break;
15147
15148 case ARRAY_TYPE:
15149 /* These types may not have declarations, so handle them here. */
15150 gimplify_type_sizes (TREE_TYPE (type), list_p);
15151 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
15152 /* Ensure VLA bounds aren't removed: at -O0 they should be variables
15153 with assigned stack slots, and at -O1+ with -g they should be tracked
15154 by VTA. */
15155 if (!(TYPE_NAME (type)
15156 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
15157 && DECL_IGNORED_P (TYPE_NAME (type)))
15158 && TYPE_DOMAIN (type)
15159 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
15160 {
15161 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
15162 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15163 DECL_IGNORED_P (t) = 0;
15164 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
15165 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
15166 DECL_IGNORED_P (t) = 0;
15167 }
15168 break;
15169
15170 case RECORD_TYPE:
15171 case UNION_TYPE:
15172 case QUAL_UNION_TYPE:
15173 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15174 if (TREE_CODE (field) == FIELD_DECL)
15175 {
15176 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
15177 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
15178 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
15179 gimplify_type_sizes (TREE_TYPE (field), list_p);
15180 }
15181 break;
15182
15183 case POINTER_TYPE:
15184 case REFERENCE_TYPE:
15185 /* We used to recurse on the pointed-to type here, which turned out to
15186 be incorrect because its definition might refer to variables not
15187 yet initialized at this point if a forward declaration is involved.
15188
15189 It was actually useful for anonymous pointed-to types to ensure
15190 that the sizes evaluation dominates every possible later use of the
15191 values. Restricting to such types here would be safe since there
15192 is no possible forward declaration around, but would introduce an
15193 undesirable middle-end semantic to anonymity. We then defer to
15194 front-ends the responsibility of ensuring that the sizes are
15195 evaluated both early and late enough, e.g. by attaching artificial
15196 type declarations to the tree. */
15197 break;
15198
15199 default:
15200 break;
15201 }
15202
15203 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
15204 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
15205
15206 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
15207 {
15208 TYPE_SIZE (t) = TYPE_SIZE (type);
15209 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
15210 TYPE_SIZES_GIMPLIFIED (t) = 1;
15211 }
15212 }
15213
15214 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
15215 a size or position, has had all of its SAVE_EXPRs evaluated.
15216 We add any required statements to *STMT_P. */
15217
15218 void
15219 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
15220 {
15221 tree expr = *expr_p;
15222
15223 /* We don't do anything if the value isn't there, is constant, or contains
15224 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
15225 a VAR_DECL; the gimplifier would want to replace a VAR_DECL from another
15226 function with a new variable, which would cause problems if the type is
15227 used outside that function, so leaving such a decl as-is here is fine. */
15228 if (expr == NULL_TREE
15229 || is_gimple_constant (expr)
15230 || TREE_CODE (expr) == VAR_DECL
15231 || CONTAINS_PLACEHOLDER_P (expr))
15232 return;
15233
15234 *expr_p = unshare_expr (expr);
15235
15236 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
15237 if the def vanishes. */
15238 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
15239
15240 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
15241 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
15242 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
15243 if (is_gimple_constant (*expr_p))
15244 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
15245 }
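/* For example (temporary name invented), for the C99 VLA type
 "int[n + 1]" the two routines above arrange for the bound and size
 expressions to be evaluated once into artificial variables, roughly

 D.2345 = n + 1;

 so that every later use of the type refers to the same size values. */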
15246
15247 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
15248 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
15249 is true, also gimplify the parameters. */
15250
15251 gbind *
15252 gimplify_body (tree fndecl, bool do_parms)
15253 {
15254 location_t saved_location = input_location;
15255 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
15256 gimple *outer_stmt;
15257 gbind *outer_bind;
15258
15259 timevar_push (TV_TREE_GIMPLIFY);
15260
15261 init_tree_ssa (cfun);
15262
15263 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
15264 gimplification. */
15265 default_rtl_profile ();
15266
15267 gcc_assert (gimplify_ctxp == NULL);
15268 push_gimplify_context (true);
15269
15270 if (flag_openacc || flag_openmp)
15271 {
15272 gcc_assert (gimplify_omp_ctxp == NULL);
15273 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
15274 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
15275 }
15276
15277 /* Unshare most shared trees in the body and in that of any nested functions.
15278 It would seem we don't have to do this for nested functions because
15279 they are supposed to be output and then the outer function gimplified
15280 first, but the g++ front end doesn't always do it that way. */
15281 unshare_body (fndecl);
15282 unvisit_body (fndecl);
15283
15284 /* Make sure input_location isn't set to something weird. */
15285 input_location = DECL_SOURCE_LOCATION (fndecl);
15286
15287 /* Resolve callee-copies. This has to be done before processing
15288 the body so that DECL_VALUE_EXPR gets processed correctly. */
15289 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
15290
15291 /* Gimplify the function's body. */
15292 seq = NULL;
15293 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
15294 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
15295 if (!outer_stmt)
15296 {
15297 outer_stmt = gimple_build_nop ();
15298 gimplify_seq_add_stmt (&seq, outer_stmt);
15299 }
15300
15301 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
15302 not the case, wrap everything in a GIMPLE_BIND to make it so. */
15303 if (gimple_code (outer_stmt) == GIMPLE_BIND
15304 && (gimple_seq_first_nondebug_stmt (seq)
15305 == gimple_seq_last_nondebug_stmt (seq)))
15306 {
15307 outer_bind = as_a <gbind *> (outer_stmt);
15308 if (gimple_seq_first_stmt (seq) != outer_stmt
15309 || gimple_seq_last_stmt (seq) != outer_stmt)
15310 {
15311 /* If there are debug stmts before or after outer_stmt, move them
15312 inside of outer_bind body. */
15313 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
15314 gimple_seq second_seq = NULL;
15315 if (gimple_seq_first_stmt (seq) != outer_stmt
15316 && gimple_seq_last_stmt (seq) != outer_stmt)
15317 {
15318 second_seq = gsi_split_seq_after (gsi);
15319 gsi_remove (&gsi, false);
15320 }
15321 else if (gimple_seq_first_stmt (seq) != outer_stmt)
15322 gsi_remove (&gsi, false);
15323 else
15324 {
15325 gsi_remove (&gsi, false);
15326 second_seq = seq;
15327 seq = NULL;
15328 }
15329 gimple_seq_add_seq_without_update (&seq,
15330 gimple_bind_body (outer_bind));
15331 gimple_seq_add_seq_without_update (&seq, second_seq);
15332 gimple_bind_set_body (outer_bind, seq);
15333 }
15334 }
15335 else
15336 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
15337
15338 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15339
15340 /* If we had callee-copies statements, insert them at the beginning
15341 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
15342 if (!gimple_seq_empty_p (parm_stmts))
15343 {
15344 tree parm;
15345
15346 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
15347 if (parm_cleanup)
15348 {
15349 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
15350 GIMPLE_TRY_FINALLY);
15351 parm_stmts = NULL;
15352 gimple_seq_add_stmt (&parm_stmts, g);
15353 }
15354 gimple_bind_set_body (outer_bind, parm_stmts);
15355
15356 for (parm = DECL_ARGUMENTS (current_function_decl);
15357 parm; parm = DECL_CHAIN (parm))
15358 if (DECL_HAS_VALUE_EXPR_P (parm))
15359 {
15360 DECL_HAS_VALUE_EXPR_P (parm) = 0;
15361 DECL_IGNORED_P (parm) = 0;
15362 }
15363 }
15364
15365 if ((flag_openacc || flag_openmp || flag_openmp_simd)
15366 && gimplify_omp_ctxp)
15367 {
15368 delete_omp_context (gimplify_omp_ctxp);
15369 gimplify_omp_ctxp = NULL;
15370 }
15371
15372 pop_gimplify_context (outer_bind);
15373 gcc_assert (gimplify_ctxp == NULL);
15374
15375 if (flag_checking && !seen_error ())
15376 verify_gimple_in_seq (gimple_bind_body (outer_bind));
15377
15378 timevar_pop (TV_TREE_GIMPLIFY);
15379 input_location = saved_location;
15380
15381 return outer_bind;
15382 }
15383
15384 typedef char *char_p; /* For DEF_VEC_P. */
15385
15386 /* Return whether we should exclude FNDECL from instrumentation. */
15387
15388 static bool
15389 flag_instrument_functions_exclude_p (tree fndecl)
15390 {
15391 vec<char_p> *v;
15392
15393 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
15394 if (v && v->length () > 0)
15395 {
15396 const char *name;
15397 int i;
15398 char *s;
15399
15400 name = lang_hooks.decl_printable_name (fndecl, 1);
15401 FOR_EACH_VEC_ELT (*v, i, s)
15402 if (strstr (name, s) != NULL)
15403 return true;
15404 }
15405
15406 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
15407 if (v && v->length () > 0)
15408 {
15409 const char *name;
15410 int i;
15411 char *s;
15412
15413 name = DECL_SOURCE_FILE (fndecl);
15414 FOR_EACH_VEC_ELT (*v, i, s)
15415 if (strstr (name, s) != NULL)
15416 return true;
15417 }
15418
15419 return false;
15420 }
15421
15422 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
15423 node for the function we want to gimplify.
15424
15425 Return the sequence of GIMPLE statements corresponding to the body
15426 of FNDECL. */
15427
15428 void
15429 gimplify_function_tree (tree fndecl)
15430 {
15431 gimple_seq seq;
15432 gbind *bind;
15433
15434 gcc_assert (!gimple_body (fndecl));
15435
15436 if (DECL_STRUCT_FUNCTION (fndecl))
15437 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
15438 else
15439 push_struct_function (fndecl);
15440
15441 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
15442 if necessary. */
15443 cfun->curr_properties |= PROP_gimple_lva;
15444
15445 if (asan_sanitize_use_after_scope ())
15446 asan_poisoned_variables = new hash_set<tree> ();
15447 bind = gimplify_body (fndecl, true);
15448 if (asan_poisoned_variables)
15449 {
15450 delete asan_poisoned_variables;
15451 asan_poisoned_variables = NULL;
15452 }
15453
15454 /* The tree body of the function is no longer needed, replace it
15455 with the new GIMPLE body. */
15456 seq = NULL;
15457 gimple_seq_add_stmt (&seq, bind);
15458 gimple_set_body (fndecl, seq);
15459
15460 /* If we're instrumenting function entry/exit, then prepend the call to
15461 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
15462 catch the exit hook. */
15463 /* ??? Add some way to ignore exceptions for this TFE. */
15464 if (flag_instrument_function_entry_exit
15465 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
15466 /* Do not instrument extern inline functions. */
15467 && !(DECL_DECLARED_INLINE_P (fndecl)
15468 && DECL_EXTERNAL (fndecl)
15469 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
15470 && !flag_instrument_functions_exclude_p (fndecl))
15471 {
15472 tree x;
15473 gbind *new_bind;
15474 gimple *tf;
15475 gimple_seq cleanup = NULL, body = NULL;
15476 tree tmp_var, this_fn_addr;
15477 gcall *call;
15478
15479 /* The instrumentation hooks aren't going to call the instrumented
15480 function and the address they receive is expected to be matchable
15481 against symbol addresses. Make sure we don't create a trampoline,
15482 in case the current function is nested. */
15483 this_fn_addr = build_fold_addr_expr (current_function_decl);
15484 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
15485
15486 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15487 call = gimple_build_call (x, 1, integer_zero_node);
15488 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15489 gimple_call_set_lhs (call, tmp_var);
15490 gimplify_seq_add_stmt (&cleanup, call);
15491 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
15492 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15493 gimplify_seq_add_stmt (&cleanup, call);
15494 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
15495
15496 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
15497 call = gimple_build_call (x, 1, integer_zero_node);
15498 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
15499 gimple_call_set_lhs (call, tmp_var);
15500 gimplify_seq_add_stmt (&body, call);
15501 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
15502 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
15503 gimplify_seq_add_stmt (&body, call);
15504 gimplify_seq_add_stmt (&body, tf);
15505 new_bind = gimple_build_bind (NULL, body, NULL);
15506
15507 /* Replace the current function body with the body
15508 wrapped in the try/finally TF. */
15509 seq = NULL;
15510 gimple_seq_add_stmt (&seq, new_bind);
15511 gimple_set_body (fndecl, seq);
15512 bind = new_bind;
15513 }
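 /* For example (temporary names approximate), with
 -finstrument-functions the body built above looks roughly like

 return_addr.1 = __builtin_return_address (0);
 __cyg_profile_func_enter (this_fn, return_addr.1);
 try
 {
 <original body>
 }
 finally
 {
 return_addr.2 = __builtin_return_address (0);
 __cyg_profile_func_exit (this_fn, return_addr.2);
 } */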
15514
15515 if (sanitize_flags_p (SANITIZE_THREAD)
15516 && param_tsan_instrument_func_entry_exit)
15517 {
15518 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
15519 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
15520 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
15521 /* Replace the current function body with the body
15522 wrapped in the try/finally TF. */
15523 seq = NULL;
15524 gimple_seq_add_stmt (&seq, new_bind);
15525 gimple_set_body (fndecl, seq);
15526 }
15527
15528 DECL_SAVED_TREE (fndecl) = NULL_TREE;
15529 cfun->curr_properties |= PROP_gimple_any;
15530
15531 pop_cfun ();
15532
15533 dump_function (TDI_gimple, fndecl);
15534 }
15535
15536 /* Return a dummy expression of type TYPE in order to keep going after an
15537 error. */
15538
15539 static tree
15540 dummy_object (tree type)
15541 {
15542 tree t = build_int_cst (build_pointer_type (type), 0);
15543 return build2 (MEM_REF, type, t, t);
15544 }
15545
15546 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
15547 builtin function, but a very special sort of operator. */
15548
15549 enum gimplify_status
15550 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
15551 gimple_seq *post_p ATTRIBUTE_UNUSED)
15552 {
15553 tree promoted_type, have_va_type;
15554 tree valist = TREE_OPERAND (*expr_p, 0);
15555 tree type = TREE_TYPE (*expr_p);
15556 tree t, tag, aptag;
15557 location_t loc = EXPR_LOCATION (*expr_p);
15558
15559 /* Verify that valist is of the proper type. */
15560 have_va_type = TREE_TYPE (valist);
15561 if (have_va_type == error_mark_node)
15562 return GS_ERROR;
15563 have_va_type = targetm.canonical_va_list_type (have_va_type);
15564 if (have_va_type == NULL_TREE
15565 && POINTER_TYPE_P (TREE_TYPE (valist)))
15566 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */
15567 have_va_type
15568 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
15569 gcc_assert (have_va_type != NULL_TREE);
15570
15571 /* Generate a diagnostic for requesting data of a type that cannot
15572 be passed through `...' due to type promotion at the call site. */
15573 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
15574 != type)
15575 {
15576 static bool gave_help;
15577 bool warned;
15578 /* Use the expansion point to handle cases such as passing bool (defined
15579 in a system header) through `...'. */
15580 location_t xloc
15581 = expansion_point_location_if_in_system_header (loc);
15582
15583 /* Unfortunately, this is merely undefined, rather than a constraint
15584 violation, so we cannot make this an error. If this call is never
15585 executed, the program is still strictly conforming. */
15586 auto_diagnostic_group d;
15587 warned = warning_at (xloc, 0,
15588 "%qT is promoted to %qT when passed through %<...%>",
15589 type, promoted_type);
15590 if (!gave_help && warned)
15591 {
15592 gave_help = true;
15593 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
15594 promoted_type, type);
15595 }
15596
15597 /* We can, however, treat "undefined" any way we please.
15598 Call abort to encourage the user to fix the program. */
15599 if (warned)
15600 inform (xloc, "if this code is reached, the program will abort");
15601 /* Before the abort, allow the evaluation of the va_list
15602 expression to exit or longjmp. */
15603 gimplify_and_add (valist, pre_p);
15604 t = build_call_expr_loc (loc,
15605 builtin_decl_implicit (BUILT_IN_TRAP), 0);
15606 gimplify_and_add (t, pre_p);
15607
15608 /* This is dead code, but go ahead and finish so that the
15609 mode of the result comes out right. */
15610 *expr_p = dummy_object (type);
15611 return GS_ALL_DONE;
15612 }
15613
15614 tag = build_int_cst (build_pointer_type (type), 0);
15615 aptag = build_int_cst (TREE_TYPE (valist), 0);
15616
15617 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
15618 valist, tag, aptag);
15619
15620 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
15621 needs to be expanded. */
15622 cfun->curr_properties &= ~PROP_gimple_lva;
15623
15624 return GS_OK;
15625 }
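/* For example (dump form approximate), "x = va_arg (ap, int)" is kept as
 the internal call

 x = .VA_ARG (&ap, 0B, 0B);

 whose extra pointer arguments have types encoding the requested type
 and the va_list type; tree-stdarg.c later expands it for the target. */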
15626
15627 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15628
15629 DST/SRC are the destination and source respectively. You can pass
15630 ungimplified trees in DST or SRC, in which case they will be
15631 converted to a gimple operand if necessary.
15632
15633 This function returns the newly created GIMPLE_ASSIGN tuple. */
15634
15635 gimple *
15636 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15637 {
15638 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15639 gimplify_and_add (t, seq_p);
15640 ggc_free (t);
15641 return gimple_seq_last_stmt (*seq_p);
15642 }
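/* A typical use, for instance from a pass building GIMPLE directly:

 gimple_seq seq = NULL;
 gimplify_assign (dst, src, &seq);

 appends "dst = src" to SEQ, gimplifying SRC first if it is not yet a
 valid RHS, and returns the new assignment statement. */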
15643
15644 inline hashval_t
15645 gimplify_hasher::hash (const elt_t *p)
15646 {
15647 tree t = p->val;
15648 return iterative_hash_expr (t, 0);
15649 }
15650
15651 inline bool
15652 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15653 {
15654 tree t1 = p1->val;
15655 tree t2 = p2->val;
15656 enum tree_code code = TREE_CODE (t1);
15657
15658 if (TREE_CODE (t2) != code
15659 || TREE_TYPE (t1) != TREE_TYPE (t2))
15660 return false;
15661
15662 if (!operand_equal_p (t1, t2, 0))
15663 return false;
15664
15665 /* Only allow them to compare equal if they also hash equal; otherwise
15666 results are nondeterministic, and we would fail bootstrap comparison. */
15667 gcc_checking_assert (hash (p1) == hash (p2));
15668
15669 return true;
15670 }