/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "stringpool.h"
#include "attribs.h"
#include "omp-offload.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages
     about invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};

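/* Pass-global state used while scanning, as far as it is used in this
   file: ALL_CONTEXTS maps each OMP statement to its omp_context, the
   nesting counters track how deeply taskreg/target constructs are
   nested, the bitmaps record variables this pass makes addressable and
   globals seen as non-addressable (see PR91216 in
   use_pointer_for_field), and TASKREG_CONTEXTS / TASK_CPYFNS collect
   contexts and task statements for later processing.  */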
static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap make_addressable_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
static bool omp_maybe_offloaded_ctx (omp_context *ctx);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_ASSUME: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return whether CTX represents an OpenACC 'kernels' construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */

static bool
is_oacc_kernels_decomposed_part (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
}

/* Return true if STMT corresponds to an OpenMP target region.  */
static bool
is_omp_target (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
    {
      int kind = gimple_omp_target_kind (stmt);
      return (kind == GF_OMP_TARGET_KIND_REGION
	      || kind == GF_OMP_TARGET_KIND_DATA
	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
    }
  return false;
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;

  if (ctx)
    {
      DECL_CHAIN (copy) = ctx->block_vars;
      ctx->block_vars = copy;
    }
  else
    record_vars (copy);

  /* If VAR is listed in make_addressable_vars, it wasn't
     originally addressable, but was only later made so.
     We don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((make_addressable_vars
	   && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;

  return copy;
}

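/* Construct a new automatic decl like VAR, reusing VAR's name and type.  */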
static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || code == OMP_CLAUSE_ALLOCATE
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  The
   MASK bits select where the field is installed and how it is keyed.  */

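/* A sketch of the MASK bits, as used by the callers in this file:
   MASK & 1 installs the field into CTX->FIELD_MAP / CTX->RECORD_TYPE
   and MASK & 2 into CTX->SFIELD_MAP / CTX->SRECORD_TYPE; MASK & 4 turns
   an array type into a pointer to a pointer; MASK & 8 keys the map by
   &DECL_UID (VAR) and MASK & 16 by &DECL_NAME (VAR) (using the
   omp_array_data type), so one VAR can have several distinct fields;
   setting MASK & 32 suppresses the dereference of by-reference
   privatized variables that would otherwise happen for MASK == 1.  */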
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

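/* Install a context-local copy of VAR in CTX and record the mapping
   in CTX's decl map.  */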
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context's data structures.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;
  delete ctx->allocate_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

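  /* First pass over CLAUSES: record allocate clause allocators (and
     alignments) in CTX->ALLOCATE_MAP so the clause scanning below can
     consult them.  */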
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
      {
	/* The allocate clauses that appear on a target construct or on
	   constructs in a target region must specify an allocator expression
	   unless a requires directive with the dynamic_allocators clause
	   is present in the same compilation unit.  */
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
	    && omp_maybe_offloaded_ctx (ctx))
	  error_at (OMP_CLAUSE_LOCATION (c), "%<allocate%> clause must"
		    " specify an allocator here");
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	tree val = integer_zero_node;
	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
      }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_privatize_by_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (INDIRECT_REF_P (t)
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (is_omp_target (ctx->stmt))
		{
		  if (is_variable_sized (t))
		    {
		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
		      t = DECL_VALUE_EXPR (t);
		      gcc_assert (INDIRECT_REF_P (t));
		      t = TREE_OPERAND (t, 0);
		      gcc_assert (DECL_P (t));
		    }
		  tree at = t;
		  if (ctx->outer)
		    scan_omp_op (&at, ctx->outer);
		  tree nt = omp_copy_decl_1 (at, ctx->outer);
		  splay_tree_insert (ctx->field_map,
				     (splay_tree_key) &DECL_CONTEXT (t),
				     (splay_tree_value) nt);
		  if (at != t)
		    splay_tree_insert (ctx->field_map,
				       (splay_tree_key) &DECL_CONTEXT (at),
				       (splay_tree_value) nt);
		  break;
		}
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_omp_target (ctx->stmt))
	    {
	      tree at = decl;
	      if (ctx->outer)
		scan_omp_op (&at, ctx->outer);
	      tree nt = omp_copy_decl_1 (at, ctx->outer);
	      splay_tree_insert (ctx->field_map,
				 (splay_tree_key) &DECL_CONTEXT (decl),
				 (splay_tree_value) nt);
	      if (at != decl)
		splay_tree_insert (ctx->field_map,
				   (splay_tree_key) &DECL_CONTEXT (at),
				   (splay_tree_value) nt);
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
		      && lang_hooks.decls.omp_array_data (decl, true)))
		{
		  by_ref = !omp_privatize_by_reference (decl);
		  install_var_field (decl, by_ref, 3, ctx);
		}
	      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		{
		  if (INDIRECT_REF_P (decl))
		    decl = TREE_OPERAND (decl, 0);
		  install_var_field (decl, true, 3, ctx);
		}
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_privatize_by_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  /* For descr arrays on target: firstprivatize data + attach ptr.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	      && is_gimple_omp_offloaded (ctx->stmt)
	      && !is_gimple_omp_oacc (ctx->stmt)
	      && lang_hooks.decls.omp_array_data (decl, true))
	    {
	      install_var_field (decl, false, 16 | 3, ctx);
	      install_var_field (decl, true, 8 | 3, ctx);
	    }
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_privatize_by_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (INDIRECT_REF_P (decl2));
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (INDIRECT_REF_P (decl)
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  goto do_private;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_FILTER:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* If requested, make 'decl' addressable.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
	    {
	      gcc_checking_assert (DECL_P (decl));

	      bool decl_addressable = TREE_ADDRESSABLE (decl);
	      if (!decl_addressable)
		{
		  if (!make_addressable_vars)
		    make_addressable_vars = BITMAP_ALLOC (NULL);
		  bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
		  TREE_ADDRESSABLE (decl) = 1;
		}

	      if (dump_enabled_p ())
		{
		  location_t loc = OMP_CLAUSE_LOCATION (c);
		  const dump_user_location_t d_u_loc
		    = dump_user_location_t::from_location_t (loc);
		  /* PR100695 "Format decoder, quoting in 'dump_printf' etc."  */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
		  if (!decl_addressable)
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " made addressable\n",
				     decl);
		  else
		    dump_printf_loc (MSG_NOTE, d_u_loc,
				     "variable %<%T%>"
				     " already made addressable\n",
				     decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
		}

	      /* Done.  */
	      OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
	    }
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_PRESENT_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.
		 If we had an error, we may not have attempted to sort clauses
		 properly, so avoid the test.  */
	      if (is_gimple_omp_offloaded (ctx->stmt)
		  && !seen_error ())
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (INDIRECT_REF_P (decl)
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			    == REFERENCE_TYPE)
			   || (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			       == POINTER_TYPE)))))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !(is_gimple_omp_oacc (ctx->stmt)
			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
	    {
	      while (INDIRECT_REF_P (decl)
		     || TREE_CODE (decl) == ARRAY_REF)
		decl = TREE_OPERAND (decl, 0);
	    }

	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (INDIRECT_REF_P (decl2));
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	case OMP_CLAUSE_NOHOST:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    omp_context *rctx = ctx;
	    if (is_omp_target (ctx->stmt))
	      rctx = ctx->outer;
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
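
/* For example, lowering the first parallel region in a function foo
   produces a child function named "foo._omp_fn.0", while a task copy
   function gets the "_omp_cpyfn" suffix instead.  (Illustrative; the
   trailing number simply counts clones of the function.)  */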

/* Return true if CTX may belong to offloaded code: either if the current
   function is offloaded, or if any enclosing context corresponds to a
   target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It will not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable)
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      if (lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
	{
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    DECL_ATTRIBUTES (decl)
	      = remove_attribute ("omp declare target",
				  copy_list (DECL_ATTRIBUTES (decl)));
	  else
	    target_attr = NULL;
	}
      if (target_attr
	  && is_gimple_omp_offloaded (ctx->stmt)
	  && lookup_attribute ("noclone", DECL_ATTRIBUTES (decl)) == NULL_TREE)
	DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("noclone"),
					    NULL_TREE, DECL_ATTRIBUTES (decl));
      if (target_attr)
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier (target_attr),
		       NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
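
/* The decl built above corresponds, schematically, to

     static void foo._omp_fn.N (void *.omp_data_i);

   or, for a task copy function,

     static void foo._omp_cpyfn.N (void *.omp_data_o, void *.omp_data_i);

   (Illustrative prototypes only; the body is created later, when
   pass_expand_omp outlines the region.)  */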

/* Callback for walk_gimple_seq.  Check if a combined parallel
   contains a GIMPLE_OMP_FOR which is gimple_omp_for_combined_into_p.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}

/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  count++;
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		      gimple_omp_taskreg_set_clauses (stmt, c);
		    }
		}
	}
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
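
/* As an illustrative example (variable and function names are
   arbitrary), a combined

     #pragma omp parallel for collapse(2) lastprivate(j)
     for (i = 0; i < f1 (); i++)
       for (j = 0; j < f2 (); j++)
	 ...

   with non-constant bounds gets _LOOPTEMP_ clauses for istart/iend,
   one per collapsed dimension beyond the first, plus one more for the
   total iteration count needed by the lastprivate handling.  */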

/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
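
/* E.g. for (illustrative only)

     int a = ...;
     #pragma omp parallel shared(a)
     use (&a);

   the scan creates a ".omp_data_s" record with a field for 'a' (a
   pointer field when the variable must be shared by reference), through
   which the encountering thread and foo._omp_fn.N communicate.  */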

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns non-NULL for some tree,
   replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and lay out the record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any make_addressable_vars were needed, verify that
     use_pointer_for_field hasn't changed because of that for any of the
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements.  If it did, update the field types now.  */
  if (make_addressable_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied;
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move the fields corresponding to the first and second
	     _looptemp_ clauses first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move the field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
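
/* For instance, for a task with a detach clause the layout must start
   with the field holding the detach event handle, because GOMP_task
   fills that field at a fixed position; likewise GOMP_taskloop expects
   the two _looptemp_ fields (and a _reductemp_ field, if any) at the
   start of the record.  (Illustrative summary of the reordering
   performed above.)  */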

/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}

/* Return whether CTX's parent compute construct is an OpenACC 'kernels'
   construct.
   (This doesn't include OpenACC 'kernels' decomposed parts.)  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (; ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}

/* Check the parallelism clauses inside an OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
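
/* E.g. this (illustrative) OpenACC loop nest is diagnosed, because the
   inner loop requests gang parallelism already claimed by the outer one:

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (...)
	 #pragma acc loop gang  // error: inner loop uses same ...
	 for (...)
	   ...
     }
*/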

/* Scan a GIMPLE_OMP_FOR.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
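
/* The reduction-consistency check above warns, for example, about the
   middle loop in (illustrative)

     #pragma acc loop reduction(+:sum)
     for (...)
       #pragma acc loop  // warning: nested loop in reduction needs ...
       for (...)
	 #pragma acc loop reduction(+:sum)
	 for (...)
	   sum += ...;
*/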

/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
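
/* The GIMPLE_BIND built above has, schematically, the shape

     cond = .GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with an extra _simt_ clause>
	   goto lab3;
     lab2: <original simd loop>
     lab3:

   so later passes can keep whichever variant matches the target.  */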

static tree omp_find_scan (gimple_stmt_iterator *, bool *,
			   struct walk_stmt_info *);
static omp_context *maybe_lookup_ctx (gimple *);

/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for the scan phase loop.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
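
/* E.g. (illustrative) an inclusive simd scan such as

     #pragma omp simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
	 r += a[i];
	 #pragma omp scan inclusive (r)
	 b[i] = r;
       }

   is duplicated by the code above into an input phase loop and a
   scan phase loop.  */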

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}

/* Scan a GIMPLE_OMP_TARGET.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
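
/* E.g. for (illustrative only)

     #pragma omp target map(tofrom: x)
     x++;

   this builds a ".omp_data_t" record with a field for 'x' and, in the
   offloaded case, the child function that receives the mapped data
   block as its ".omp_data_i" argument.  */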

/* Scan an OpenMP teams directive.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}

/* Check nesting restrictions.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }

	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
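  /* E.g. (illustrative) the teams check above rejects

       #pragma omp teams
       #pragma omp single  // error: only distribute, parallel or loop ...
       ;

     and the switch below enforces the remaining per-construct rules.  */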
3321 switch (gimple_code (g: stmt))
3322 {
3323 case GIMPLE_OMP_FOR:
3324 if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_SIMD)
3325 return true;
3326 if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3327 {
3328 if (ctx != NULL && gimple_code (g: ctx->stmt) != GIMPLE_OMP_TEAMS)
3329 {
3330 error_at (gimple_location (g: stmt),
3331 "%<distribute%> region must be strictly nested "
3332 "inside %<teams%> construct");
3333 return false;
3334 }
3335 return true;
3336 }
3337 /* We split taskloop into task and nested taskloop in it. */
3338 if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3339 return true;
3340 /* For now, hope this will change and loop bind(parallel) will not
3341 be allowed in lots of contexts. */
3342 if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR
3343 && omp_find_clause (clauses: gimple_omp_for_clauses (gs: stmt), kind: OMP_CLAUSE_BIND))
3344 return true;
3345 if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3346 {
3347 bool ok = false;
3348
3349 if (ctx)
3350 switch (gimple_code (g: ctx->stmt))
3351 {
3352 case GIMPLE_OMP_FOR:
3353 ok = (gimple_omp_for_kind (g: ctx->stmt)
3354 == GF_OMP_FOR_KIND_OACC_LOOP);
3355 break;
3356
3357 case GIMPLE_OMP_TARGET:
3358 switch (gimple_omp_target_kind (g: ctx->stmt))
3359 {
3360 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3361 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3362 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3363 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3364 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3365 ok = true;
3366 break;
3367
3368 default:
3369 break;
3370 }
3371
3372 default:
3373 break;
3374 }
3375 else if (oacc_get_fn_attrib (fn: current_function_decl))
3376 ok = true;
3377 if (!ok)
3378 {
3379 error_at (gimple_location (g: stmt),
3380 "OpenACC loop directive must be associated with"
3381 " an OpenACC compute region");
3382 return false;
3383 }
3384 }
3385 /* FALLTHRU */
3386 case GIMPLE_CALL:
3387 if (is_gimple_call (gs: stmt)
3388 && (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
3389 == BUILT_IN_GOMP_CANCEL
3390 || DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
3391 == BUILT_IN_GOMP_CANCELLATION_POINT))
3392 {
3393 const char *bad = NULL;
3394 const char *kind = NULL;
3395 const char *construct
3396 = (DECL_FUNCTION_CODE (decl: gimple_call_fndecl (gs: stmt))
3397 == BUILT_IN_GOMP_CANCEL)
3398 ? "cancel"
3399 : "cancellation point";
3400 if (ctx == NULL)
3401 {
3402 error_at (gimple_location (g: stmt), "orphaned %qs construct",
3403 construct);
3404 return false;
3405 }
          switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
                  ? tree_to_shwi (gimple_call_arg (stmt, 0))
                  : 0)
            {
            case 1:
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
                bad = "parallel";
              else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                       == BUILT_IN_GOMP_CANCEL
                       && !integer_zerop (gimple_call_arg (stmt, 1)))
                ctx->cancellable = true;
              kind = "parallel";
              break;
            case 2:
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
                  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
                bad = "for";
              else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                       == BUILT_IN_GOMP_CANCEL
                       && !integer_zerop (gimple_call_arg (stmt, 1)))
                {
                  ctx->cancellable = true;
                  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                       OMP_CLAUSE_NOWAIT))
                    warning_at (gimple_location (stmt), 0,
                                "%<cancel for%> inside "
                                "%<nowait%> for construct");
                  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                       OMP_CLAUSE_ORDERED))
                    warning_at (gimple_location (stmt), 0,
                                "%<cancel for%> inside "
                                "%<ordered%> for construct");
                }
              kind = "for";
              break;
            case 4:
              if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
                  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
                bad = "sections";
              else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                       == BUILT_IN_GOMP_CANCEL
                       && !integer_zerop (gimple_call_arg (stmt, 1)))
                {
                  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
                    {
                      ctx->cancellable = true;
                      if (omp_find_clause (gimple_omp_sections_clauses
                                                                (ctx->stmt),
                                           OMP_CLAUSE_NOWAIT))
                        warning_at (gimple_location (stmt), 0,
                                    "%<cancel sections%> inside "
                                    "%<nowait%> sections construct");
                    }
                  else
                    {
                      gcc_assert (ctx->outer
                                  && gimple_code (ctx->outer->stmt)
                                     == GIMPLE_OMP_SECTIONS);
                      ctx->outer->cancellable = true;
                      if (omp_find_clause (gimple_omp_sections_clauses
                                                        (ctx->outer->stmt),
                                           OMP_CLAUSE_NOWAIT))
                        warning_at (gimple_location (stmt), 0,
                                    "%<cancel sections%> inside "
                                    "%<nowait%> sections construct");
                    }
                }
              kind = "sections";
              break;
            case 8:
              if (!is_task_ctx (ctx)
                  && (!is_taskloop_ctx (ctx)
                      || ctx->outer == NULL
                      || !is_task_ctx (ctx->outer)))
                bad = "task";
              else
                {
                  for (omp_context *octx = ctx->outer;
                       octx; octx = octx->outer)
                    {
                      switch (gimple_code (octx->stmt))
                        {
                        case GIMPLE_OMP_TASKGROUP:
                          break;
                        case GIMPLE_OMP_TARGET:
                          if (gimple_omp_target_kind (octx->stmt)
                              != GF_OMP_TARGET_KIND_REGION)
                            continue;
                          /* FALLTHRU */
                        case GIMPLE_OMP_PARALLEL:
                        case GIMPLE_OMP_TEAMS:
                          error_at (gimple_location (stmt),
                                    "%<%s taskgroup%> construct not closely "
                                    "nested inside of %<taskgroup%> region",
                                    construct);
                          return false;
                        case GIMPLE_OMP_TASK:
                          if (gimple_omp_task_taskloop_p (octx->stmt)
                              && octx->outer
                              && is_taskloop_ctx (octx->outer))
                            {
                              tree clauses
                                = gimple_omp_for_clauses (octx->outer->stmt);
                              if (!omp_find_clause (clauses,
                                                    OMP_CLAUSE_NOGROUP))
                                break;
                            }
                          continue;
                        default:
                          continue;
                        }
                      break;
                    }
                  ctx->cancellable = true;
                }
              kind = "taskgroup";
              break;
            default:
              error_at (gimple_location (stmt), "invalid arguments");
              return false;
            }
          if (bad)
            {
              error_at (gimple_location (stmt),
                        "%<%s %s%> construct not closely nested inside of %qs",
                        construct, kind, bad);
              return false;
            }
        }
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
                && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
              break;
            /* FALLTHRU */
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_ORDERED:
          case GIMPLE_OMP_MASTER:
          case GIMPLE_OMP_MASKED:
          case GIMPLE_OMP_TASK:
          case GIMPLE_OMP_CRITICAL:
            if (is_gimple_call (stmt))
              {
                if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
                    != BUILT_IN_GOMP_BARRIER)
                  return true;
                error_at (gimple_location (stmt),
                          "barrier region may not be closely nested inside "
                          "of work-sharing, %<loop%>, %<critical%>, "
                          "%<ordered%>, %<master%>, %<masked%>, explicit "
                          "%<task%> or %<taskloop%> region");
                return false;
              }
            error_at (gimple_location (stmt),
                      "work-sharing region may not be closely nested inside "
                      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
                      "%<master%>, %<masked%>, explicit %<task%> or "
                      "%<taskloop%> region");
            return false;
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
            return true;
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                == GF_OMP_TARGET_KIND_REGION)
              return true;
            break;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
                && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
              break;
            /* FALLTHRU */
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_TASK:
            error_at (gimple_location (stmt),
                      "%qs region may not be closely nested inside "
                      "of work-sharing, %<loop%>, explicit %<task%> or "
                      "%<taskloop%> region",
                      gimple_code (stmt) == GIMPLE_OMP_MASTER
                      ? "master" : "masked");
            return false;
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
            return true;
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                == GF_OMP_TARGET_KIND_REGION)
              return true;
            break;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
                && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
              break;
            /* FALLTHRU */
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_TASK:
          case GIMPLE_OMP_CRITICAL:
          case GIMPLE_OMP_ORDERED:
          case GIMPLE_OMP_MASTER:
          case GIMPLE_OMP_MASKED:
            error_at (gimple_location (stmt),
                      "%<scope%> region may not be closely nested inside "
                      "of work-sharing, %<loop%>, explicit %<task%>, "
                      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
                      "or %<masked%> region");
            return false;
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
            return true;
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                == GF_OMP_TARGET_KIND_REGION)
              return true;
            break;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_TASK:
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
          {
            enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
            error_at (OMP_CLAUSE_LOCATION (c),
                      "%<%s(%s)%> is only allowed in %<omp ordered%>",
                      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross",
                      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
            return false;
          }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
           c; c = OMP_CLAUSE_CHAIN (c))
        {
          if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS)
            {
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
                {
                  error_at (OMP_CLAUSE_LOCATION (c),
                            "invalid depend kind in omp %<ordered%> "
                            "%<depend%>");
                  return false;
                }
              gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
                          || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
              continue;
            }

          tree oclause;
          /* Look for a containing ordered(N) loop.  */
          if (ctx == NULL
              || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
              || (oclause
                  = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                     OMP_CLAUSE_ORDERED)) == NULL_TREE)
            {
              error_at (OMP_CLAUSE_LOCATION (c),
                        "%<ordered%> construct with %<depend%> clause "
                        "must be closely nested inside an %<ordered%> loop");
              return false;
            }
        }
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
        {
          /* An ordered simd construct must be closely nested inside a simd
             region, and a simd region must not encounter constructs other
             than ordered simd; therefore ordered simd is either orphaned,
             or ctx->stmt must be simd.  The latter case has already been
             handled earlier.  */
          if (ctx != NULL)
            {
              error_at (gimple_location (stmt),
                        "%<ordered%> %<simd%> must be closely nested inside "
                        "%<simd%> region");
              return false;
            }
        }
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_CRITICAL:
          case GIMPLE_OMP_TASK:
          case GIMPLE_OMP_ORDERED:
          ordered_in_taskloop:
            error_at (gimple_location (stmt),
                      "%<ordered%> region may not be closely nested inside "
                      "of %<critical%>, %<ordered%>, explicit %<task%> or "
                      "%<taskloop%> region");
            return false;
          case GIMPLE_OMP_FOR:
            if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
              goto ordered_in_taskloop;
            tree o;
            o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                 OMP_CLAUSE_ORDERED);
            if (o == NULL)
              {
                error_at (gimple_location (stmt),
                          "%<ordered%> region must be closely nested inside "
                          "a loop region with an %<ordered%> clause");
                return false;
              }
            if (!gimple_omp_ordered_standalone_p (stmt))
              {
                if (OMP_CLAUSE_ORDERED_DOACROSS (o))
                  {
                    error_at (gimple_location (stmt),
                              "%<ordered%> construct without %<doacross%> or "
                              "%<depend%> clauses must not have the same "
                              "binding region as %<ordered%> construct with "
                              "those clauses");
                    return false;
                  }
                else if (OMP_CLAUSE_ORDERED_EXPR (o))
                  {
                    tree co
                      = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                         OMP_CLAUSE_COLLAPSE);
                    HOST_WIDE_INT
                      o_n = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (o));
                    HOST_WIDE_INT c_n = 1;
                    if (co)
                      c_n = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co));
                    if (o_n != c_n)
                      {
                        error_at (gimple_location (stmt),
                                  "%<ordered%> construct without %<doacross%> "
                                  "or %<depend%> clauses binds to loop where "
                                  "%<collapse%> argument %wd is different from "
                                  "%<ordered%> argument %wd", c_n, o_n);
                        return false;
                      }
                  }
              }
            return true;
          case GIMPLE_OMP_TARGET:
            if (gimple_omp_target_kind (ctx->stmt)
                != GF_OMP_TARGET_KIND_REGION)
              break;
            /* FALLTHRU */
          case GIMPLE_OMP_PARALLEL:
          case GIMPLE_OMP_TEAMS:
            error_at (gimple_location (stmt),
                      "%<ordered%> region must be closely nested inside "
                      "a loop region with an %<ordered%> clause");
            return false;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
        tree this_stmt_name
          = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
        for (; ctx != NULL; ctx = ctx->outer)
          if (gomp_critical *other_crit
              = dyn_cast <gomp_critical *> (ctx->stmt))
            if (this_stmt_name == gimple_omp_critical_name (other_crit))
              {
                error_at (gimple_location (stmt),
                          "%<critical%> region may not be nested inside "
                          "a %<critical%> region with the same name");
                return false;
              }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
        break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
               || (gimple_omp_target_kind (ctx->stmt)
                   != GF_OMP_TARGET_KIND_REGION))
        {
          /* A teams construct can appear either strictly nested inside of
             a target construct with no intervening statements, or it can
             be encountered only by the initial task (and so must not
             appear inside any OpenMP construct).  */
          error_at (gimple_location (stmt),
                    "%<teams%> construct must be closely nested inside of "
                    "%<target%> construct or not nested in any OpenMP "
                    "construct");
          return false;
        }
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS)
          {
            enum omp_clause_doacross_kind kind = OMP_CLAUSE_DOACROSS_KIND (c);
            error_at (OMP_CLAUSE_LOCATION (c),
                      "%<depend(%s)%> is only allowed in %<omp ordered%>",
                      kind == OMP_CLAUSE_DOACROSS_SOURCE ? "source" : "sink");
            return false;
          }
      if (is_gimple_omp_offloaded (stmt)
          && oacc_get_fn_attrib (cfun->decl) != NULL)
        {
          error_at (gimple_location (stmt),
                    "OpenACC region inside of OpenACC routine, nested "
                    "parallelism not supported yet");
          return false;
        }
      for (; ctx != NULL; ctx = ctx->outer)
        {
          if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
            {
              if (is_gimple_omp (stmt)
                  && is_gimple_omp_oacc (stmt)
                  && is_gimple_omp (ctx->stmt))
                {
                  error_at (gimple_location (stmt),
                            "OpenACC construct inside of non-OpenACC region");
                  return false;
                }
              continue;
            }

          const char *stmt_name, *ctx_stmt_name;
          switch (gimple_omp_target_kind (stmt))
            {
            case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
            case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
            case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
            case GF_OMP_TARGET_KIND_ENTER_DATA:
              stmt_name = "target enter data"; break;
            case GF_OMP_TARGET_KIND_EXIT_DATA:
              stmt_name = "target exit data"; break;
            case GF_OMP_TARGET_KIND_OACC_PARALLEL:
              stmt_name = "parallel"; break;
            case GF_OMP_TARGET_KIND_OACC_KERNELS:
              stmt_name = "kernels"; break;
            case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
            case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
            case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
            case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
              stmt_name = "enter data"; break;
            case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
              stmt_name = "exit data"; break;
            case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
            case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
              break;
            case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
            case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
            case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
              /* OpenACC 'kernels' decomposed parts.  */
              stmt_name = "kernels"; break;
            default: gcc_unreachable ();
            }
          switch (gimple_omp_target_kind (ctx->stmt))
            {
            case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
            case GF_OMP_TARGET_KIND_DATA:
              ctx_stmt_name = "target data"; break;
            case GF_OMP_TARGET_KIND_OACC_PARALLEL:
              ctx_stmt_name = "parallel"; break;
            case GF_OMP_TARGET_KIND_OACC_KERNELS:
              ctx_stmt_name = "kernels"; break;
            case GF_OMP_TARGET_KIND_OACC_SERIAL:
              ctx_stmt_name = "serial"; break;
            case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
            case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
              ctx_stmt_name = "host_data"; break;
            case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
            case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
            case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
              /* OpenACC 'kernels' decomposed parts.  */
              ctx_stmt_name = "kernels"; break;
            default: gcc_unreachable ();
            }

          /* OpenACC/OpenMP mismatch?  */
          if (is_gimple_omp_oacc (stmt)
              != is_gimple_omp_oacc (ctx->stmt))
            {
              error_at (gimple_location (stmt),
                        "%s %qs construct inside of %s %qs region",
                        (is_gimple_omp_oacc (stmt)
                         ? "OpenACC" : "OpenMP"), stmt_name,
                        (is_gimple_omp_oacc (ctx->stmt)
                         ? "OpenACC" : "OpenMP"), ctx_stmt_name);
              return false;
            }
          if (is_gimple_omp_offloaded (ctx->stmt))
            {
              /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
              if (is_gimple_omp_oacc (ctx->stmt))
                {
                  error_at (gimple_location (stmt),
                            "%qs construct inside of %qs region",
                            stmt_name, ctx_stmt_name);
                  return false;
                }
              else
                {
                  if ((gimple_omp_target_kind (ctx->stmt)
                       == GF_OMP_TARGET_KIND_REGION)
                      && (gimple_omp_target_kind (stmt)
                          == GF_OMP_TARGET_KIND_REGION))
                    {
                      c = omp_find_clause (gimple_omp_target_clauses (stmt),
                                           OMP_CLAUSE_DEVICE);
                      if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
                        break;
                    }
                  warning_at (gimple_location (stmt), 0,
                              "%qs construct inside of %qs region",
                              stmt_name, ctx_stmt_name);
                }
            }
        }
      break;
    default:
      break;
    }
  return true;
}
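
/* As a rough illustration of the checks above (hypothetical user code,
   not an exhaustive list), both of the following are rejected during
   scanning, before any lowering takes place:

     #pragma omp parallel
     #pragma omp teams        // error: teams must be closely nested in
     ;                        // target, or not nested at all

     #pragma omp for          // no ordered clause on the loop
     for (i = 0; i < n; i++)
       {
         #pragma omp ordered  // error: binds to a loop region without
         ;                    // an ordered clause
       }

   When a violation is diagnosed we return false, and the caller
   (scan_omp_1_stmt) replaces the offending statement with a nop.  */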

/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt, used to
   scan for OMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;
  tree tmp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
        {
          tmp = NULL_TREE;
          if (TREE_CODE (t) == VAR_DECL
              && (tmp = lookup_attribute ("omp allocate var",
                                          DECL_ATTRIBUTES (t))) != NULL_TREE)
            t = TREE_VALUE (TREE_VALUE (tmp));
          tree repl = remap_decl (t, &ctx->cb);
          gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
          if (tmp != NULL_TREE && t != repl)
            *tp = build_fold_addr_expr (repl);
          else if (tmp == NULL_TREE)
            *tp = repl;
        }
      break;

    case INDIRECT_REF:
    case MEM_REF:
      if (ctx
          && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
          && ((tmp = lookup_attribute ("omp allocate var",
                                       DECL_ATTRIBUTES (TREE_OPERAND (t, 0))))
              != NULL_TREE))
        {
          tmp = TREE_VALUE (TREE_VALUE (tmp));
          tree repl = remap_decl (tmp, &ctx->cb);
          gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
          if (tmp != repl)
            *tp = repl;
          break;
        }
      gcc_fallthrough ();

    default:
      if (ctx && TYPE_P (t))
        *tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
        {
          *walk_subtrees = 1;
          if (ctx)
            {
              tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
              if (tem != TREE_TYPE (t))
                {
                  if (TREE_CODE (t) == INTEGER_CST)
                    *tp = wide_int_to_tree (tem, wi::to_wide (t));
                  else
                    TREE_TYPE (t) = tem;
                }
            }
        }
      break;
    }

  return NULL_TREE;
}

/* Return true if FNDECL is a setjmp or a longjmp.  */

static bool
setjmp_or_longjmp_p (const_tree fndecl)
{
  if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP, BUILT_IN_LONGJMP))
    return true;

  tree declname = DECL_NAME (fndecl);
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
          && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
}
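
/* Note that besides the two builtins, any public function declared at
   file scope whose name is literally "setjmp" or "longjmp" is matched
   by the name-based check above; e.g. a hypothetical

     extern int setjmp (void *);

   is treated as setjmp even without the builtin classification.  This
   is used below to reject such calls inside simd regions.  */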

/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
        {
          if (ctx
              && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
              && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
              && setjmp_or_longjmp_p (fndecl)
              && !ctx->loop_p)
            {
              remove = true;
              error_at (gimple_location (stmt),
                        "setjmp/longjmp inside %<simd%> construct");
            }
          else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fndecl))
              {
              case BUILT_IN_GOMP_BARRIER:
              case BUILT_IN_GOMP_CANCEL:
              case BUILT_IN_GOMP_CANCELLATION_POINT:
              case BUILT_IN_GOMP_TASKYIELD:
              case BUILT_IN_GOMP_TASKWAIT:
              case BUILT_IN_GOMP_TASKGROUP_START:
              case BUILT_IN_GOMP_TASKGROUP_END:
                remove = !check_omp_nesting_restrictions (stmt, ctx);
                break;
              default:
                break;
              }
          else if (ctx)
            {
              omp_context *octx = ctx;
              if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
                octx = ctx->outer;
              if (octx->order_concurrent && omp_runtime_api_call (fndecl))
                {
                  remove = true;
                  error_at (gimple_location (stmt),
                            "OpenMP runtime API call %qD in a region with "
                            "%<order(concurrent)%> clause", fndecl);
                }
              if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
                  && omp_runtime_api_call (fndecl)
                  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
                       != strlen ("omp_get_num_teams"))
                      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
                                 "omp_get_num_teams") != 0)
                  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
                       != strlen ("omp_get_team_num"))
                      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
                                 "omp_get_team_num") != 0))
                {
                  remove = true;
                  error_at (gimple_location (stmt),
                            "OpenMP runtime API call %qD strictly nested in a "
                            "%<teams%> region", fndecl);
                }
              if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
                  && (gimple_omp_target_kind (ctx->stmt)
                      == GF_OMP_TARGET_KIND_REGION)
                  && omp_runtime_api_call (fndecl))
                {
                  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
                  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
                  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
                    error_at (gimple_location (stmt),
                              "OpenMP runtime API call %qD in a region with "
                              "%<device(ancestor)%> clause", fndecl);
                }
            }
        }
    }
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
           == GF_OMP_FOR_KIND_SIMD)
          && gimple_omp_for_combined_into_p (stmt)
          && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
        {
          tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
          tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
          if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
            {
              scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
              break;
            }
        }
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
           == GF_OMP_FOR_KIND_SIMD)
          && omp_maybe_offloaded_ctx (ctx)
          && omp_max_simt_vf ()
          && gimple_omp_for_collapse (stmt) == 1)
        scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
        scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
        {
          if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
            ctx->scan_inclusive = true;
          else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
            ctx->scan_exclusive = true;
        }
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      if (is_gimple_omp_offloaded (stmt))
        {
          taskreg_nesting_level++;
          scan_omp_target (as_a <gomp_target *> (stmt), ctx);
          taskreg_nesting_level--;
        }
      else
        scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
        {
          taskreg_nesting_level++;
          scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
          taskreg_nesting_level--;
        }
      else
        scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
        tree var;

        *handled_ops_p = false;
        if (ctx)
          for (var = gimple_bind_vars (as_a <gbind *> (stmt));
               var;
               var = DECL_CHAIN (var))
            insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
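
/* As a small sketch of the GIMPLE_OMP_FOR dispatch above: for a
   hypothetical combined construct such as

     #pragma omp parallel for simd reduction (inscan, +:r)
     for (i = 0; i < n; i++)
       {
         r += a[i];
         #pragma omp scan inclusive (r)
         b[i] = r;
       }

   the simd part is combined into the enclosing construct, its context
   is not a scan, and it carries an OMP_CLAUSE_REDUCTION with
   OMP_CLAUSE_REDUCTION_INSCAN set, so it is routed to
   scan_omp_simd_scan rather than the plain scan_omp_for path.  */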

/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq *body_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* Remove omp_member_access_dummy_var variables from gimple_bind_vars
   of BIND if in a method.  */

static void
maybe_remove_omp_member_access_dummy_vars (gbind *bind)
{
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
          == POINTER_TYPE))
    {
      tree vars = gimple_bind_vars (bind);
      for (tree *pvar = &vars; *pvar; )
        if (omp_member_access_dummy_var (*pvar))
          *pvar = DECL_CHAIN (*pvar);
        else
          pvar = &DECL_CHAIN (*pvar);
      gimple_bind_set_vars (bind, vars);
    }
}

/* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
   BLOCK and its subblocks.  */

static void
remove_member_access_dummy_vars (tree block)
{
  for (tree *pvar = &BLOCK_VARS (block); *pvar; )
    if (omp_member_access_dummy_var (*pvar))
      *pvar = DECL_CHAIN (*pvar);
    else
      pvar = &DECL_CHAIN (*pvar);

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    remove_member_access_dummy_vars (block);
}

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple *stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}


/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel	.omp_data_s.1.i -> iD.1562
	inner parallel	.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;


    ** This is a problem.  The symbol iD.1562 cannot be referenced
       inside the body of the outer parallel region.  But since we are
       emitting this copy operation while expanding the inner parallel
       directive, we need to access the CTX structure of the outer
       parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

    Since there may be other workshare or parallel directives enclosing
    the parallel directive, it may be necessary to walk up the context
    parent chain.  This is not a problem in general because nested
    parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}


/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}


/* Construct the initialization value for reduction operation OP.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          REAL_VALUE_TYPE min;
          if (HONOR_INFINITIES (type))
            real_arithmetic (&min, NEGATE_EXPR, &dconstinf, NULL);
          else
            real_maxval (&min, 1, TYPE_MODE (type));
          return build_real (type, min);
        }
      else if (POINTER_TYPE_P (type))
        {
          wide_int min
            = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
          return wide_int_to_tree (type, min);
        }
      else
        {
          gcc_assert (INTEGRAL_TYPE_P (type));
          return TYPE_MIN_VALUE (type);
        }

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          REAL_VALUE_TYPE max;
          if (HONOR_INFINITIES (type))
            max = dconstinf;
          else
            real_maxval (&max, 0, TYPE_MODE (type));
          return build_real (type, max);
        }
      else if (POINTER_TYPE_P (type))
        {
          wide_int max
            = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
          return wide_int_to_tree (type, max);
        }
      else
        {
          gcc_assert (INTEGRAL_TYPE_P (type));
          return TYPE_MAX_VALUE (type);
        }

    default:
      gcc_unreachable ();
    }
}
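
/* For instance, for a hypothetical

     double m;
     #pragma omp parallel for reduction (max:m)

   the private copy of m is seeded with
   omp_reduction_init_op (loc, MAX_EXPR, double_type_node), i.e. with
   -Inf when infinities are honored and with the most negative finite
   value otherwise; likewise a + reduction copy starts at 0 and a *
   reduction copy at 1, the identity element of each operation.  */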

/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
                                OMP_CLAUSE_REDUCTION_CODE (clause), type);
}

/* Return the alignment to be assumed for the variable in CLAUSE, which
   should be OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return the implementation-defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar
       classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
        scalar_mode mode = mode_iter.require ();
        machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
        if (GET_MODE_CLASS (vmode) != classes[i + 1])
          continue;
        machine_mode alt_vmode;
        for (unsigned int j = 0; j < modes.length (); ++j)
          if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
              && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
            vmode = alt_vmode;

        tree type = lang_hooks.types.type_for_mode (mode, 1);
        if (type == NULL_TREE || TYPE_MODE (type) != mode)
          continue;
        type = build_vector_type_for_mode (type, vmode);
        if (TYPE_MODE (type) != vmode)
          continue;
        if (TYPE_ALIGN_UNIT (type) > al)
          al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
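
/* E.g. for a hypothetical

     #pragma omp simd aligned (p)

   with no explicit alignment, the loop above inspects the widest
   supported vector mode for each scalar mode; on a target with
   512-bit vectors enabled this would typically yield an assumed
   alignment of 64 bytes, while a target without vector modes keeps
   the initial value of 1.  */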


/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;
  tree lane;
  tree lastlane;
  vec<tree, va_heap> simt_eargs;
  gimple_seq simt_dlist;
  poly_uint64 max_vf;
  bool is_simt;
};

/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
                              omplow_simd_context *sctx, tree &ivar,
                              tree &lvar, tree *rvar = NULL,
                              tree *rvar2 = NULL)
{
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
        {
          tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
                                    OMP_CLAUSE_SAFELEN);
          if (c)
            {
              poly_uint64 safe_len;
              if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
                  || maybe_lt (safe_len, 1U))
                sctx->max_vf = 1;
              else
                sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
            }
        }
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
        {
          for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
               c = OMP_CLAUSE_CHAIN (c))
            {
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
                continue;

              if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
                {
                  /* UDR reductions are not supported yet for SIMT; disable
                     SIMT.  */
                  sctx->max_vf = 1;
                  break;
                }

              if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
                  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
                {
                  /* Doing boolean operations on non-integral types is
                     for conformance only; it's not worth supporting this
                     for SIMT.  */
                  sctx->max_vf = 1;
                  break;
                }
            }
        }
      if (maybe_gt (sctx->max_vf, 1U))
        {
          sctx->idx = create_tmp_var (unsigned_type_node);
          sctx->lane = create_tmp_var (unsigned_type_node);
        }
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      if (is_gimple_reg (new_var))
        {
          ivar = lvar = new_var;
          return true;
        }
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
                                          NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
        TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
        = tree_cons (get_identifier ("omp simd array"), NULL,
                     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
        {
          /* For inscan reductions, create another array temporary,
             which will hold the reduced value.  */
          iavar = create_tmp_var_raw (atype);
          if (TREE_ADDRESSABLE (new_var))
            TREE_ADDRESSABLE (iavar) = 1;
          DECL_ATTRIBUTES (iavar)
            = tree_cons (get_identifier ("omp simd array"), NULL,
                         tree_cons (get_identifier ("omp simd inscan"), NULL,
                                    DECL_ATTRIBUTES (iavar)));
          gimple_add_tmp_var (iavar);
          ctx->cb.decl_map->put (avar, iavar);
          if (sctx->lastlane == NULL_TREE)
            sctx->lastlane = create_tmp_var (unsigned_type_node);
          *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
                          sctx->lastlane, NULL_TREE, NULL_TREE);
          TREE_THIS_NOTRAP (*rvar) = 1;

          if (ctx->scan_exclusive)
            {
              /* And for exclusive scan yet another one, which will
                 hold the value during the scan phase.  */
              tree savar = create_tmp_var_raw (atype);
              if (TREE_ADDRESSABLE (new_var))
                TREE_ADDRESSABLE (savar) = 1;
              DECL_ATTRIBUTES (savar)
                = tree_cons (get_identifier ("omp simd array"), NULL,
                             tree_cons (get_identifier ("omp simd inscan "
                                                        "exclusive"), NULL,
                                        DECL_ATTRIBUTES (savar)));
              gimple_add_tmp_var (savar);
              ctx->cb.decl_map->put (iavar, savar);
              *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
                               sctx->idx, NULL_TREE, NULL_TREE);
              TREE_THIS_NOTRAP (*rvar2) = 1;
            }
        }
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
                     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
                     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
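
/* A sketch of the non-SIMT path above: a private scalar in a
   hypothetical

     #pragma omp simd private (x)

   is, when max_vf > 1, backed by an "omp simd array"

     T x_arr[max_vf];

   Uses of x in the loop body are redirected via DECL_VALUE_EXPR to
   x_arr[lane] (returned as LVAR), while the per-element initialization
   and finalization code emitted by lower_rec_input_clauses uses
   x_arr[idx] (returned as IVAR); the vectorizer can then promote the
   array to a vector register when it vectorizes the loop.  */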

/* Helper function of lower_rec_input_clauses.  For a reference in a simd
   reduction, add an underlying variable for it to reference.  */

static void
handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
{
  tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
  if (TREE_CONSTANT (z))
    {
      z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
                              get_name (new_vard));
      gimple_add_tmp_var (z);
      TREE_ADDRESSABLE (z) = 1;
      z = build_fold_addr_expr_loc (loc, z);
      gimplify_assign (new_vard, z, ilist);
    }
}
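
/* E.g. for a reduction on a reference-typed variable (as Fortran and
   C++ references produce), NEW_VARD is the privatized reference; if
   the referenced type has constant size, the code above materializes
   a fresh addressable temporary of that type and points NEW_VARD at
   it, so the initialization emitted later can store through it.  */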

/* Helper function for lower_rec_input_clauses.  Emit code into the ILIST
   sequence to compute (type) (tskred_temp[idx]).  */

static tree
task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
                     unsigned idx)
{
  unsigned HOST_WIDE_INT sz
    = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
  tree r = build2 (MEM_REF, pointer_sized_int_node,
                   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
                                               idx * sz));
  tree v = create_tmp_var (pointer_sized_int_node);
  gimple *g = gimple_build_assign (v, r);
  gimple_seq_add_stmt (ilist, g);
  if (!useless_type_conversion_p (type, pointer_sized_int_node))
    {
      v = create_tmp_var (type);
      g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
      gimple_seq_add_stmt (ilist, g);
    }
  return v;
}
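
/* The sequence emitted above has roughly this shape (the temporaries
   are made-up names):

     v.1 = MEM <pointer-sized int> [tskred_temp + IDX * sizeof (void *)];
     v.2 = (TYPE) v.1;		// only when TYPE differs

   and the last temporary is what gets returned to the caller.  */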

/* Lower the early initialization of privatized variable NEW_VAR
   if it needs an allocator (i.e. has an allocate clause).  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
                        tree &allocate_ptr, gimple_seq *ilist,
                        omp_context *ctx, bool is_ref, tree size)
{
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (var))
    {
      allocator = NULL_TREE;
      return false;
    }

  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx, OMP_CLAUSE_ALLOCATE);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
        sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
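
/* As a rough sketch, for hypothetical user code

     #pragma omp parallel private (x) allocate (al : x)

   the above emits on the receiver side

     D.ptr = __builtin_GOMP_alloc (align, sizeof (x), (uintptr) al);

   and redirects uses of the private x through DECL_VALUE_EXPR to
   *D.ptr; the matching GOMP_free is emitted by the callers when the
   region ends.  */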
4819
4820/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4821 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4822 private variables. Initialization statements go in ILIST, while calls
4823 to destructors go in DLIST. */
4824
4825static void
4826lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4827 omp_context *ctx, struct omp_for_data *fd)
4828{
4829 tree c, copyin_seq, x, ptr;
4830 bool copyin_by_ref = false;
4831 bool lastprivate_firstprivate = false;
4832 bool reduction_omp_orig_ref = false;
4833 int pass;
4834 bool is_simd = (gimple_code (g: ctx->stmt) == GIMPLE_OMP_FOR
4835 && gimple_omp_for_kind (g: ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4836 omplow_simd_context sctx = omplow_simd_context ();
4837 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4838 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4839 gimple_seq llist[4] = { };
4840 tree nonconst_simd_if = NULL_TREE;
4841
4842 copyin_seq = NULL;
4843 sctx.is_simt = is_simd && omp_find_clause (clauses, kind: OMP_CLAUSE__SIMT_);
4844
4845 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4846 with data sharing clauses referencing variable sized vars. That
4847 is unnecessarily hard to support and very unlikely to result in
4848 vectorized code anyway. */
4849 if (is_simd)
4850 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4851 switch (OMP_CLAUSE_CODE (c))
4852 {
4853 case OMP_CLAUSE_LINEAR:
4854 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4855 sctx.max_vf = 1;
4856 /* FALLTHRU */
4857 case OMP_CLAUSE_PRIVATE:
4858 case OMP_CLAUSE_FIRSTPRIVATE:
4859 case OMP_CLAUSE_LASTPRIVATE:
4860 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4861 sctx.max_vf = 1;
4862 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4863 {
4864 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4865 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4866 sctx.max_vf = 1;
4867 }
4868 break;
4869 case OMP_CLAUSE_REDUCTION:
4870 case OMP_CLAUSE_IN_REDUCTION:
4871 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4872 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4873 sctx.max_vf = 1;
4874 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4875 {
4876 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4877 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4878 sctx.max_vf = 1;
4879 }
4880 break;
4881 case OMP_CLAUSE_IF:
4882 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4883 sctx.max_vf = 1;
4884 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4885 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4886 break;
4887 case OMP_CLAUSE_SIMDLEN:
4888 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4889 sctx.max_vf = 1;
4890 break;
4891 case OMP_CLAUSE__CONDTEMP_:
4892 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4893 if (sctx.is_simt)
4894 sctx.max_vf = 1;
4895 break;
4896 default:
4897 continue;
4898 }
4899
4900 /* Add a placeholder for simduid. */
4901 if (sctx.is_simt && maybe_ne (a: sctx.max_vf, b: 1U))
4902 sctx.simt_eargs.safe_push (NULL_TREE);
4903
4904 unsigned task_reduction_cnt = 0;
4905 unsigned task_reduction_cntorig = 0;
4906 unsigned task_reduction_cnt_full = 0;
4907 unsigned task_reduction_cntorig_full = 0;
4908 unsigned task_reduction_other_cnt = 0;
4909 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4910 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4911 /* Do all the fixed sized types in the first pass, and the variable sized
4912 types in the second pass. This makes sure that the scalar arguments to
4913 the variable sized types are processed before we use them in the
4914 variable sized operations. For task reductions we use 4 passes, in the
4915 first two we ignore them, in the third one gather arguments for
4916 GOMP_task_reduction_remap call and in the last pass actually handle
4917 the task reductions. */
4918 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4919 ? 4 : 2); ++pass)
4920 {
4921 if (pass == 2 && task_reduction_cnt)
4922 {
4923 tskred_atype
4924 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4925 + task_reduction_cntorig);
4926 tskred_avar = create_tmp_var_raw (tskred_atype);
4927 gimple_add_tmp_var (tskred_avar);
4928 TREE_ADDRESSABLE (tskred_avar) = 1;
4929 task_reduction_cnt_full = task_reduction_cnt;
4930 task_reduction_cntorig_full = task_reduction_cntorig;
4931 }
4932 else if (pass == 3 && task_reduction_cnt)
4933 {
4934 x = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4935 gimple *g
4936 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4937 size_int (task_reduction_cntorig),
4938 build_fold_addr_expr (tskred_avar));
4939 gimple_seq_add_stmt (ilist, g);
4940 }
4941 if (pass == 3 && task_reduction_other_cnt)
4942 {
4943 /* For reduction clauses, build
4944 tskred_base = (void *) tskred_temp[2]
4945 + omp_get_thread_num () * tskred_temp[1]
4946 or if tskred_temp[1] is known to be constant, that constant
4947 directly. This is the start of the private reduction copy block
4948 for the current thread. */
4949 tree v = create_tmp_var (integer_type_node);
4950 x = builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM);
4951 gimple *g = gimple_build_call (x, 0);
4952 gimple_call_set_lhs (gs: g, lhs: v);
4953 gimple_seq_add_stmt (ilist, g);
4954 c = omp_find_clause (clauses, kind: OMP_CLAUSE__REDUCTEMP_);
4955 tskred_temp = OMP_CLAUSE_DECL (c);
4956 if (is_taskreg_ctx (ctx))
4957 tskred_temp = lookup_decl (var: tskred_temp, ctx);
4958 tree v2 = create_tmp_var (sizetype);
4959 g = gimple_build_assign (v2, NOP_EXPR, v);
4960 gimple_seq_add_stmt (ilist, g);
4961 if (ctx->task_reductions[0])
4962 v = fold_convert (sizetype, ctx->task_reductions[0]);
4963 else
4964 v = task_reduction_read (ilist, tskred_temp, sizetype, idx: 1);
4965 tree v3 = create_tmp_var (sizetype);
4966 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4967 gimple_seq_add_stmt (ilist, g);
4968 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, idx: 2);
4969 tskred_base = create_tmp_var (ptr_type_node);
4970 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4971 gimple_seq_add_stmt (ilist, g);
4972 }
4973 task_reduction_cnt = 0;
4974 task_reduction_cntorig = 0;
4975 task_reduction_other_cnt = 0;
4976 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4977 {
4978 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4979 tree var, new_var;
4980 bool by_ref;
4981 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4982 bool task_reduction_p = false;
4983 bool task_reduction_needs_orig_p = false;
4984 tree cond = NULL_TREE;
4985 tree allocator, allocate_ptr;
4986
4987 switch (c_kind)
4988 {
4989 case OMP_CLAUSE_PRIVATE:
4990 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4991 continue;
4992 break;
4993 case OMP_CLAUSE_SHARED:
4994 /* Ignore shared directives in teams construct inside
4995 of target construct. */
4996 if (gimple_code (g: ctx->stmt) == GIMPLE_OMP_TEAMS
4997 && !is_host_teams_ctx (ctx))
4998 continue;
4999 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5000 {
5001 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5002 || is_global_var (OMP_CLAUSE_DECL (c)));
5003 continue;
5004 }
5005 case OMP_CLAUSE_FIRSTPRIVATE:
5006 case OMP_CLAUSE_COPYIN:
5007 break;
5008 case OMP_CLAUSE_LINEAR:
5009 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5010 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5011 lastprivate_firstprivate = true;
5012 break;
5013 case OMP_CLAUSE_REDUCTION:
5014 case OMP_CLAUSE_IN_REDUCTION:
5015 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5016 || is_task_ctx (ctx)
5017 || OMP_CLAUSE_REDUCTION_TASK (c))
5018 {
5019 task_reduction_p = true;
5020 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5021 {
5022 task_reduction_other_cnt++;
5023 if (pass == 2)
5024 continue;
5025 }
5026 else
5027 task_reduction_cnt++;
5028 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5029 {
5030 var = OMP_CLAUSE_DECL (c);
5031 /* If var is a global variable that isn't privatized
5032 in outer contexts, we don't need to look up the
5033 original address, it is always the address of the
5034 global variable itself. */
5035 if (!DECL_P (var)
5036 || omp_privatize_by_reference (decl: var)
5037 || !is_global_var
5038 (t: maybe_lookup_decl_in_outer_ctx (decl: var, ctx)))
5039 {
5040 task_reduction_needs_orig_p = true;
5041 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5042 task_reduction_cntorig++;
5043 }
5044 }
5045 }
5046 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5047 reduction_omp_orig_ref = true;
5048 break;
5049 case OMP_CLAUSE__REDUCTEMP_:
5050 if (!is_taskreg_ctx (ctx))
5051 continue;
5052 /* FALLTHRU */
5053 case OMP_CLAUSE__LOOPTEMP_:
5054 /* Handle _looptemp_/_reductemp_ clauses only on
5055 parallel/task. */
5056 if (fd)
5057 continue;
5058 break;
5059 case OMP_CLAUSE_LASTPRIVATE:
5060 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5061 {
5062 lastprivate_firstprivate = true;
5063 if (pass != 0 || is_taskloop_ctx (ctx))
5064 continue;
5065 }
5066 /* Even without corresponding firstprivate, if
5067 decl is Fortran allocatable, it needs outer var
5068 reference. */
5069 else if (pass == 0
5070 && lang_hooks.decls.omp_private_outer_ref
5071 (OMP_CLAUSE_DECL (c)))
5072 lastprivate_firstprivate = true;
5073 break;
5074 case OMP_CLAUSE_ALIGNED:
5075 if (pass != 1)
5076 continue;
5077 var = OMP_CLAUSE_DECL (c);
5078 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5079 && !is_global_var (t: var))
5080 {
5081 new_var = maybe_lookup_decl (var, ctx);
5082 if (new_var == NULL_TREE)
5083 new_var = maybe_lookup_decl_in_outer_ctx (decl: var, ctx);
5084 x = builtin_decl_explicit (fncode: BUILT_IN_ASSUME_ALIGNED);
5085 tree alarg = omp_clause_aligned_alignment (clause: c);
5086 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5087 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5088 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5089 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5090 gimplify_and_add (x, ilist);
5091 }
5092 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5093 && is_global_var (t: var))
5094 {
5095 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5096 new_var = lookup_decl (var, ctx);
5097 t = maybe_lookup_decl_in_outer_ctx (decl: var, ctx);
5098 t = build_fold_addr_expr_loc (clause_loc, t);
5099 t2 = builtin_decl_explicit (fncode: BUILT_IN_ASSUME_ALIGNED);
5100 tree alarg = omp_clause_aligned_alignment (clause: c);
5101 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5102 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5103 t = fold_convert_loc (clause_loc, ptype, t);
5104 x = create_tmp_var (ptype);
5105 t = build2 (MODIFY_EXPR, ptype, x, t);
5106 gimplify_and_add (t, ilist);
5107 t = build_simple_mem_ref_loc (clause_loc, x);
5108 SET_DECL_VALUE_EXPR (new_var, t);
5109 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5110 }
5111 continue;
5112 case OMP_CLAUSE__CONDTEMP_:
5113 if (is_parallel_ctx (ctx)
5114 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5115 break;
5116 continue;
5117 default:
5118 continue;
5119 }
5120
5121 if (task_reduction_p != (pass >= 2))
5122 continue;
5123
5124 allocator = NULL_TREE;
5125 allocate_ptr = NULL_TREE;
5126 new_var = var = OMP_CLAUSE_DECL (c);
5127 if ((c_kind == OMP_CLAUSE_REDUCTION
5128 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5129 && TREE_CODE (var) == MEM_REF)
5130 {
5131 var = TREE_OPERAND (var, 0);
5132 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5133 var = TREE_OPERAND (var, 0);
5134 if (TREE_CODE (var) == INDIRECT_REF
5135 || TREE_CODE (var) == ADDR_EXPR)
5136 var = TREE_OPERAND (var, 0);
5137 if (is_variable_sized (expr: var))
5138 {
5139 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5140 var = DECL_VALUE_EXPR (var);
5141 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5142 var = TREE_OPERAND (var, 0);
5143 gcc_assert (DECL_P (var));
5144 }
5145 new_var = var;
5146 }
5147 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (stmt: ctx->stmt))
5148 {
5149 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5150 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5151 }
5152 else if (c_kind != OMP_CLAUSE_COPYIN)
5153 new_var = lookup_decl (var, ctx);
5154
5155 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5156 {
5157 if (pass != 0)
5158 continue;
5159 }
5160 /* C/C++ array section reductions. */
5161 else if ((c_kind == OMP_CLAUSE_REDUCTION
5162 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5163 && var != OMP_CLAUSE_DECL (c))
5164 {
5165 if (pass == 0)
5166 continue;
5167
5168 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5169 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5170
5171 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5172 {
5173 tree b = TREE_OPERAND (orig_var, 1);
5174 if (is_omp_target (ctx->stmt))
5175 b = NULL_TREE;
5176 else
5177 b = maybe_lookup_decl (b, ctx);
5178 if (b == NULL)
5179 {
5180 b = TREE_OPERAND (orig_var, 1);
5181 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5182 }
5183 if (integer_zerop (bias))
5184 bias = b;
5185 else
5186 {
5187 bias = fold_convert_loc (clause_loc,
5188 TREE_TYPE (b), bias);
5189 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5190 TREE_TYPE (b), b, bias);
5191 }
5192 orig_var = TREE_OPERAND (orig_var, 0);
5193 }
5194 if (pass == 2)
5195 {
5196 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5197 if (is_global_var (out)
5198 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5199 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5200 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5201 != POINTER_TYPE)))
5202 x = var;
5203 else if (is_omp_target (ctx->stmt))
5204 x = out;
5205 else
5206 {
5207 bool by_ref = use_pointer_for_field (var, NULL);
5208 x = build_receiver_ref (var, by_ref, ctx);
5209 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5210 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5211 == POINTER_TYPE))
5212 x = build_fold_addr_expr (x);
5213 }
5214 if (TREE_CODE (orig_var) == INDIRECT_REF)
5215 x = build_simple_mem_ref (x);
5216 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5217 {
5218 if (var == TREE_OPERAND (orig_var, 0))
5219 x = build_fold_addr_expr (x);
5220 }
5221 bias = fold_convert (sizetype, bias);
5222 x = fold_convert (ptr_type_node, x);
5223 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5224 TREE_TYPE (x), x, bias);
5225 unsigned cnt = task_reduction_cnt - 1;
5226 if (!task_reduction_needs_orig_p)
5227 cnt += (task_reduction_cntorig_full
5228 - task_reduction_cntorig);
5229 else
5230 cnt = task_reduction_cntorig - 1;
5231 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5232 size_int (cnt), NULL_TREE, NULL_TREE);
5233 gimplify_assign (r, x, ilist);
5234 continue;
5235 }
5236
5237 if (TREE_CODE (orig_var) == INDIRECT_REF
5238 || TREE_CODE (orig_var) == ADDR_EXPR)
5239 orig_var = TREE_OPERAND (orig_var, 0);
5240 tree d = OMP_CLAUSE_DECL (c);
5241 tree type = TREE_TYPE (d);
5242 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5243 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5244 tree sz = v;
5245 const char *name = get_name (orig_var);
5246 if (pass != 3 && !TREE_CONSTANT (v))
5247 {
5248 tree t;
5249 if (is_omp_target (ctx->stmt))
5250 t = NULL_TREE;
5251 else
5252 t = maybe_lookup_decl (v, ctx);
5253 if (t)
5254 v = t;
5255 else
5256 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5257 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5258 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5259 TREE_TYPE (v), v,
5260 build_int_cst (TREE_TYPE (v), 1));
5261 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5262 TREE_TYPE (v), t,
5263 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5264 }
5265 if (pass == 3)
5266 {
5267 tree xv = create_tmp_var (ptr_type_node);
5268 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5269 {
5270 unsigned cnt = task_reduction_cnt - 1;
5271 if (!task_reduction_needs_orig_p)
5272 cnt += (task_reduction_cntorig_full
5273 - task_reduction_cntorig);
5274 else
5275 cnt = task_reduction_cntorig - 1;
5276 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5277 size_int (cnt), NULL_TREE, NULL_TREE);
5278
5279 gimple *g = gimple_build_assign (xv, x);
5280 gimple_seq_add_stmt (ilist, g);
5281 }
5282 else
5283 {
5284 unsigned int idx = *ctx->task_reduction_map->get (c);
5285 tree off;
5286 if (ctx->task_reductions[1 + idx])
5287 off = fold_convert (sizetype,
5288 ctx->task_reductions[1 + idx]);
5289 else
5290 off = task_reduction_read (ilist, tskred_temp, sizetype,
5291 7 + 3 * idx + 1);
5292 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5293 tskred_base, off);
5294 gimple_seq_add_stmt (ilist, g);
5295 }
5296 x = fold_convert (build_pointer_type (boolean_type_node),
5297 xv);
5298 if (TREE_CONSTANT (v))
5299 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5300 TYPE_SIZE_UNIT (type));
5301 else
5302 {
5303 tree t;
5304 if (is_omp_target (ctx->stmt))
5305 t = NULL_TREE;
5306 else
5307 t = maybe_lookup_decl (v, ctx);
5308 if (t)
5309 v = t;
5310 else
5311 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5312 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5313 fb_rvalue);
5314 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5315 TREE_TYPE (v), v,
5316 build_int_cst (TREE_TYPE (v), 1));
5317 t = fold_build2_loc (clause_loc, MULT_EXPR,
5318 TREE_TYPE (v), t,
5319 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5320 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5321 }
5322 cond = create_tmp_var (TREE_TYPE (x));
5323 gimplify_assign (cond, x, ilist);
5324 x = xv;
5325 }
5326 else if (lower_private_allocate (var, type, allocator,
5327 allocate_ptr, ilist, ctx,
5328 true,
5329 TREE_CONSTANT (v)
5330 ? TYPE_SIZE_UNIT (type)
5331 : sz))
5332 x = allocate_ptr;
5333 else if (TREE_CONSTANT (v))
5334 {
5335 x = create_tmp_var_raw (type, name);
5336 gimple_add_tmp_var (x);
5337 TREE_ADDRESSABLE (x) = 1;
5338 x = build_fold_addr_expr_loc (clause_loc, x);
5339 }
5340 else
5341 {
5342 tree atmp
5343 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5344 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5345 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5346 }
5347
5348 tree ptype = build_pointer_type (TREE_TYPE (type));
5349 x = fold_convert_loc (clause_loc, ptype, x);
5350 tree y = create_tmp_var (ptype, name);
5351 gimplify_assign (y, x, ilist);
5352 x = y;
5353 tree yb = y;
5354
5355 if (!integer_zerop (bias))
5356 {
5357 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5358 bias);
5359 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5360 x);
5361 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5362 pointer_sized_int_node, yb, bias);
5363 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5364 yb = create_tmp_var (ptype, name);
5365 gimplify_assign (yb, x, ilist);
5366 x = yb;
5367 }
5368
5369 d = TREE_OPERAND (d, 0);
5370 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5371 d = TREE_OPERAND (d, 0);
5372 if (TREE_CODE (d) == ADDR_EXPR)
5373 {
5374 if (orig_var != var)
5375 {
5376 gcc_assert (is_variable_sized (orig_var));
5377 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5378 x);
5379 gimplify_assign (new_var, x, ilist);
5380 tree new_orig_var = lookup_decl (orig_var, ctx);
5381 tree t = build_fold_indirect_ref (new_var);
5382 DECL_IGNORED_P (new_var) = 0;
5383 TREE_THIS_NOTRAP (t) = 1;
5384 SET_DECL_VALUE_EXPR (new_orig_var, t);
5385 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5386 }
5387 else
5388 {
5389 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5390 build_int_cst (ptype, 0));
5391 SET_DECL_VALUE_EXPR (new_var, x);
5392 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5393 }
5394 }
5395 else
5396 {
5397 gcc_assert (orig_var == var);
5398 if (TREE_CODE (d) == INDIRECT_REF)
5399 {
5400 x = create_tmp_var (ptype, name);
5401 TREE_ADDRESSABLE (x) = 1;
5402 gimplify_assign (x, yb, ilist);
5403 x = build_fold_addr_expr_loc (clause_loc, x);
5404 }
5405 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5406 gimplify_assign (new_var, x, ilist);
5407 }
5408 /* GOMP_taskgroup_reduction_register memsets the whole
5409 array to zero. If the initializer is zero, we don't
5410 need to initialize it again, just mark it as ever
5411 used unconditionally, i.e. cond = true. */
5412 if (cond
5413 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5414 && initializer_zerop (omp_reduction_init (c,
5415 TREE_TYPE (type))))
5416 {
5417 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5418 boolean_true_node);
5419 gimple_seq_add_stmt (ilist, g);
5420 continue;
5421 }
5422 tree end = create_artificial_label (UNKNOWN_LOCATION);
5423 if (cond)
5424 {
5425 gimple *g;
5426 if (!is_parallel_ctx (ctx))
5427 {
5428 tree condv = create_tmp_var (boolean_type_node);
5429 g = gimple_build_assign (condv,
5430 build_simple_mem_ref (cond));
5431 gimple_seq_add_stmt (ilist, g);
5432 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5433 g = gimple_build_cond (NE_EXPR, condv,
5434 boolean_false_node, end, lab1);
5435 gimple_seq_add_stmt (ilist, g);
5436 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5437 }
5438 g = gimple_build_assign (build_simple_mem_ref (cond),
5439 boolean_true_node);
5440 gimple_seq_add_stmt (ilist, g);
5441 }
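/* Roughly, the code built below initializes the privatized array
   section with a loop (names illustrative):
     i = 0; y1 = y;
   body:
     *y1 = <init value or UDR initializer>;
     y1 += sizeof (elt); i++;
     if (i <= v) goto body;
   end:
   with a matching per-element merge/destructor loop appended to
   DLIST when Y2/Y4 are in use.  */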
5442
5443 tree y1 = create_tmp_var (ptype);
5444 gimplify_assign (y1, y, ilist);
5445 tree i2 = NULL_TREE, y2 = NULL_TREE;
5446 tree body2 = NULL_TREE, end2 = NULL_TREE;
5447 tree y3 = NULL_TREE, y4 = NULL_TREE;
5448 if (task_reduction_needs_orig_p)
5449 {
5450 y3 = create_tmp_var (ptype);
5451 tree ref;
5452 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5453 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5454 size_int (task_reduction_cnt_full
5455 + task_reduction_cntorig - 1),
5456 NULL_TREE, NULL_TREE);
5457 else
5458 {
5459 unsigned int idx = *ctx->task_reduction_map->get (c);
5460 ref = task_reduction_read (ilist, tskred_temp, ptype,
5461 7 + 3 * idx);
5462 }
5463 gimplify_assign (y3, ref, ilist);
5464 }
5465 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5466 {
5467 if (pass != 3)
5468 {
5469 y2 = create_tmp_var (ptype);
5470 gimplify_assign (y2, y, ilist);
5471 }
5472 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5473 {
5474 tree ref = build_outer_var_ref (var, ctx);
5475 /* For references, build_outer_var_ref already performs this. */
5476 if (TREE_CODE (d) == INDIRECT_REF)
5477 gcc_assert (omp_privatize_by_reference (var));
5478 else if (TREE_CODE (d) == ADDR_EXPR)
5479 ref = build_fold_addr_expr (ref);
5480 else if (omp_privatize_by_reference (var))
5481 ref = build_fold_addr_expr (ref);
5482 ref = fold_convert_loc (clause_loc, ptype, ref);
5483 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5484 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5485 {
5486 y3 = create_tmp_var (ptype);
5487 gimplify_assign (y3, unshare_expr (ref), ilist);
5488 }
5489 if (is_simd)
5490 {
5491 y4 = create_tmp_var (ptype);
5492 gimplify_assign (y4, ref, dlist);
5493 }
5494 }
5495 }
5496 tree i = create_tmp_var (TREE_TYPE (v));
5497 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5498 tree body = create_artificial_label (UNKNOWN_LOCATION);
5499 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5500 if (y2)
5501 {
5502 i2 = create_tmp_var (TREE_TYPE (v));
5503 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5504 body2 = create_artificial_label (UNKNOWN_LOCATION);
5505 end2 = create_artificial_label (UNKNOWN_LOCATION);
5506 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5507 }
5508 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5509 {
5510 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5511 tree decl_placeholder
5512 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5513 SET_DECL_VALUE_EXPR (decl_placeholder,
5514 build_simple_mem_ref (y1));
5515 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5516 SET_DECL_VALUE_EXPR (placeholder,
5517 y3 ? build_simple_mem_ref (y3)
5518 : error_mark_node);
5519 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5520 x = lang_hooks.decls.omp_clause_default_ctor
5521 (c, build_simple_mem_ref (y1),
5522 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5523 if (x)
5524 gimplify_and_add (x, ilist);
5525 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5526 {
5527 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5528 lower_omp (&tseq, ctx);
5529 gimple_seq_add_seq (ilist, tseq);
5530 }
5531 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5532 if (is_simd)
5533 {
5534 SET_DECL_VALUE_EXPR (decl_placeholder,
5535 build_simple_mem_ref (y2));
5536 SET_DECL_VALUE_EXPR (placeholder,
5537 build_simple_mem_ref (y4));
5538 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5539 lower_omp (&tseq, ctx);
5540 gimple_seq_add_seq (dlist, tseq);
5541 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5542 }
5543 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5544 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5545 if (y2)
5546 {
5547 x = lang_hooks.decls.omp_clause_dtor
5548 (c, build_simple_mem_ref (y2));
5549 if (x)
5550 gimplify_and_add (x, dlist);
5551 }
5552 }
5553 else
5554 {
5555 x = omp_reduction_init (c, TREE_TYPE (type));
5556 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5557
5558 /* reduction(-:var) sums up the partial results, so it
5559 acts identically to reduction(+:var). */
5560 if (code == MINUS_EXPR)
5561 code = PLUS_EXPR;
5562
5563 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5564 if (is_simd)
5565 {
5566 x = build2 (code, TREE_TYPE (type),
5567 build_simple_mem_ref (y4),
5568 build_simple_mem_ref (y2));
5569 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5570 }
5571 }
5572 gimple *g
5573 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5574 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5575 gimple_seq_add_stmt (ilist, g);
5576 if (y3)
5577 {
5578 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5579 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5580 gimple_seq_add_stmt (ilist, g);
5581 }
5582 g = gimple_build_assign (i, PLUS_EXPR, i,
5583 build_int_cst (TREE_TYPE (i), 1));
5584 gimple_seq_add_stmt (ilist, g);
5585 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5586 gimple_seq_add_stmt (ilist, g);
5587 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5588 if (y2)
5589 {
5590 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5591 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5592 gimple_seq_add_stmt (dlist, g);
5593 if (y4)
5594 {
5595 g = gimple_build_assign
5596 (y4, POINTER_PLUS_EXPR, y4,
5597 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5598 gimple_seq_add_stmt (dlist, g);
5599 }
5600 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5601 build_int_cst (TREE_TYPE (i2), 1));
5602 gimple_seq_add_stmt (dlist, g);
5603 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5604 gimple_seq_add_stmt (dlist, g);
5605 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5606 }
5607 if (allocator)
5608 {
5609 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5610 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5611 gimple_seq_add_stmt (dlist, g);
5612 }
5613 continue;
5614 }
5615 else if (pass == 2)
5616 {
5617 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5618 if (is_global_var (out))
5619 x = var;
5620 else if (is_omp_target (ctx->stmt))
5621 x = out;
5622 else
5623 {
5624 bool by_ref = use_pointer_for_field (var, ctx);
5625 x = build_receiver_ref (var, by_ref, ctx);
5626 }
5627 if (!omp_privatize_by_reference (var))
5628 x = build_fold_addr_expr (x);
5629 x = fold_convert (ptr_type_node, x);
5630 unsigned cnt = task_reduction_cnt - 1;
5631 if (!task_reduction_needs_orig_p)
5632 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5633 else
5634 cnt = task_reduction_cntorig - 1;
5635 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5636 size_int (cnt), NULL_TREE, NULL_TREE);
5637 gimplify_assign (r, x, ilist);
5638 continue;
5639 }
5640 else if (pass == 3)
5641 {
5642 tree type = TREE_TYPE (new_var);
5643 if (!omp_privatize_by_reference (var))
5644 type = build_pointer_type (type);
5645 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5646 {
5647 unsigned cnt = task_reduction_cnt - 1;
5648 if (!task_reduction_needs_orig_p)
5649 cnt += (task_reduction_cntorig_full
5650 - task_reduction_cntorig);
5651 else
5652 cnt = task_reduction_cntorig - 1;
5653 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5654 size_int (cnt), NULL_TREE, NULL_TREE);
5655 }
5656 else
5657 {
5658 unsigned int idx = *ctx->task_reduction_map->get (c);
5659 tree off;
5660 if (ctx->task_reductions[1 + idx])
5661 off = fold_convert (sizetype,
5662 ctx->task_reductions[1 + idx]);
5663 else
5664 off = task_reduction_read (ilist, tskred_temp, sizetype,
5665 7 + 3 * idx + 1);
5666 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5667 tskred_base, off);
5668 }
5669 x = fold_convert (type, x);
5670 tree t;
5671 if (omp_privatize_by_reference (var))
5672 {
5673 gimplify_assign (new_var, x, ilist);
5674 t = new_var;
5675 new_var = build_simple_mem_ref (new_var);
5676 }
5677 else
5678 {
5679 t = create_tmp_var (type);
5680 gimplify_assign (t, x, ilist);
5681 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5682 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5683 }
5684 t = fold_convert (build_pointer_type (boolean_type_node), t);
5685 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5686 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5687 cond = create_tmp_var (TREE_TYPE (t));
5688 gimplify_assign (cond, t, ilist);
5689 }
5690 else if (is_variable_sized (var))
5691 {
5692 /* For variable sized types, we need to allocate the
5693 actual storage here. Call alloca and store the
5694 result in the pointer decl that we created elsewhere. */
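/* E.g. for 'int vla[n]' privatized on the construct, this emits
   roughly
     ptr = __builtin_alloca_with_align (sizeof (int) * n,
                                        DECL_ALIGN (vla));
   (an illustrative sketch of the statements added to ILIST).  */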
5695 if (pass == 0)
5696 continue;
5697
5698 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5699 {
5700 tree tmp;
5701
5702 ptr = DECL_VALUE_EXPR (new_var);
5703 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5704 ptr = TREE_OPERAND (ptr, 0);
5705 gcc_assert (DECL_P (ptr));
5706 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5707
5708 if (lower_private_allocate (var, new_var, allocator,
5709 allocate_ptr, ilist, ctx,
5710 false, x))
5711 tmp = allocate_ptr;
5712 else
5713 {
5714 /* void *tmp = __builtin_alloca */
5715 tree atmp
5716 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5717 gcall *stmt
5718 = gimple_build_call (atmp, 2, x,
5719 size_int (DECL_ALIGN (var)));
5720 cfun->calls_alloca = 1;
5721 tmp = create_tmp_var_raw (ptr_type_node);
5722 gimple_add_tmp_var (tmp);
5723 gimple_call_set_lhs (stmt, tmp);
5724
5725 gimple_seq_add_stmt (ilist, stmt);
5726 }
5727
5728 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5729 gimplify_assign (ptr, x, ilist);
5730 }
5731 }
5732 else if (omp_privatize_by_reference (var)
5733 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5734 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5735 {
5736 /* For references that are being privatized for Fortran,
5737 allocate new backing storage for the new pointer
5738 variable. This allows us to avoid changing all the
5739 code that expects a pointer to something that expects
5740 a direct variable. */
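/* Sketch: for a reference NEW_VAR we emit, roughly,
     new_var = &<fresh storage of TREE_TYPE (TREE_TYPE (new_var))>;
   (via a named temporary or alloca below) and then rewrite NEW_VAR
   to *NEW_VAR for the rest of this function (illustrative).  */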
5741 if (pass == 0)
5742 continue;
5743
5744 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5745 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5746 {
5747 x = build_receiver_ref (var, false, ctx);
5748 if (ctx->allocate_map)
5749 if (tree *allocatep = ctx->allocate_map->get (var))
5750 {
5751 allocator = *allocatep;
5752 if (TREE_CODE (allocator) == TREE_LIST)
5753 allocator = TREE_PURPOSE (allocator);
5754 if (TREE_CODE (allocator) != INTEGER_CST)
5755 allocator = build_outer_var_ref (allocator, ctx);
5756 allocator = fold_convert (pointer_sized_int_node,
5757 allocator);
5758 allocate_ptr = unshare_expr (x);
5759 }
5760 if (allocator == NULL_TREE)
5761 x = build_fold_addr_expr_loc (clause_loc, x);
5762 }
5763 else if (lower_private_allocate (var, new_var, allocator,
5764 allocate_ptr,
5765 ilist, ctx, true, x))
5766 x = allocate_ptr;
5767 else if (TREE_CONSTANT (x))
5768 {
5769 /* For a reduction in a SIMD loop, defer adding the
5770 initialization of the reference, because if we decide
5771 to use a SIMD array for it, the initialization could cause
5772 an expansion ICE. Ditto for other privatization clauses. */
5773 if (is_simd)
5774 x = NULL_TREE;
5775 else
5776 {
5777 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5778 get_name (var));
5779 gimple_add_tmp_var (x);
5780 TREE_ADDRESSABLE (x) = 1;
5781 x = build_fold_addr_expr_loc (clause_loc, x);
5782 }
5783 }
5784 else
5785 {
5786 tree atmp
5787 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5788 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5789 tree al = size_int (TYPE_ALIGN (rtype));
5790 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5791 }
5792
5793 if (x)
5794 {
5795 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5796 gimplify_assign (new_var, x, ilist);
5797 }
5798
5799 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5800 }
5801 else if ((c_kind == OMP_CLAUSE_REDUCTION
5802 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5803 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5804 {
5805 if (pass == 0)
5806 continue;
5807 }
5808 else if (pass != 0)
5809 continue;
5810
5811 switch (OMP_CLAUSE_CODE (c))
5812 {
5813 case OMP_CLAUSE_SHARED:
5814 /* Ignore shared directives in teams construct inside
5815 target construct. */
5816 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5817 && !is_host_teams_ctx (ctx))
5818 continue;
5819 /* Shared global vars are just accessed directly. */
5820 if (is_global_var (new_var))
5821 break;
5822 /* For taskloop firstprivate/lastprivate, represented
5823 as firstprivate and shared clause on the task, new_var
5824 is the firstprivate var. */
5825 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5826 break;
5827 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5828 needs to be delayed until after fixup_child_record_type so
5829 that we get the correct type during the dereference. */
5830 by_ref = use_pointer_for_field (var, ctx);
5831 x = build_receiver_ref (var, by_ref, ctx);
5832 SET_DECL_VALUE_EXPR (new_var, x);
5833 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5834
5835 /* ??? If VAR is not passed by reference, and the variable
5836 hasn't been initialized yet, then we'll get a warning for
5837 the store into the omp_data_s structure. Ideally, we'd be
5838 able to notice this and not store anything at all, but
5839 we're generating code too early. Suppress the warning. */
5840 if (!by_ref)
5841 suppress_warning (var, OPT_Wuninitialized);
5842 break;
5843
5844 case OMP_CLAUSE__CONDTEMP_:
5845 if (is_parallel_ctx (ctx))
5846 {
5847 x = build_receiver_ref (var, false, ctx);
5848 SET_DECL_VALUE_EXPR (new_var, x);
5849 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5850 }
5851 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5852 {
5853 x = build_zero_cst (TREE_TYPE (var));
5854 goto do_private;
5855 }
5856 break;
5857
5858 case OMP_CLAUSE_LASTPRIVATE:
5859 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5860 break;
5861 /* FALLTHRU */
5862
5863 case OMP_CLAUSE_PRIVATE:
5864 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5865 x = build_outer_var_ref (var, ctx);
5866 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5867 {
5868 if (is_task_ctx (ctx))
5869 x = build_receiver_ref (var, false, ctx);
5870 else
5871 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5872 }
5873 else
5874 x = NULL;
5875 do_private:
5876 tree nx;
5877 bool copy_ctor;
5878 copy_ctor = false;
5879 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5880 ilist, ctx, false, NULL_TREE);
5881 nx = unshare_expr (new_var);
5882 if (is_simd
5883 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5884 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5885 copy_ctor = true;
5886 if (copy_ctor)
5887 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5888 else
5889 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5890 if (is_simd)
5891 {
5892 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5893 if ((TREE_ADDRESSABLE (new_var) || nx || y
5894 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5895 && (gimple_omp_for_collapse (ctx->stmt) != 1
5896 || (gimple_omp_for_index (ctx->stmt, 0)
5897 != new_var)))
5898 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5899 || omp_privatize_by_reference (var))
5900 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5901 ivar, lvar))
5902 {
5903 if (omp_privatize_by_reference (var))
5904 {
5905 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5906 tree new_vard = TREE_OPERAND (new_var, 0);
5907 gcc_assert (DECL_P (new_vard));
5908 SET_DECL_VALUE_EXPR (new_vard,
5909 build_fold_addr_expr (lvar));
5910 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5911 }
5912
5913 if (nx)
5914 {
5915 tree iv = unshare_expr (ivar);
5916 if (copy_ctor)
5917 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5918 x);
5919 else
5920 x = lang_hooks.decls.omp_clause_default_ctor (c,
5921 iv,
5922 x);
5923 }
5924 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5925 {
5926 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5927 unshare_expr (ivar), x);
5928 nx = x;
5929 }
5930 if (nx && x)
5931 gimplify_and_add (x, &llist[0]);
5932 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5933 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5934 {
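/* Sketch of the lastprivate(conditional:) bookkeeping built
   below (names illustrative): per lane, in the epilogue loop,
     if (t < condtemp[lane])
       { t = condtemp[lane]; orig_v = var[lane]; }
   where T starts at 0, so ORIG_V receives the value stored by
   the highest-numbered iteration that actually assigned it.  */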
5935 tree v = new_var;
5936 if (!DECL_P (v))
5937 {
5938 gcc_assert (TREE_CODE (v) == MEM_REF);
5939 v = TREE_OPERAND (v, 0);
5940 gcc_assert (DECL_P (v));
5941 }
5942 v = *ctx->lastprivate_conditional_map->get (v);
5943 tree t = create_tmp_var (TREE_TYPE (v));
5944 tree z = build_zero_cst (TREE_TYPE (v));
5945 tree orig_v
5946 = build_outer_var_ref (var, ctx,
5947 OMP_CLAUSE_LASTPRIVATE);
5948 gimple_seq_add_stmt (dlist,
5949 gimple_build_assign (t, z));
5950 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5951 tree civar = DECL_VALUE_EXPR (v);
5952 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5953 civar = unshare_expr (civar);
5954 TREE_OPERAND (civar, 1) = sctx.idx;
5955 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5956 unshare_expr (civar));
5957 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5958 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5959 orig_v, unshare_expr (ivar)));
5960 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5961 civar);
5962 x = build3 (COND_EXPR, void_type_node, cond, x,
5963 void_node);
5964 gimple_seq tseq = NULL;
5965 gimplify_and_add (x, &tseq);
5966 if (ctx->outer)
5967 lower_omp (&tseq, ctx->outer);
5968 gimple_seq_add_seq (&llist[1], tseq);
5969 }
5970 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5971 && ctx->for_simd_scan_phase)
5972 {
5973 x = unshare_expr (ivar);
5974 tree orig_v
5975 = build_outer_var_ref (var, ctx,
5976 OMP_CLAUSE_LASTPRIVATE);
5977 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5978 orig_v);
5979 gimplify_and_add (x, &llist[0]);
5980 }
5981 if (y)
5982 {
5983 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5984 if (y)
5985 gimplify_and_add (y, &llist[1]);
5986 }
5987 break;
5988 }
5989 if (omp_privatize_by_reference (var))
5990 {
5991 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5992 tree new_vard = TREE_OPERAND (new_var, 0);
5993 gcc_assert (DECL_P (new_vard));
5994 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5995 x = TYPE_SIZE_UNIT (type);
5996 if (TREE_CONSTANT (x))
5997 {
5998 x = create_tmp_var_raw (type, get_name (var));
5999 gimple_add_tmp_var (x);
6000 TREE_ADDRESSABLE (x) = 1;
6001 x = build_fold_addr_expr_loc (clause_loc, x);
6002 x = fold_convert_loc (clause_loc,
6003 TREE_TYPE (new_vard), x);
6004 gimplify_assign (new_vard, x, ilist);
6005 }
6006 }
6007 }
6008 if (nx)
6009 gimplify_and_add (nx, ilist);
6010 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6011 && is_simd
6012 && ctx->for_simd_scan_phase)
6013 {
6014 tree orig_v = build_outer_var_ref (var, ctx,
6015 OMP_CLAUSE_LASTPRIVATE);
6016 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6017 orig_v);
6018 gimplify_and_add (x, ilist);
6019 }
6020 /* FALLTHRU */
6021
6022 do_dtor:
6023 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6024 if (x)
6025 gimplify_and_add (x, dlist);
6026 if (allocator)
6027 {
6028 if (!is_gimple_val (allocator))
6029 {
6030 tree avar = create_tmp_var (TREE_TYPE (allocator));
6031 gimplify_assign (avar, allocator, dlist);
6032 allocator = avar;
6033 }
6034 if (!is_gimple_val (allocate_ptr))
6035 {
6036 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6037 gimplify_assign (apvar, allocate_ptr, dlist);
6038 allocate_ptr = apvar;
6039 }
6040 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6041 gimple *g
6042 = gimple_build_call (f, 2, allocate_ptr, allocator);
6043 gimple_seq_add_stmt (dlist, g);
6044 }
6045 break;
6046
6047 case OMP_CLAUSE_LINEAR:
6048 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6049 goto do_firstprivate;
6050 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6051 x = NULL;
6052 else
6053 x = build_outer_var_ref (var, ctx);
6054 goto do_private;
6055
6056 case OMP_CLAUSE_FIRSTPRIVATE:
6057 if (is_task_ctx (ctx))
6058 {
6059 if ((omp_privatize_by_reference (var)
6060 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6061 || is_variable_sized (var))
6062 goto do_dtor;
6063 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6064 ctx))
6065 || use_pointer_for_field (var, NULL))
6066 {
6067 x = build_receiver_ref (var, by_ref: false, ctx);
6068 if (ctx->allocate_map)
6069 if (tree *allocatep = ctx->allocate_map->get (var))
6070 {
6071 allocator = *allocatep;
6072 if (TREE_CODE (allocator) == TREE_LIST)
6073 allocator = TREE_PURPOSE (allocator);
6074 if (TREE_CODE (allocator) != INTEGER_CST)
6075 allocator = build_outer_var_ref (allocator, ctx);
6076 allocator = fold_convert (pointer_sized_int_node,
6077 allocator);
6078 allocate_ptr = unshare_expr (x);
6079 x = build_simple_mem_ref (x);
6080 TREE_THIS_NOTRAP (x) = 1;
6081 }
6082 SET_DECL_VALUE_EXPR (new_var, x);
6083 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6084 goto do_dtor;
6085 }
6086 }
6087 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6088 && omp_privatize_by_reference (var))
6089 {
6090 x = build_outer_var_ref (var, ctx);
6091 gcc_assert (TREE_CODE (x) == MEM_REF
6092 && integer_zerop (TREE_OPERAND (x, 1)));
6093 x = TREE_OPERAND (x, 0);
6094 x = lang_hooks.decls.omp_clause_copy_ctor
6095 (c, unshare_expr (new_var), x);
6096 gimplify_and_add (x, ilist);
6097 goto do_dtor;
6098 }
6099 do_firstprivate:
6100 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6101 ilist, ctx, false, NULL_TREE);
6102 x = build_outer_var_ref (var, ctx);
6103 if (is_simd)
6104 {
6105 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6106 && gimple_omp_for_combined_into_p (ctx->stmt))
6107 {
6108 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6109 if (DECL_P (t))
6110 t = build_outer_var_ref (t, ctx);
6111 tree stept = TREE_TYPE (t);
6112 tree ct = omp_find_clause (clauses,
6113 OMP_CLAUSE__LOOPTEMP_);
6114 gcc_assert (ct);
6115 tree l = OMP_CLAUSE_DECL (ct);
6116 tree n1 = fd->loop.n1;
6117 tree step = fd->loop.step;
6118 tree itype = TREE_TYPE (l);
6119 if (POINTER_TYPE_P (itype))
6120 itype = signed_type_for (itype);
6121 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6122 if (TYPE_UNSIGNED (itype)
6123 && fd->loop.cond_code == GT_EXPR)
6124 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6125 fold_build1 (NEGATE_EXPR, itype, l),
6126 fold_build1 (NEGATE_EXPR,
6127 itype, step));
6128 else
6129 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6130 t = fold_build2 (MULT_EXPR, stept,
6131 fold_convert (stept, l), t);
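/* At this point T is this thread's linear offset, roughly
   ((l - n1) / step) * linear_step, so the privatized copy can
   be initialized to <orig> + t below (illustrative summary).  */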
6132
6133 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6134 {
6135 if (omp_privatize_by_reference (var))
6136 {
6137 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6138 tree new_vard = TREE_OPERAND (new_var, 0);
6139 gcc_assert (DECL_P (new_vard));
6140 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6141 nx = TYPE_SIZE_UNIT (type);
6142 if (TREE_CONSTANT (nx))
6143 {
6144 nx = create_tmp_var_raw (type,
6145 get_name (var));
6146 gimple_add_tmp_var (nx);
6147 TREE_ADDRESSABLE (nx) = 1;
6148 nx = build_fold_addr_expr_loc (clause_loc,
6149 nx);
6150 nx = fold_convert_loc (clause_loc,
6151 TREE_TYPE (new_vard),
6152 nx);
6153 gimplify_assign (new_vard, nx, ilist);
6154 }
6155 }
6156
6157 x = lang_hooks.decls.omp_clause_linear_ctor
6158 (c, new_var, x, t);
6159 gimplify_and_add (x, ilist);
6160 goto do_dtor;
6161 }
6162
6163 if (POINTER_TYPE_P (TREE_TYPE (x)))
6164 x = fold_build_pointer_plus (x, t);
6165 else
6166 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6167 fold_convert (TREE_TYPE (x), t));
6168 }
6169
6170 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6171 || TREE_ADDRESSABLE (new_var)
6172 || omp_privatize_by_reference (var))
6173 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6174 ivar, lvar))
6175 {
6176 if (omp_privatize_by_reference (var))
6177 {
6178 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6179 tree new_vard = TREE_OPERAND (new_var, 0);
6180 gcc_assert (DECL_P (new_vard));
6181 SET_DECL_VALUE_EXPR (new_vard,
6182 build_fold_addr_expr (lvar));
6183 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6184 }
6185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6186 {
6187 tree iv = create_tmp_var (TREE_TYPE (new_var));
6188 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6189 gimplify_and_add (x, ilist);
6190 gimple_stmt_iterator gsi
6191 = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6192 gassign *g
6193 = gimple_build_assign (unshare_expr (lvar), iv);
6194 gsi_insert_before_without_update (&gsi, g,
6195 GSI_SAME_STMT);
6196 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6197 enum tree_code code = PLUS_EXPR;
6198 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6199 code = POINTER_PLUS_EXPR;
6200 g = gimple_build_assign (iv, code, iv, t);
6201 gsi_insert_before_without_update (&gsi, g,
6202 GSI_SAME_STMT);
6203 break;
6204 }
6205 x = lang_hooks.decls.omp_clause_copy_ctor
6206 (c, unshare_expr (ivar), x);
6207 gimplify_and_add (x, &llist[0]);
6208 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6209 if (x)
6210 gimplify_and_add (x, &llist[1]);
6211 break;
6212 }
6213 if (omp_privatize_by_reference (var))
6214 {
6215 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6216 tree new_vard = TREE_OPERAND (new_var, 0);
6217 gcc_assert (DECL_P (new_vard));
6218 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6219 nx = TYPE_SIZE_UNIT (type);
6220 if (TREE_CONSTANT (nx))
6221 {
6222 nx = create_tmp_var_raw (type, get_name (var));
6223 gimple_add_tmp_var (nx);
6224 TREE_ADDRESSABLE (nx) = 1;
6225 nx = build_fold_addr_expr_loc (clause_loc, nx);
6226 nx = fold_convert_loc (clause_loc,
6227 TREE_TYPE (new_vard), nx);
6228 gimplify_assign (new_vard, nx, ilist);
6229 }
6230 }
6231 }
6232 x = lang_hooks.decls.omp_clause_copy_ctor
6233 (c, unshare_expr (new_var), x);
6234 gimplify_and_add (x, ilist);
6235 goto do_dtor;
6236
6237 case OMP_CLAUSE__LOOPTEMP_:
6238 case OMP_CLAUSE__REDUCTEMP_:
6239 gcc_assert (is_taskreg_ctx (ctx));
6240 x = build_outer_var_ref (var, ctx);
6241 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6242 gimplify_and_add (x, ilist);
6243 break;
6244
6245 case OMP_CLAUSE_COPYIN:
6246 by_ref = use_pointer_for_field (var, NULL);
6247 x = build_receiver_ref (var, by_ref, ctx);
6248 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6249 append_to_statement_list (x, &copyin_seq);
6250 copyin_by_ref |= by_ref;
6251 break;
6252
6253 case OMP_CLAUSE_REDUCTION:
6254 case OMP_CLAUSE_IN_REDUCTION:
6255 /* OpenACC reductions are initialized using the
6256 GOACC_REDUCTION internal function. */
6257 if (is_gimple_omp_oacc (ctx->stmt))
6258 break;
6259 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6260 {
6261 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6262 gimple *tseq;
6263 tree ptype = TREE_TYPE (placeholder);
6264 if (cond)
6265 {
6266 x = error_mark_node;
6267 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6268 && !task_reduction_needs_orig_p)
6269 x = var;
6270 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6271 {
6272 tree pptype = build_pointer_type (ptype);
6273 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6274 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6275 size_int (task_reduction_cnt_full
6276 + task_reduction_cntorig - 1),
6277 NULL_TREE, NULL_TREE);
6278 else
6279 {
6280 unsigned int idx
6281 = *ctx->task_reduction_map->get (c);
6282 x = task_reduction_read (ilist, tskred_temp,
6283 pptype, 7 + 3 * idx);
6284 }
6285 x = fold_convert (pptype, x);
6286 x = build_simple_mem_ref (x);
6287 }
6288 }
6289 else
6290 {
6291 lower_private_allocate (var, new_var, allocator,
6292 allocate_ptr, ilist, ctx, false,
6293 NULL_TREE);
6294 x = build_outer_var_ref (var, ctx);
6295
6296 if (omp_privatize_by_reference (var)
6297 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6298 x = build_fold_addr_expr_loc (clause_loc, x);
6299 }
6300 SET_DECL_VALUE_EXPR (placeholder, x);
6301 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6302 tree new_vard = new_var;
6303 if (omp_privatize_by_reference (var))
6304 {
6305 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6306 new_vard = TREE_OPERAND (new_var, 0);
6307 gcc_assert (DECL_P (new_vard));
6308 }
6309 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6310 if (is_simd
6311 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6312 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6313 rvarp = &rvar;
6314 if (is_simd
6315 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6316 ivar, lvar, rvarp,
6317 &rvar2))
6318 {
6319 if (new_vard == new_var)
6320 {
6321 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6322 SET_DECL_VALUE_EXPR (new_var, ivar);
6323 }
6324 else
6325 {
6326 SET_DECL_VALUE_EXPR (new_vard,
6327 build_fold_addr_expr (ivar));
6328 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6329 }
6330 x = lang_hooks.decls.omp_clause_default_ctor
6331 (c, unshare_expr (ivar),
6332 build_outer_var_ref (var, ctx));
6333 if (rvarp && ctx->for_simd_scan_phase)
6334 {
6335 if (x)
6336 gimplify_and_add (x, &llist[0]);
6337 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6338 if (x)
6339 gimplify_and_add (x, &llist[1]);
6340 break;
6341 }
6342 else if (rvarp)
6343 {
6344 if (x)
6345 {
6346 gimplify_and_add (x, &llist[0]);
6347
6348 tree ivar2 = unshare_expr (lvar);
6349 TREE_OPERAND (ivar2, 1) = sctx.idx;
6350 x = lang_hooks.decls.omp_clause_default_ctor
6351 (c, ivar2, build_outer_var_ref (var, ctx));
6352 gimplify_and_add (x, &llist[0]);
6353
6354 if (rvar2)
6355 {
6356 x = lang_hooks.decls.omp_clause_default_ctor
6357 (c, unshare_expr (rvar2),
6358 build_outer_var_ref (var, ctx));
6359 gimplify_and_add (x, &llist[0]);
6360 }
6361
6362 /* For types that need construction, add another
6363 private var which will be default constructed
6364 and optionally initialized with
6365 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6366 loop we want to assign this value instead of
6367 constructing and destructing it in each
6368 iteration. */
6369 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6370 gimple_add_tmp_var (nv);
6371 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6372 ? rvar2
6373 : ivar, 0),
6374 nv);
6375 x = lang_hooks.decls.omp_clause_default_ctor
6376 (c, nv, build_outer_var_ref (var, ctx));
6377 gimplify_and_add (x, ilist);
6378
6379 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6380 {
6381 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6382 x = DECL_VALUE_EXPR (new_vard);
6383 tree vexpr = nv;
6384 if (new_vard != new_var)
6385 vexpr = build_fold_addr_expr (nv);
6386 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6387 lower_omp (&tseq, ctx);
6388 SET_DECL_VALUE_EXPR (new_vard, x);
6389 gimple_seq_add_seq (ilist, tseq);
6390 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6391 }
6392
6393 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6394 if (x)
6395 gimplify_and_add (x, dlist);
6396 }
6397
6398 tree ref = build_outer_var_ref (var, ctx);
6399 x = unshare_expr (ivar);
6400 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6401 ref);
6402 gimplify_and_add (x, &llist[0]);
6403
6404 ref = build_outer_var_ref (var, ctx);
6405 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6406 rvar);
6407 gimplify_and_add (x, &llist[3]);
6408
6409 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6410 if (new_vard == new_var)
6411 SET_DECL_VALUE_EXPR (new_var, lvar);
6412 else
6413 SET_DECL_VALUE_EXPR (new_vard,
6414 build_fold_addr_expr (lvar));
6415
6416 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6417 if (x)
6418 gimplify_and_add (x, &llist[1]);
6419
6420 tree ivar2 = unshare_expr (lvar);
6421 TREE_OPERAND (ivar2, 1) = sctx.idx;
6422 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6423 if (x)
6424 gimplify_and_add (x, &llist[1]);
6425
6426 if (rvar2)
6427 {
6428 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6429 if (x)
6430 gimplify_and_add (x, &llist[1]);
6431 }
6432 break;
6433 }
6434 if (x)
6435 gimplify_and_add (x, &llist[0]);
6436 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6437 {
6438 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6439 lower_omp (&tseq, ctx);
6440 gimple_seq_add_seq (&llist[0], tseq);
6441 }
6442 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6443 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6444 lower_omp (&tseq, ctx);
6445 gimple_seq_add_seq (&llist[1], tseq);
6446 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6447 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6448 if (new_vard == new_var)
6449 SET_DECL_VALUE_EXPR (new_var, lvar);
6450 else
6451 SET_DECL_VALUE_EXPR (new_vard,
6452 build_fold_addr_expr (lvar));
6453 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6454 if (x)
6455 gimplify_and_add (x, &llist[1]);
6456 break;
6457 }
6458 /* If this is a reference to a constant-size reduction var
6459 with a placeholder, we haven't emitted the initializer
6460 for it because that is undesirable if SIMD arrays are used.
6461 But if they aren't used, we need to emit the deferred
6462 initialization now. */
6463 else if (omp_privatize_by_reference (var) && is_simd)
6464 handle_simd_reference (clause_loc, new_vard, ilist);
6465
6466 tree lab2 = NULL_TREE;
6467 if (cond)
6468 {
6469 gimple *g;
6470 if (!is_parallel_ctx (ctx))
6471 {
6472 tree condv = create_tmp_var (boolean_type_node);
6473 tree m = build_simple_mem_ref (cond);
6474 g = gimple_build_assign (condv, m);
6475 gimple_seq_add_stmt (ilist, g);
6476 tree lab1
6477 = create_artificial_label (UNKNOWN_LOCATION);
6478 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6479 g = gimple_build_cond (NE_EXPR, condv,
6480 boolean_false_node,
6481 lab2, lab1);
6482 gimple_seq_add_stmt (ilist, g);
6483 gimple_seq_add_stmt (ilist,
6484 gimple_build_label (lab1));
6485 }
6486 g = gimple_build_assign (build_simple_mem_ref (cond),
6487 boolean_true_node);
6488 gimple_seq_add_stmt (ilist, g);
6489 }
6490 x = lang_hooks.decls.omp_clause_default_ctor
6491 (c, unshare_expr (new_var),
6492 cond ? NULL_TREE
6493 : build_outer_var_ref (var, ctx));
6494 if (x)
6495 gimplify_and_add (x, ilist);
6496
6497 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6498 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6499 {
6500 if (ctx->for_simd_scan_phase)
6501 goto do_dtor;
6502 if (x || (!is_simd
6503 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6504 {
6505 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6506 gimple_add_tmp_var (nv);
6507 ctx->cb.decl_map->put (new_vard, nv);
6508 x = lang_hooks.decls.omp_clause_default_ctor
6509 (c, nv, build_outer_var_ref (var, ctx));
6510 if (x)
6511 gimplify_and_add (x, ilist);
6512 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6513 {
6514 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6515 tree vexpr = nv;
6516 if (new_vard != new_var)
6517 vexpr = build_fold_addr_expr (nv);
6518 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6519 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6520 lower_omp (&tseq, ctx);
6521 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6522 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6523 gimple_seq_add_seq (ilist, tseq);
6524 }
6525 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6526 if (is_simd && ctx->scan_exclusive)
6527 {
6528 tree nv2
6529 = create_tmp_var_raw (TREE_TYPE (new_var));
6530 gimple_add_tmp_var (nv2);
6531 ctx->cb.decl_map->put (nv, nv2);
6532 x = lang_hooks.decls.omp_clause_default_ctor
6533 (c, nv2, build_outer_var_ref (var, ctx));
6534 gimplify_and_add (x, ilist);
6535 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6536 if (x)
6537 gimplify_and_add (x, dlist);
6538 }
6539 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6540 if (x)
6541 gimplify_and_add (x, dlist);
6542 }
6543 else if (is_simd
6544 && ctx->scan_exclusive
6545 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6546 {
6547 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6548 gimple_add_tmp_var (nv2);
6549 ctx->cb.decl_map->put (new_vard, nv2);
6550 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6551 if (x)
6552 gimplify_and_add (x, dlist);
6553 }
6554 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6555 goto do_dtor;
6556 }
6557
6558 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6559 {
6560 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6561 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6562 && is_omp_target (ctx->stmt))
6563 {
6564 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6565 tree oldv = NULL_TREE;
6566 gcc_assert (d);
6567 if (DECL_HAS_VALUE_EXPR_P (d))
6568 oldv = DECL_VALUE_EXPR (d);
6569 SET_DECL_VALUE_EXPR (d, new_vard);
6570 DECL_HAS_VALUE_EXPR_P (d) = 1;
6571 lower_omp (&tseq, ctx);
6572 if (oldv)
6573 SET_DECL_VALUE_EXPR (d, oldv);
6574 else
6575 {
6576 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6577 DECL_HAS_VALUE_EXPR_P (d) = 0;
6578 }
6579 }
6580 else
6581 lower_omp (&tseq, ctx);
6582 gimple_seq_add_seq (ilist, tseq);
6583 }
6584 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6585 if (is_simd)
6586 {
6587 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6588 lower_omp (&tseq, ctx);
6589 gimple_seq_add_seq (dlist, tseq);
6590 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6591 }
6592 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6593 if (cond)
6594 {
6595 if (lab2)
6596 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6597 break;
6598 }
6599 goto do_dtor;
6600 }
6601 else
6602 {
6603 x = omp_reduction_init (c, TREE_TYPE (new_var));
6604 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6605 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6606
6607 if (cond)
6608 {
6609 gimple *g;
6610 tree lab2 = NULL_TREE;
6611 /* GOMP_taskgroup_reduction_register memsets the whole
6612 array to zero. If the initializer is zero, we don't
6613 need to initialize it again, just mark it as ever
6614 used unconditionally, i.e. cond = true. */
6615 if (initializer_zerop (x))
6616 {
6617 g = gimple_build_assign (build_simple_mem_ref (cond),
6618 boolean_true_node);
6619 gimple_seq_add_stmt (ilist, g);
6620 break;
6621 }
6622
6623 /* Otherwise, emit
6624 if (!cond) { cond = true; new_var = x; } */
6625 if (!is_parallel_ctx (ctx))
6626 {
6627 tree condv = create_tmp_var (boolean_type_node);
6628 tree m = build_simple_mem_ref (cond);
6629 g = gimple_build_assign (condv, m);
6630 gimple_seq_add_stmt (ilist, g);
6631 tree lab1
6632 = create_artificial_label (UNKNOWN_LOCATION);
6633 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6634 g = gimple_build_cond (NE_EXPR, condv,
6635 boolean_false_node,
6636 lab2, lab1);
6637 gimple_seq_add_stmt (ilist, g);
6638 gimple_seq_add_stmt (ilist,
6639 gimple_build_label (lab1));
6640 }
6641 g = gimple_build_assign (build_simple_mem_ref (cond),
6642 boolean_true_node);
6643 gimple_seq_add_stmt (ilist, g);
6644 gimplify_assign (new_var, x, ilist);
6645 if (lab2)
6646 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6647 break;
6648 }
6649
6650 /* reduction(-:var) sums up the partial results, so it
6651 acts identically to reduction(+:var). */
6652 if (code == MINUS_EXPR)
6653 code = PLUS_EXPR;
6654
6655 bool is_truth_op
6656 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6657 tree new_vard = new_var;
6658 if (is_simd && omp_privatize_by_reference (var))
6659 {
6660 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6661 new_vard = TREE_OPERAND (new_var, 0);
6662 gcc_assert (DECL_P (new_vard));
6663 }
6664 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6665 if (is_simd
6666 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6667 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6668 rvarp = &rvar;
6669 if (is_simd
6670 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6671 ivar, lvar, rvarp,
6672 &rvar2))
6673 {
6674 if (new_vard != new_var)
6675 {
6676 SET_DECL_VALUE_EXPR (new_vard,
6677 build_fold_addr_expr (lvar));
6678 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6679 }
6680
6681 tree ref = build_outer_var_ref (var, ctx);
6682
6683 if (rvarp)
6684 {
6685 if (ctx->for_simd_scan_phase)
6686 break;
6687 gimplify_assign (ivar, ref, &llist[0]);
6688 ref = build_outer_var_ref (var, ctx);
6689 gimplify_assign (ref, rvar, &llist[3]);
6690 break;
6691 }
6692
6693 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6694
6695 if (sctx.is_simt)
6696 {
6697 if (!simt_lane)
6698 simt_lane = create_tmp_var (unsigned_type_node);
6699 x = build_call_expr_internal_loc
6700 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6701 TREE_TYPE (ivar), 2, ivar, simt_lane);
6702 /* Make sure x is evaluated unconditionally. */
6703 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6704 gimplify_assign (bfly_var, x, &llist[2]);
6705 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6706 gimplify_assign (ivar, x, &llist[2]);
6707 }
6708 tree ivar2 = ivar;
6709 tree ref2 = ref;
6710 if (is_truth_op)
6711 {
6712 tree zero = build_zero_cst (TREE_TYPE (ivar));
6713 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6714 boolean_type_node, ivar,
6715 zero);
6716 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6717 boolean_type_node, ref,
6718 zero);
6719 }
6720 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6721 if (is_truth_op)
6722 x = fold_convert (TREE_TYPE (ref), x);
6723 ref = build_outer_var_ref (var, ctx);
6724 gimplify_assign (ref, x, &llist[1]);
6725
6726 }
6727 else
6728 {
6729 lower_private_allocate (var, new_var, allocator,
6730 allocate_ptr, ilist, ctx,
6731 is_ref: false, NULL_TREE);
6732 if (omp_privatize_by_reference (var) && is_simd)
6733 handle_simd_reference (clause_loc, new_vard, ilist);
6734 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6735 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6736 break;
6737 gimplify_assign (new_var, x, ilist);
6738 if (is_simd)
6739 {
6740 tree ref = build_outer_var_ref (var, ctx);
6741 tree new_var2 = new_var;
6742 tree ref2 = ref;
6743 if (is_truth_op)
6744 {
6745 tree zero = build_zero_cst (TREE_TYPE (new_var));
6746 new_var2
6747 = fold_build2_loc (clause_loc, NE_EXPR,
6748 boolean_type_node, new_var,
6749 zero);
6750 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6751 boolean_type_node, ref,
6752 zero);
6753 }
6754 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6755 if (is_truth_op)
6756 x = fold_convert (TREE_TYPE (new_var), x);
6757 ref = build_outer_var_ref (var, ctx);
6758 gimplify_assign (ref, x, dlist);
6759 }
6760 if (allocator)
6761 goto do_dtor;
6762 }
6763 }
6764 break;
6765
6766 default:
6767 gcc_unreachable ();
6768 }
6769 }
6770 }
6771 if (tskred_avar)
6772 {
6773 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6774 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6775 }
6776
6777 if (known_eq (sctx.max_vf, 1U))
6778 {
6779 sctx.is_simt = false;
6780 if (ctx->lastprivate_conditional_map)
6781 {
6782 if (gimple_omp_for_combined_into_p (ctx->stmt))
6783 {
6784 /* Signal to lower_omp_1 that it should use the parent context. */
6785 ctx->combined_into_simd_safelen1 = true;
6786 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6787 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6788 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6789 {
6790 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6791 omp_context *outer = ctx->outer;
6792 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6793 outer = outer->outer;
6794 tree *v = ctx->lastprivate_conditional_map->get (o);
6795 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6796 tree *pv = outer->lastprivate_conditional_map->get (po);
6797 *v = *pv;
6798 }
6799 }
6800 else
6801 {
6802 /* When not vectorized, treat lastprivate(conditional:) like
6803 normal lastprivate, as there will be just one simd lane
6804 writing the privatized variable. */
6805 delete ctx->lastprivate_conditional_map;
6806 ctx->lastprivate_conditional_map = NULL;
6807 }
6808 }
6809 }
6810
6811 if (nonconst_simd_if)
6812 {
6813 if (sctx.lane == NULL_TREE)
6814 {
6815 sctx.idx = create_tmp_var (unsigned_type_node);
6816 sctx.lane = create_tmp_var (unsigned_type_node);
6817 }
6818 /* FIXME: For now. */
6819 sctx.is_simt = false;
6820 }
6821
6822 if (sctx.lane || sctx.is_simt)
6823 {
6824 uid = create_tmp_var (ptr_type_node, "simduid");
6825 /* We don't want uninit warnings on simduid; it is always
6826 uninitialized, since we use it only for its DECL_UID, not for its value. */
6827 suppress_warning (uid, OPT_Wuninitialized);
6828 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6829 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6830 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6831 gimple_omp_for_set_clauses (ctx->stmt, c);
6832 }
6833 /* Emit calls denoting privatized variables and initializing a pointer to
6834 a structure that holds the private variables as fields after the ompdevlow pass. */
6835 if (sctx.is_simt)
6836 {
6837 sctx.simt_eargs[0] = uid;
6838 gimple *g
6839 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6840 gimple_call_set_lhs (g, uid);
6841 gimple_seq_add_stmt (ilist, g);
6842 sctx.simt_eargs.release ();
6843
6844 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6845 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6846 gimple_call_set_lhs (g, simtrec);
6847 gimple_seq_add_stmt (ilist, g);
6848 }
6849 if (sctx.lane)
6850 {
6851 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6852 2 + (nonconst_simd_if != NULL),
6853 uid, integer_zero_node,
6854 nonconst_simd_if);
6855 gimple_call_set_lhs (g, sctx.lane);
6856 gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (ctx->stmt));
6857 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6858 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6859 build_int_cst (unsigned_type_node, 0));
6860 gimple_seq_add_stmt (ilist, g);
6861 if (sctx.lastlane)
6862 {
6863 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6864 2, uid, sctx.lane);
6865 gimple_call_set_lhs (g, sctx.lastlane);
6866 gimple_seq_add_stmt (dlist, g);
6867 gimple_seq_add_seq (dlist, llist[3]);
6868 }
6869 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
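/* A sketch of the loop built here (names illustrative):
     simt_lane = 1;
     while (simt_lane < simt_vf)
       {
         <llist[2]: ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>;
         simt_lane <<= 1;
       }  */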
6870 if (llist[2])
6871 {
6872 tree simt_vf = create_tmp_var (unsigned_type_node);
6873 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6874 gimple_call_set_lhs (g, simt_vf);
6875 gimple_seq_add_stmt (dlist, g);
6876
6877 tree t = build_int_cst (unsigned_type_node, 1);
6878 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6879 gimple_seq_add_stmt (dlist, g);
6880
6881 t = build_int_cst (unsigned_type_node, 0);
6882 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6883 gimple_seq_add_stmt (dlist, g);
6884
6885 tree body = create_artificial_label (UNKNOWN_LOCATION);
6886 tree header = create_artificial_label (UNKNOWN_LOCATION);
6887 tree end = create_artificial_label (UNKNOWN_LOCATION);
6888 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6889 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6890
6891 gimple_seq_add_seq (dlist, llist[2]);
6892
6893 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6894 gimple_seq_add_stmt (dlist, g);
6895
6896 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6897 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6898 gimple_seq_add_stmt (dlist, g);
6899
6900 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6901 }
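/* Wrap the per-lane constructor (i == 0, into ILIST) and
   destructor (i == 1, into DLIST) sequences in a loop over the
   actual vectorization factor, roughly:
     for (sctx.idx = 0; sctx.idx < GOMP_SIMD_VF (simduid); sctx.idx++)
       <llist[i]>;
   (illustrative; the VF call is hoisted into a temporary).  */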
6902 for (int i = 0; i < 2; i++)
6903 if (llist[i])
6904 {
6905 tree vf = create_tmp_var (unsigned_type_node);
6906 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6907 gimple_call_set_lhs (g, vf);
6908 gimple_seq *seq = i == 0 ? ilist : dlist;
6909 gimple_seq_add_stmt (seq, g);
6910 tree t = build_int_cst (unsigned_type_node, 0);
6911 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6912 gimple_seq_add_stmt (seq, g);
6913 tree body = create_artificial_label (UNKNOWN_LOCATION);
6914 tree header = create_artificial_label (UNKNOWN_LOCATION);
6915 tree end = create_artificial_label (UNKNOWN_LOCATION);
6916 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6917 gimple_seq_add_stmt (seq, gimple_build_label (body));
6918 gimple_seq_add_seq (seq, llist[i]);
6919 t = build_int_cst (unsigned_type_node, 1);
6920 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6921 gimple_seq_add_stmt (seq, g);
6922 gimple_seq_add_stmt (seq, gimple_build_label (header));
6923 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6924 gimple_seq_add_stmt (seq, g);
6925 gimple_seq_add_stmt (seq, gimple_build_label (end));
6926 }
6927 }
6928 if (sctx.is_simt)
6929 {
6930 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6931 gimple *g
6932 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6933 gimple_seq_add_stmt (dlist, g);
6934 }
6935
6936 /* The copyin sequence is not to be executed by the main thread, since
6937 that would result in self-copies. Perhaps not visible to scalars,
6938 but it certainly is to C++ operator=. */
6939 if (copyin_seq)
6940 {
6941 x = build_call_expr (builtin_decl_explicit (fncode: BUILT_IN_OMP_GET_THREAD_NUM),
6942 0);
6943 x = build2 (NE_EXPR, boolean_type_node, x,
6944 build_int_cst (TREE_TYPE (x), 0));
6945 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6946 gimplify_and_add (x, ilist);
6947 }
6948
6949 /* If any copyin variable is passed by reference, we must ensure the
6950 master thread doesn't modify it before it is copied over in all
6951 threads. Similarly for variables in both firstprivate and
6952 lastprivate clauses we need to ensure the lastprivate copying
6953 happens after firstprivate copying in all threads. And similarly
6954 for UDRs if initializer expression refers to omp_orig. */
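  /* For example (illustration only), with
       #pragma omp parallel firstprivate (x) lastprivate (x)
     the barrier emitted below keeps any thread from overwriting X with
     its lastprivate result before every other thread has read the
     original value for its firstprivate copy.  */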
  if (copyin_by_ref || lastprivate_firstprivate
      || (reduction_omp_orig_ref
	  && !ctx->scan_inclusive
	  && !ctx->scan_exclusive))
    {
      /* Don't add any barrier for #pragma omp simd or
	 #pragma omp distribute.  */
      if (!is_task_ctx (ctx)
	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
    }

  /* If max_vf is non-zero, then we can use only a vectorization factor
     up to the max_vf we chose.  So stick it into the safelen clause.  */
  if (maybe_ne (sctx.max_vf, 0U))
    {
      tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				OMP_CLAUSE_SAFELEN);
      poly_uint64 safe_len;
      if (c == NULL_TREE
	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
	      && maybe_gt (safe_len, sctx.max_vf)))
	{
	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
						       sctx.max_vf);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
	  gimple_omp_for_set_clauses (ctx->stmt, c);
	}
    }
}

/* Create temporary variables for lastprivate(conditional:) implementation
   in context CTX with CLAUSES.  */
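/* For instance (a sketch, not the exact generated IL), for
     #pragma omp for lastprivate (conditional: x)
   each thread gets an iteration-counter temporary; whenever it assigns X
   it records the iteration number, and at the end the thread holding the
   highest recorded iteration writes its private X back to the original.  */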

static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (o, v);
      }
}


/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence in which to insert early
   initialization if needed, STMT_LIST receives the non-conditional
   lastprivate handling, and CSTMT_LIST is a sequence that needs to be
   run in a critical section.  */
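/* For example (illustration only), for
     #pragma omp for lastprivate (x)
   the sequence appended to STMT_LIST is roughly

     if (<this thread ran the sequentially last iteration>)
       x = x_private;

   where the guarding test is built from the PREDICATE argument.  */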

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  unsigned HOST_WIDE_INT conditional_off = 0;
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		suppress_warning (new_var, OPT_Wuninitialized);
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

    next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}

/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence in JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */
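/* For instance (a sketch), for  reduction (+:sum)  on a gang loop, each
   call emitted below has the form

     sum = GOACC_REDUCTION (<code>, ref_to_res, sum, <gang level>,
			    PLUS_EXPR, <buffer offset>);

   with <code> being the SETUP/INIT/FINI/TEARDOWN variant; the target
   compiler later expands each variant for the chosen execution model.  */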

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
		       omp_context *ctx)
{
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	  {
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  /* Note that 'var' might be a mem ref.  */
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (incoming),
					  level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (v1), level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (v2), level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, unshare_expr (v3),
					  level, op, off);

	gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
	gimplify_assign (unshare_expr (v2), init_call, &after_fork);
	gimplify_assign (unshare_expr (v3), fini_call, &before_join);
	gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (private_marker)
    gimple_seq_add_stmt (fork_seq, private_marker);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}

/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.  */
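/* For example (illustration only), a single  reduction (+:s)  clause is
   merged with a relaxed atomic update, roughly

     #pragma omp atomic relaxed
     s = s + s_private;

   whereas two or more clauses (or array sections/UDRs) are merged inside
   one GOMP_atomic_start ()/GOMP_atomic_end () pair.  */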

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer- or reference-based array
		 section, the var could be private in the outer
		 context, e.g. on an orphaned loop construct.  Pretend
		 this is the private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (INDIRECT_REF_P (var))
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (INDIRECT_REF_P (var));
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c)
	  && omp_privatize_by_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
      if (count == 1)
	{
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
			       new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (INDIRECT_REF_P (d))
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_privatize_by_reference (var)
			  && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_privatize_by_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      tree out2 = out;
	      tree priv2 = priv;
	      if (is_truth_op)
		{
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
		}
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
	      if (is_truth_op)
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_privatize_by_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}


/* Generate code to implement the COPYPRIVATE clauses.  */
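/* For example (a sketch), for
     #pragma omp single copyprivate (a)
   SLIST stores the executing thread's A (or its address, when passed by
   reference) into the copy-out structure, and RLIST lets every other
   thread load it back:

     sender:    copyout.a = a;      (or  copyout.a = &a;)
     receivers: a = copyout_p->a;   (or  a = *copyout_p->a;)  */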

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_privatize_by_reference (var))
	{
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}


/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */
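/* E.g. (illustration only; the .omp_data_o naming follows the usual dump
   convention), for
     #pragma omp parallel firstprivate (x)
   ILIST gets roughly  .omp_data_o.x = x;  before the region is entered,
   and a non-reference lastprivate gets the mirror copy-out
   x = .omp_data_o.x;  appended to OLIST after it completes.  */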

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (INDIRECT_REF_P (val)
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}

/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */
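/* A sketch of what this emits for an automatically shared X that has to
   be passed by reference (illustration only):

     ilist:  .omp_data_o.x = &x;

   while a by-value field gets a plain copy-in plus, when X is writable,
   a copy-out back into X once the region is over.  */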

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
		 or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}

/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */
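/* The marker built below is roughly (illustration only)

     ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]);

   where TAG packs the OLF_* partitioning flags for the loop.  */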

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loop handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning.  */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}

/* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, when
   non-NULL, is the partitioning level of the enclosed region.  */

static void
lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
			tree tofollow, gimple_seq *seq)
{
  int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
		     : IFN_UNIQUE_OACC_TAIL_MARK);
  tree marker = build_int_cst (integer_type_node, marker_kind);
  int nargs = 2 + (tofollow != NULL_TREE);
  gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
					    marker, ddvar, tofollow);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);
}

/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */
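/* For a loop partitioned over two levels, e.g. gang and vector, the
   result is nested fork/join pairs, roughly (illustration only):

     head:  SETUP/FORK/INIT (gang)    SETUP/FORK/INIT (vector)
     ... loop body ...
     tail:  FINI/JOIN/TEARDOWN (vector)    FINI/JOIN/TEARDOWN (gang)

   i.e. each inner level's sequences are bracketed by the outer ones,
   because each iteration of the loop below appends to HEAD but
   prepends to TAIL.  */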

static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);

  if (private_marker)
    {
      gimple_set_location (private_marker, loc);
      gimple_call_set_lhs (private_marker, ddvar);
      gimple_call_set_arg (private_marker, 1, ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, (count == 1) ? private_marker : NULL,
			     join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}

/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */
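/* The wrapper built below corresponds to (a sketch)

     try { BODY } catch (...) { terminate (); }

   with the handler being whatever the language's EH cleanup hook
   supplies, or a plain __builtin_trap () when no such hook exists.  */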

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple *g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = builtin_decl_explicit (BUILT_IN_TRAP);

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
			GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}


/* Routines to lower OMP directives into OMP-GIMPLE.  */

/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}

/* Find the first task_reduction or reduction clause or return NULL
   if there are none.  */

static inline tree
omp_task_reductions_find_first (tree clauses, enum tree_code code,
				enum omp_clause_code ccode)
{
  while (1)
    {
      clauses = omp_find_clause (clauses, ccode);
      if (clauses == NULL_TREE)
	return NULL_TREE;
      if (ccode != OMP_CLAUSE_REDUCTION
	  || code == OMP_TASKLOOP
	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
	return clauses;
      clauses = OMP_CLAUSE_CHAIN (clauses);
    }
}

static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
				       gimple_seq *, gimple_seq *);

/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */
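/* Schematically (illustration only),

     #pragma omp sections
     { #pragma omp section S1;  #pragma omp section S2; }

   becomes a bind holding the privatization/init sequence, the sections
   statement with a fresh .section control variable, a sections-switch,
   the lowered section bodies each ending in an OMP return, an OMP
   continue, then reductions, destructors and the closing OMP return
   (with an implicit barrier unless nowait).  */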
8524
8525static void
8526lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8527{
8528 tree block, control;
8529 gimple_stmt_iterator tgsi;
8530 gomp_sections *stmt;
8531 gimple *t;
8532 gbind *new_stmt, *bind;
8533 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
8534
8535 stmt = as_a <gomp_sections *> (p: gsi_stmt (i: *gsi_p));
8536
8537 push_gimplify_context ();
8538
8539 dlist = NULL;
8540 ilist = NULL;
8541
8542 tree rclauses
8543 = omp_task_reductions_find_first (clauses: gimple_omp_sections_clauses (gs: stmt),
8544 code: OMP_SECTIONS, ccode: OMP_CLAUSE_REDUCTION);
8545 tree rtmp = NULL_TREE;
8546 if (rclauses)
8547 {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}


/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   for a synchronization analysis pass.  */

static void
lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *call, *cond;
  tree lhs, decl;

  decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}


/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   for a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}


/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}


/* Lower code for an OMP scope directive.  */

static void
lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *scope_stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;
  gimple_seq tred_dlist = NULL;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;

  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
				      OMP_SCOPE, OMP_CLAUSE_REDUCTION);
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
      gimple_omp_scope_set_clauses (scope_stmt, c);
      lower_omp_task_reductions (ctx, OMP_SCOPE,
				 gimple_omp_scope_clauses (scope_stmt),
				 &bind_body, &tred_dlist);
      rclauses = c;
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
      gimple *stmt = gimple_build_call (fndecl, 1, temp);
      gimple_seq_add_stmt (&bind_body, stmt);
    }

  lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, scope_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));

  gimple_omp_set_body (scope_stmt, NULL);

  gimple_seq clist = NULL;
  lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
      gimple_seq_add_seq (&bind_body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
    }

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  gimple_seq_add_seq (&bind_body_tail, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);

  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Expand code for an OpenMP master or masked directive.  */
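/* A sketch of the expansion, where FILTER is the filter clause
   expression for masked and 0 for master:

	if (omp_get_thread_num () == FILTER)
	  BODY;  */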

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;
  tree filter = integer_zero_node;

  push_gimplify_context ();

  if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
    {
      filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
				OMP_CLAUSE_FILTER);
      if (filter)
	filter = fold_convert (integer_type_node,
			       OMP_CLAUSE_FILTER_EXPR (filter));
      else
	filter = integer_zero_node;
    }
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, filter);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}

/* Helper function for lower_omp_task_reductions.  For a given PASS,
   find the next clause that should be processed, or return false if
   all have been processed already.  Clauses whose type has a constant
   size are handled in pass 0; variable-sized ones and those whose
   OMP_CLAUSE_DECL is a MEM_REF are handled in pass 1.  */

static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  if (omp_privatize_by_reference (*decl))
	    *type = TREE_TYPE (*type);
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  *decl = NULL_TREE;
  *type = NULL_TREE;
  *next = NULL_TREE;
  return false;
}

/* Lower task_reduction and reduction clauses (the latter, unless CODE is
   OMP_TASKGROUP, only with the task modifier).  Register the mapping of
   those in the START sequence; reduce and unregister them in the END
   sequence.  */
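/* For example, for

	#pragma omp taskgroup task_reduction (+:x)

   the code built below registers the reduction bookkeeping array with
   the runtime in START, and in END walks the per-thread private copies
   of X, merges them back into X and unregisters the array again.  */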

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
		 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
	  break;
    }
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in
     libgomp/task.c.  */
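  /* As filled in below: entry [0] is the number of reductions, [1] the
     cache-line rounded size of one thread's chunk, [2] its alignment,
     [3] is set to -1 and [4] to 0; for the I-th reduction, [7 + 3 * I]
     holds the address of the original variable and [7 + 3 * I + 1] the
     byte offset of its private copy within the chunk.  The remaining
     entries are left to the runtime (see libgomp/task.c).  */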
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
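  /* I.e. sz = (sz + 63) & ~63, rounding each thread's chunk up to a
     multiple of the assumed 64-byte cache line so that chunks of
     different threads do not share cache lines.  */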
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
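  /* In pseudo-code, the sequence built into END below is roughly:

	for (idx = 0; idx < num_thr_sz; idx++, data += sz)
	  for each reduction clause C
	    if (the bool flag following C's private copy in this chunk)
	      {
		merge the private copy back into the original variable;
		destruct the private copy if required;
	      }
   */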
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE, lab7 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* For worksharing constructs or scope, only perform it in the master
	 thread, with the exception of cancelled implicit barriers - then only
	 handle the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  else /* if (code == OMP_SCOPE) */
	    c = gimple_omp_scope_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  if (code == OMP_TASKLOOP)
    {
      lab7 = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, data,
			     build_zero_cst (pointer_sized_int_node),
			     lab1, lab7);
      gimple_seq_add_stmt (end, g);
    }
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (INDIRECT_REF_P (var))
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (INDIRECT_REF_P (var));
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (INDIRECT_REF_P (v))
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_privatize_by_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL
	      || code == OMP_FOR
	      || code == OMP_SECTIONS
	      || code == OMP_SCOPE)
	    /* In parallel, worksharing or scope all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF
	      && omp_privatize_by_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_privatize_by_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_privatize_by_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else if (code == OMP_SCOPE)
	c = gimple_omp_scope_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  if (lab7)
    gimple_seq_add_stmt (end, gimple_build_label (lab7));
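  /* Finally clobber AVAR (a volatile empty CONSTRUCTOR marks the end of
     its lifetime), as the runtime no longer needs the array.  */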
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}

/* Expand code for an OpenMP taskgroup directive.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}


/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
      && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DOACROSS
	      || OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	    break;
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS);
      if (OMP_CLAUSE_DOACROSS_KIND (c) != OMP_CLAUSE_DOACROSS_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is equivalent to #pragma omp ordered threads,
     while we want a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}


/* Expand code for an OpenMP ordered directive.  */
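/* Without a simd clause, the block-associated form brackets BODY with
   GOMP_ordered_start ()/GOMP_ordered_end () calls; with one, with the
   IFN_GOMP_SIMD_ORDERED_START/IFN_GOMP_SIMD_ORDERED_END internal
   functions instead.  */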

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (gimple_omp_ordered_standalone_p (ord_stmt))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}


/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.  */
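/* For example, in

	#pragma omp simd reduction (inscan, +:r)
	for (i = 0; i < n; i++)
	  {
	    r += a[i];
	    #pragma omp scan inclusive (r)
	    b[i] = r;
	  }

   the statements before the scan directive form the input phase and
   those after it the scan phase.  */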

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

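	/* The second argument to IFN_GOMP_SIMD_LANE encodes the scan phase:
	   1 for the input phase, 2 for the scan phase of an inclusive scan,
	   3 for the scan phase of an exclusive scan.  */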
  if (is_simd || is_for)
    {
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_privatize_by_reference (var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      gimple_stmt_iterator gsi = gsi_start (*gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
10288
10289
/* Lower a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   it requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */
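
/* For illustration (a sketch; the mangled name is abbreviated), the named
   construct

     #pragma omp critical (foo)
       body;

   is lowered to approximately

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     body;
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   with .gomp_critical_user_foo being the common symbol created below,
   while the unnamed form calls GOMP_critical_start/GOMP_critical_end
   with no arguments.  */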

static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
        critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
        {
          char *new_str;

          decl = create_tmp_var_raw (ptr_type_node);

          new_str = ACONCAT ((".gomp_critical_user_",
                              IDENTIFIER_POINTER (name), NULL));
          DECL_NAME (decl) = get_identifier (new_str);
          TREE_PUBLIC (decl) = 1;
          TREE_STATIC (decl) = 1;
          DECL_COMMON (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;

          varpool_node::finalize_decl (decl);

          critical_name_mutexes->put (name, decl);
        }
      else
        decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
         inside function marked as offloadable, the symbol must be
         marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
        varpool_node::get_create (decl)->offloadable = 1;
      else
        for (octx = ctx->outer; octx; octx = octx->outer)
          if (is_gimple_omp_offloaded (octx->stmt))
            {
              varpool_node::get_create (decl)->offloadable = 1;
              break;
            }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
                                  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
                                    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}

/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */
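
/* As a rough sketch, for

     #pragma omp for lastprivate (x)
     for (V = N1; V < N2; V += STEP) ...

   the copy-out of X to its original list item is gated on "V >= N2"
   (or the strict "V == N2" chosen below when STEP is 1 or -1): after the
   loop, only the thread that executed the sequentially last chunk of
   iterations has advanced V to (or past) N2, so only that thread
   performs the copy-out.  */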

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
                           gimple_seq *dlist, gimple_seq *clist,
                           struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let
     optimizations such as VRP deduce the value and remove a copy.  */
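  /* (With a step of exactly 1 or -1 the iterator can only land exactly on
     the bound it is compared against, so the strict equality below is
     equivalent to the negated loop condition.)  */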
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
        cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
        {
          gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
          if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
              || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
            {
              if (gimple_omp_for_combined_into_p (gfor))
                {
                  gcc_assert (ctx->outer->outer
                              && is_parallel_ctx (ctx->outer->outer));
                  taskreg_ctx = ctx->outer->outer;
                }
              else
                {
                  struct omp_for_data outer_fd;
                  omp_extract_for_data (gfor, &outer_fd, NULL);
                  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
                }
            }
          else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
            taskreg_ctx = ctx->outer->outer;
        }
      else if (is_taskreg_ctx (ctx->outer))
        taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
        {
          int i;
          tree taskreg_clauses
            = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
          tree innerc = omp_find_clause (taskreg_clauses,
                                         OMP_CLAUSE__LOOPTEMP_);
          gcc_assert (innerc);
          int count = fd->collapse;
          if (fd->non_rect
              && fd->last_nonrect == fd->first_nonrect + 1)
            if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
              if (!TYPE_UNSIGNED (TREE_TYPE (v)))
                count += 4;
          for (i = 0; i < count; i++)
            {
              innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
                                        OMP_CLAUSE__LOOPTEMP_);
              gcc_assert (innerc);
            }
          innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
                                    OMP_CLAUSE__LOOPTEMP_);
          if (innerc)
            n2 = fold_convert (TREE_TYPE (n2),
                               lookup_decl (OMP_CLAUSE_DECL (innerc),
                                            taskreg_ctx));
        }
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
          && tree_fits_shwi_p (fd->loop.n2)
          && ! integer_zerop (fd->loop.n2))
        vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
        vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
         any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}

/* OpenACC privatization.

   Or, in other words, *sharing* at the respective OpenACC level of
   parallelism.

   From a correctness perspective, a non-addressable variable can't be
   accessed outside the current thread, so it can go in a (faster than
   shared memory) register -- though that register may need to be
   broadcast in some circumstances.  A variable can only meaningfully be
   "shared" across workers or vector lanes if its address is taken, e.g.
   by a call to an atomic builtin.

   From an optimization perspective, the trade-off is fuzzier: sometimes
   using shared memory directly may be faster than broadcasting.  */
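
/* As a sketch of the distinction (hypothetical source, not tied to any
   particular test case):

     #pragma acc parallel loop worker
     for (...)
       { int tmp = ...; ... tmp ...; }

   Here "tmp" is not addressable, so each worker can keep its own copy in
   a register.  If instead the code took "&tmp", e.g. to pass it to an
   atomic builtin, the variable would have to live in memory visible to
   the workers that are meant to share it.  */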

static void
oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
                                       const location_t loc, const tree c,
                                       const tree decl)
{
  const dump_user_location_t d_u_loc
    = dump_user_location_t::from_location_t (loc);
/* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat"
#endif
  dump_printf_loc (l_dump_flags, d_u_loc,
                   "variable %<%T%> ", decl);
#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
  if (c)
    dump_printf (l_dump_flags,
                 "in %qs clause ",
                 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
  else
    dump_printf (l_dump_flags,
                 "declared in block ");
}

static bool
oacc_privatization_candidate_p (const location_t loc, const tree c,
                                const tree decl)
{
  dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();

  /* There is some differentiation depending on block vs. clause.  */
  bool block = !c;

  bool res = true;

  if (res && !VAR_P (decl))
    {
      /* A PARM_DECL (appearing in a 'private' clause) is expected to have
         been privatized into a new VAR_DECL.  */
      gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);

      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "potentially has improper OpenACC privatization level: %qs\n",
                       get_tree_code_name (TREE_CODE (decl)));
        }
    }

  if (res && block && TREE_STATIC (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
                       "static");
        }
    }

  if (res && block && DECL_EXTERNAL (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
                       "external");
        }
    }

  if (res && !TREE_ADDRESSABLE (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
                       "not addressable");
        }
    }

  /* If an artificial variable has been added to a bind, e.g.
     a compiler-generated temporary structure used by the Fortran front-end,
     do not consider it as a privatization candidate.  Note that variables on
     the stack are private per-thread by default: making them "gang-private"
     for OpenACC actually means to share a single instance of a variable
     amongst all workers and threads spawned within each gang.
     At present, no compiler-generated artificial variables require such
     sharing semantics, so this is safe.  */
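
  /* (As an illustrative, hypothetical case: a temporary descriptor
     structure that the Fortran front end creates for an array argument
     may appear in such a bind; treating it as a candidate would
     pointlessly share a single instance of it amongst the gang, since
     no other worker or vector lane ever reads it.)  */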

  if (res && block && DECL_ARTIFICIAL (decl))
    {
      res = false;

      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "isn%'t candidate for adjusting OpenACC privatization "
                       "level: %s\n", "artificial");
        }
    }

  if (res)
    {
      if (dump_enabled_p ())
        {
          oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
          dump_printf (l_dump_flags,
                       "is candidate for adjusting OpenACC privatization level\n");
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      print_generic_decl (dump_file, decl, dump_flags);
      fprintf (dump_file, "\n");
    }

  return res;
}

/* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
   CTX.  */

static void
oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
{
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
      {
        tree decl = OMP_CLAUSE_DECL (c);

        tree new_decl = lookup_decl (decl, ctx);

        if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
                                             new_decl))
          continue;

        gcc_checking_assert
          (!ctx->oacc_privatization_candidates.contains (new_decl));
        ctx->oacc_privatization_candidates.safe_push (new_decl);
      }
}

/* Scan DECLS for candidates for adjusting OpenACC privatization level in
   CTX.  */

static void
oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
{
  for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
    {
      tree new_decl = lookup_decl (decl, ctx);
      gcc_checking_assert (new_decl == decl);

      if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
                                           new_decl))
        continue;

      gcc_checking_assert
        (!ctx->oacc_privatization_candidates.contains (new_decl));
      ctx->oacc_privatization_candidates.safe_push (new_decl);
    }
}

/* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */

static tree
omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
          && gimple_omp_for_combined_into_p (stmt))
        *handled_ops_p = false;
      break;

    case GIMPLE_OMP_SCAN:
      *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
      return integer_zero_node;

    default:
      break;
    }
  return NULL;
}

/* Helper function for lower_omp_for; add transformations for a worksharing
   loop with scan directives inside of it.
   For a worksharing loop not combined with simd, transform:
   #pragma omp for reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
         update (r);
       }
       #pragma omp scan inclusive(r)
       {
         use (r);
       }
     }

   into two worksharing loops + code to merge results:

   num_threads = omp_get_num_threads ();
   thread_num = omp_get_thread_num ();
   if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
   <D.2099>:
   var2 = r;
   goto <D.2101>;
   <D.2100>:
   // For UDRs this is UDR init, or if ctors are needed, copy from
   // var3 that has been constructed to contain the neutral element.
   var2 = 0;
   <D.2101>:
   ivar = 0;
   // The _scantemp_ clauses will arrange for rpriva to be initialized to
   // a shared array with num_threads elements and rprivb to a local array
   // with a number of elements equal to the number of (contiguous)
   // iterations the current thread will perform.  The controlb and
   // controlp variables are temporaries used to handle deallocation of
   // rprivb at the end of the second GOMP_FOR.
   #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
     _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
   for (i = 0; i < n; i = i + 1)
     {
       {
         // For UDRs this is UDR init or copy from var3.
         r = 0;
         // This is the input phase from user code.
         update (r);
       }
       {
         // For UDRs this is UDR merge.
         var2 = var2 + r;
         // Rather than handing it over to the user, save it to the local
         // thread's array.
         rprivb[ivar] = var2;
         // For exclusive scan, the above two statements are swapped.
         ivar = ivar + 1;
       }
     }
   // And remember the final value from this thread in the shared rpriva
   // array.
   rpriva[(sizetype) thread_num] = var2;
   // If there is more than one thread, compute the inclusive parallel scan
   // of the rpriva array using a work-efficient prefix sum.
   if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
   <D.2102>:
   GOMP_barrier ();
   down = 0;
   k = 1;
   num_threadsu = (unsigned int) num_threads;
   thread_nump1 = (unsigned int) thread_num + 1;
   <D.2108>:
   twok = k << 1;
   if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
   <D.2110>:
   down = 4294967295;
   k = k >> 1;
   if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
   <D.2112>:
   k = k >> 1;
   <D.2111>:
   twok = k << 1;
   cplx = .MUL_OVERFLOW (thread_nump1, twok);
   mul = REALPART_EXPR <cplx>;
   ovf = IMAGPART_EXPR <cplx>;
   if (ovf == 0) goto <D.2116>; else goto <D.2117>;
   <D.2116>:
   andv = k & down;
   andvm1 = andv + 4294967295;
   l = mul + andvm1;
   if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
   <D.2120>:
   // For UDRs this is UDR merge, performed using the var2 variable as a
   // temporary, i.e.
   // var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
   rpriva[l] = rpriva[l - k] + rpriva[l];
   <D.2117>:
   if (down == 0) goto <D.2121>; else goto <D.2122>;
   <D.2121>:
   k = k << 1;
   goto <D.2123>;
   <D.2122>:
   k = k >> 1;
   <D.2123>:
   GOMP_barrier ();
   if (k != 0) goto <D.2108>; else goto <D.2103>;
   <D.2103>:
   if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
   <D.2124>:
   // For UDRs this is UDR init or copy from var3.
   var2 = 0;
   goto <D.2126>;
   <D.2125>:
   var2 = rpriva[thread_num - 1];
   <D.2126>:
   ivar = 0;
   #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
     reduction(inscan,+:r) private(i)
   for (i = 0; i < n; i = i + 1)
     {
       {
         // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
         r = var2 + rprivb[ivar];
       }
       {
         // This is the scan phase from user code.
         use (r);
         // Plus a bump of the iterator.
         ivar = ivar + 1;
       }
     }  */

static void
lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
                    struct omp_for_data *fd, omp_context *ctx)
{
  bool is_for_simd = gimple_omp_for_combined_p (stmt);
  gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);

  gimple_seq body = gimple_omp_body (stmt);
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gimple_stmt_iterator gsi = input1_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan1_gsi = gsi;
  gimple *scan_stmt1 = gsi_stmt (gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);

  gimple_seq input_body = gimple_omp_body (input_stmt1);
  gimple_seq scan_body = gimple_omp_body (scan_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);
  gimple_omp_set_body (scan_stmt1, NULL);
  gimple_omp_set_body (stmt, NULL);

  gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
  gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
  gimple_omp_set_body (stmt, body);
  gimple_omp_set_body (input_stmt1, input_body);

  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi = input2_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan2_gsi = gsi;
  gimple *scan_stmt2 = gsi_stmt (gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  gimple_omp_set_body (scan_stmt2, scan_body);

  gimple_stmt_iterator input3_gsi = gsi_none ();
  gimple_stmt_iterator scan3_gsi = gsi_none ();
  gimple_stmt_iterator input4_gsi = gsi_none ();
  gimple_stmt_iterator scan4_gsi = gsi_none ();
  gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
  gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
  omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
  if (is_for_simd)
    {
      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      wi.info = (void *) &input3_gsi;
      walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input3_gsi));

      input_stmt3 = gsi_stmt (input3_gsi);
      gsi = input3_gsi;
      gsi_next (&gsi);
      scan3_gsi = gsi;
      scan_stmt3 = gsi_stmt (gsi);
      gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);

      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      wi.info = (void *) &input4_gsi;
      walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input4_gsi));

      input_stmt4 = gsi_stmt (input4_gsi);
      gsi = input4_gsi;
      gsi_next (&gsi);
      scan4_gsi = gsi;
      scan_stmt4 = gsi_stmt (gsi);
      gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);

      input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
      scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
    }

  tree num_threads = create_tmp_var (integer_type_node);
  tree thread_num = create_tmp_var (integer_type_node);
  tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  gimple *g = gimple_build_call (nthreads_decl, 0);
  gimple_call_set_lhs (g, num_threads);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_call (threadnum_decl, 0);
  gimple_call_set_lhs (g, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree ivar = create_tmp_var (sizetype);
  tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
  tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
  tree k = create_tmp_var (unsigned_type_node);
  tree l = create_tmp_var (unsigned_type_node);

  gimple_seq clist = NULL, mdlist = NULL;
  gimple_seq thr01_list = NULL, thrn1_list = NULL;
  gimple_seq thr02_list = NULL, thrn2_list = NULL;
  gimple_seq scan1_list = NULL, input2_list = NULL;
  gimple_seq last_list = NULL, reduc_list = NULL;
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
        && OMP_CLAUSE_REDUCTION_INSCAN (c))
      {
        location_t clause_loc = OMP_CLAUSE_LOCATION (c);
        tree var = OMP_CLAUSE_DECL (c);
        tree new_var = lookup_decl (var, ctx);
        tree var3 = NULL_TREE;
        tree new_vard = new_var;
        if (omp_privatize_by_reference (var))
          new_var = build_simple_mem_ref_loc (clause_loc, new_var);
        if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            var3 = maybe_lookup_decl (new_vard, ctx);
            if (var3 == new_vard)
              var3 = NULL_TREE;
          }

        tree ptype = build_pointer_type (TREE_TYPE (new_var));
        tree rpriva = create_tmp_var (ptype);
        tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
        OMP_CLAUSE_DECL (nc) = rpriva;
        *cp1 = nc;
        cp1 = &OMP_CLAUSE_CHAIN (nc);

        tree rprivb = create_tmp_var (ptype);
        nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
        OMP_CLAUSE_DECL (nc) = rprivb;
        OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
        *cp1 = nc;
        cp1 = &OMP_CLAUSE_CHAIN (nc);

        tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
        if (new_vard != new_var)
          TREE_ADDRESSABLE (var2) = 1;
        gimple_add_tmp_var (var2);

        tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
                             thread_num, integer_minus_one_node);
        x = fold_convert_loc (clause_loc, sizetype, x);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_convert_loc (clause_loc, sizetype, l);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
        x = fold_convert_loc (clause_loc, sizetype, x);
        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
        tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);

        x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
                             TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
        x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
        tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);

        tree var4 = is_for_simd ? new_var : var2;
        tree var5 = NULL_TREE, var6 = NULL_TREE;
        if (is_for_simd)
          {
            var5 = lookup_decl (var, input_simd_ctx);
            var6 = lookup_decl (var, scan_simd_ctx);
            if (new_vard != new_var)
              {
                var5 = build_simple_mem_ref_loc (clause_loc, var5);
                var6 = build_simple_mem_ref_loc (clause_loc, var6);
              }
          }
        if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
            tree val = var2;

            x = lang_hooks.decls.omp_clause_default_ctor
                  (c, var2, build_outer_var_ref (var, ctx));
            if (x)
              gimplify_and_add (x, &clist);

            x = build_outer_var_ref (var, ctx);
            x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
                                                       x);
            gimplify_and_add (x, &thr01_list);

            tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
                      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
            if (var3)
              {
                x = unshare_expr (var4);
                x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
                gimplify_and_add (x, &thrn1_list);
                x = unshare_expr (var4);
                x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
                gimplify_and_add (x, &thr02_list);
              }
            else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
              {
                /* Otherwise, assign to it the identity element.  */
                gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
                tseq = copy_gimple_seq_and_replace_locals (tseq);
                if (!is_for_simd)
                  {
                    if (new_vard != new_var)
                      val = build_fold_addr_expr_loc (clause_loc, val);
                    SET_DECL_VALUE_EXPR (new_vard, val);
                    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
                  }
                SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                lower_omp (&tseq, ctx);
                gimple_seq_add_seq (&thrn1_list, tseq);
                tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
                lower_omp (&tseq, ctx);
                gimple_seq_add_seq (&thr02_list, tseq);
                SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
                if (y)
                  SET_DECL_VALUE_EXPR (new_vard, y);
                else
                  {
                    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
                    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
                  }
              }

            x = unshare_expr (var4);
            x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
            gimplify_and_add (x, &thrn2_list);

            if (is_for_simd)
              {
                x = unshare_expr (rprivb_ref);
                x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
                gimplify_and_add (x, &scan1_list);
              }
            else
              {
                if (ctx->scan_exclusive)
                  {
                    x = unshare_expr (rprivb_ref);
                    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
                    gimplify_and_add (x, &scan1_list);
                  }

                gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
                tseq = copy_gimple_seq_and_replace_locals (tseq);
                SET_DECL_VALUE_EXPR (placeholder, var2);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                lower_omp (&tseq, ctx);
                gimple_seq_add_seq (&scan1_list, tseq);

                if (ctx->scan_inclusive)
                  {
                    x = unshare_expr (rprivb_ref);
                    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
                    gimplify_and_add (x, &scan1_list);
                  }
              }

            x = unshare_expr (rpriva_ref);
            x = lang_hooks.decls.omp_clause_assign_op (c, x,
                                                       unshare_expr (var4));
            gimplify_and_add (x, &mdlist);

            x = unshare_expr (is_for_simd ? var6 : new_var);
            x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
            gimplify_and_add (x, &input2_list);

            val = rprivb_ref;
            if (new_vard != new_var)
              val = build_fold_addr_expr_loc (clause_loc, val);

            gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
            tseq = copy_gimple_seq_and_replace_locals (tseq);
            SET_DECL_VALUE_EXPR (new_vard, val);
            DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
            if (is_for_simd)
              {
                SET_DECL_VALUE_EXPR (placeholder, var6);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
              }
            else
              DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
            lower_omp (&tseq, ctx);
            if (y)
              SET_DECL_VALUE_EXPR (new_vard, y);
            else
              {
                DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
                SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
              }
            if (!is_for_simd)
              {
                SET_DECL_VALUE_EXPR (placeholder, new_var);
                DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                lower_omp (&tseq, ctx);
              }
            gimple_seq_add_seq (&input2_list, tseq);

            x = build_outer_var_ref (var, ctx);
            x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
            gimplify_and_add (x, &last_list);

            x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
            gimplify_and_add (x, &reduc_list);
            tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
            tseq = copy_gimple_seq_and_replace_locals (tseq);
            val = rprival_ref;
            if (new_vard != new_var)
              val = build_fold_addr_expr_loc (clause_loc, val);
            SET_DECL_VALUE_EXPR (new_vard, val);
            DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
            SET_DECL_VALUE_EXPR (placeholder, var2);
            lower_omp (&tseq, ctx);
            OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
            SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
            DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
            if (y)
              SET_DECL_VALUE_EXPR (new_vard, y);
            else
              {
                DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
                SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
              }
            gimple_seq_add_seq (&reduc_list, tseq);
            x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
            gimplify_and_add (x, &reduc_list);

            x = lang_hooks.decls.omp_clause_dtor (c, var2);
            if (x)
              gimplify_and_add (x, dlist);
          }
        else
          {
            x = build_outer_var_ref (var, ctx);
            gimplify_assign (unshare_expr (var4), x, &thr01_list);

            x = omp_reduction_init (c, TREE_TYPE (new_var));
            gimplify_assign (unshare_expr (var4), unshare_expr (x),
                             &thrn1_list);
            gimplify_assign (unshare_expr (var4), x, &thr02_list);

            gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);

            enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
            if (code == MINUS_EXPR)
              code = PLUS_EXPR;

            if (is_for_simd)
              gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
            else
              {
                if (ctx->scan_exclusive)
                  gimplify_assign (unshare_expr (rprivb_ref), var2,
                                   &scan1_list);
                x = build2 (code, TREE_TYPE (new_var), var2, new_var);
                gimplify_assign (var2, x, &scan1_list);
                if (ctx->scan_inclusive)
                  gimplify_assign (unshare_expr (rprivb_ref), var2,
                                   &scan1_list);
              }

            gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
                             &mdlist);

            x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
            gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);

            gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
                             &last_list);

            x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
                        unshare_expr (rprival_ref));
            gimplify_assign (rprival_ref, x, &reduc_list);
          }
      }

  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (&scan1_list, g);
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
                                            ? scan_stmt4 : scan_stmt2), g);

  tree controlb = create_tmp_var (boolean_type_node);
  tree controlp = create_tmp_var (ptr_type_node);
  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);

  *cp1 = gimple_omp_for_clauses (stmt);
  gimple_omp_for_set_clauses (stmt, new_clauses1);
  *cp2 = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, new_clauses2);

  if (is_for_simd)
    {
      gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
      gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);

      gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
                            GSI_SAME_STMT);
      gsi_remove (&input3_gsi, true);
      gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
                            GSI_SAME_STMT);
      gsi_remove (&scan3_gsi, true);
      gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
                            GSI_SAME_STMT);
      gsi_remove (&input4_gsi, true);
      gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
                            GSI_SAME_STMT);
      gsi_remove (&scan4_gsi, true);
    }
  else
    {
      gimple_omp_set_body (scan_stmt1, scan1_list);
      gimple_omp_set_body (input_stmt2, input2_list);
    }

  gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
                        GSI_SAME_STMT);
  gsi_remove (&input1_gsi, true);
  gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
                        GSI_SAME_STMT);
  gsi_remove (&scan1_gsi, true);
  gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
                        GSI_SAME_STMT);
  gsi_remove (&input2_gsi, true);
  gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
                        GSI_SAME_STMT);
  gsi_remove (&scan2_gsi, true);

  gimple_seq_add_seq (body_p, clist);

  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr01_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn1_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  gimple_seq_add_stmt (body_p, stmt);
  gimple_seq_add_seq (body_p, body);
  gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
                                                          fd->loop.v));

  g = gimple_build_omp_return (true);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, mdlist);

  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  tree down = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  tree num_threadsu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
  gimple_seq_add_stmt (body_p, g);

  tree thread_numu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree thread_nump1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
                           build_int_cst (unsigned_type_node, 1));
  gimple_seq_add_stmt (body_p, g);

  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  tree twok = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab4);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab6);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab5);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
  g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
  gimple_call_set_lhs (g, cplx);
  gimple_seq_add_stmt (body_p, g);
  tree mul = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (mul, REALPART_EXPR,
                           build1 (REALPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);
  tree ovf = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (ovf, IMAGPART_EXPR,
                           build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);

  tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
                         lab7, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab7);
  gimple_seq_add_stmt (body_p, g);

  tree andv = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
                           build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
                         lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
                         lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
                           integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}

/* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
   the addresses of variables to be made private at the surrounding
   parallelism level.  Such functions appear in the gimple code stream in two
   forms, e.g. for a partitioned loop:

     .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
     .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
     .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
     .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);

   or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
   not as part of a HEAD_MARK sequence:

     .UNIQUE (OACC_PRIVATE, 0, 0, &w);

   For such stand-alone appearances, the 3rd argument is always 0, denoting
   gang partitioning.  */

static gcall *
lower_oacc_private_marker (omp_context *ctx)
{
  if (ctx->oacc_privatization_candidates.length () == 0)
    return NULL;

  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
  args.quick_push (integer_zero_node);
  args.quick_push (integer_minus_one_node);

  int i;
  tree decl;
  FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
    {
      gcc_checking_assert (TREE_ADDRESSABLE (decl));
      tree addr = build_fold_addr_expr (decl);
      args.safe_push (addr);
    }

  return gimple_build_call_internal_vec (IFN_UNIQUE, args);
}
/* Lower code for an OMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no longer a member of a
     sequence, as we're going to add to a different one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declarations of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
        = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
        oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
         keep them on the inner_bind and its block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
        BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
         and then (fd.collapse - 1) temporaries with the same
         type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
        = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
           || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (fd.collapse > 1
          && fd.non_rect
          && fd.last_nonrect == fd.first_nonrect + 1
          && TREE_CODE (fd.loop.n2) != INTEGER_CST)
        if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
          if (!TYPE_UNSIGNED (TREE_TYPE (v)))
            {
              v = gimple_omp_for_index (stmt, fd.first_nonrect);
              type2 = TREE_TYPE (v);
              count++;
              count2 = 3;
            }
      if (taskreg_for)
        outerc
          = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
                             OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
        simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
                                 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
        {
          tree temp;
          if (taskreg_for)
            {
              gcc_assert (outerc);
              temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
              outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
                                        OMP_CLAUSE__LOOPTEMP_);
            }
          else
            {
              /* If there are 2 adjacent SIMD stmts, one with _simt_
                 clause, another without, make sure they have the same
                 decls in _looptemp_ clauses, because the outer stmt
                 they are combined into will look up just one inner_stmt.  */
              if (ctx->simt_stmt)
                temp = OMP_CLAUSE_DECL (simtc);
              else
                temp = create_tmp_var (i >= count ? type2 : type);
              insert_decl_map (&ctx->outer->cb, temp, temp);
            }
          *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
          OMP_CLAUSE_DECL (*pc) = temp;
          pc = &OMP_CLAUSE_CHAIN (*pc);
          if (ctx->simt_stmt)
            simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
                                     OMP_CLAUSE__LOOPTEMP_);
        }
      *pc = clauses;
    }
11690
11691 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
11692 dlist = NULL;
11693 body = NULL;
11694 tree rclauses
11695 = omp_task_reductions_find_first (clauses: gimple_omp_for_clauses (gs: stmt), code: OMP_FOR,
11696 ccode: OMP_CLAUSE_REDUCTION);
11697 tree rtmp = NULL_TREE;
11698 if (rclauses)
11699 {
11700 tree type = build_pointer_type (pointer_sized_int_node);
11701 tree temp = create_tmp_var (type);
11702 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
11703 OMP_CLAUSE_DECL (c) = temp;
11704 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gs: stmt);
11705 gimple_omp_for_set_clauses (gs: stmt, clauses: c);
11706 lower_omp_task_reductions (ctx, code: OMP_FOR,
11707 clauses: gimple_omp_for_clauses (gs: stmt),
11708 start: &tred_ilist, end: &tred_dlist);
11709 rclauses = c;
11710 rtmp = make_ssa_name (var: type);
11711 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
11712 }
11713
11714 lower_lastprivate_conditional_clauses (clauses: gimple_omp_for_clauses_ptr (gs: stmt),
11715 ctx);
11716
11717 lower_rec_input_clauses (clauses: gimple_omp_for_clauses (gs: stmt), ilist: &body, dlist: &dlist, ctx,
11718 fd: fdp);
11719 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
11720 gimple_omp_for_pre_body (gs: stmt));
11721
11722 lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);
11723
11724 gcall *private_marker = NULL;
11725 if (is_gimple_omp_oacc (stmt: ctx->stmt)
11726 && !gimple_seq_empty_p (s: omp_for_body))
11727 private_marker = lower_oacc_private_marker (ctx);
11728
11729 /* Lower the header expressions. At this point, we can assume that
11730 the header is of the form:
11731
11732 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
11733
11734 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
11735 using the .omp_data_s mapping, if needed. */
11736 for (i = 0; i < gimple_omp_for_collapse (gs: stmt); i++)
11737 {
11738 rhs_p = gimple_omp_for_initial_ptr (gs: stmt, i);
11739 if (TREE_CODE (*rhs_p) == TREE_VEC)
11740 {
11741 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11742 TREE_VEC_ELT (*rhs_p, 1)
11743 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11744 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11745 TREE_VEC_ELT (*rhs_p, 2)
11746 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11747 }
11748 else if (!is_gimple_min_invariant (*rhs_p))
11749 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11750 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11751 recompute_tree_invariant_for_addr_expr (*rhs_p);
11752
11753 rhs_p = gimple_omp_for_final_ptr (gs: stmt, i);
11754 if (TREE_CODE (*rhs_p) == TREE_VEC)
11755 {
11756 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
11757 TREE_VEC_ELT (*rhs_p, 1)
11758 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
11759 if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
11760 TREE_VEC_ELT (*rhs_p, 2)
11761 = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
11762 }
11763 else if (!is_gimple_min_invariant (*rhs_p))
11764 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11765 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
11766 recompute_tree_invariant_for_addr_expr (*rhs_p);
11767
11768 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
11769 if (!is_gimple_min_invariant (*rhs_p))
11770 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
11771 }
11772 if (rclauses)
11773 gimple_seq_add_seq (&tred_ilist, cnt_list);
11774 else
11775 gimple_seq_add_seq (&body, cnt_list);
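/* Illustrative example: a loop bound that is not gimple-invariant, as in

     #pragma omp for
     for (i = 0; i < n * 4; i++)

   has "n * 4" replaced above by a formal temporary, roughly

     D.1234 = n * 4;

   emitted into CNT_LIST, so the GIMPLE_OMP_FOR header only refers to
   gimple values (D.1234 is a made-up name).  */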
11776
11777 /* Once lowered, extract the bounds and clauses. */
11778 omp_extract_for_data (for_stmt: stmt, fd: &fd, NULL);
11779
11780 if (is_gimple_omp_oacc (stmt: ctx->stmt)
11781 && !ctx_in_oacc_kernels_region (ctx))
11782 lower_oacc_head_tail (loc: gimple_location (g: stmt),
11783 clauses: gimple_omp_for_clauses (gs: stmt), private_marker,
11784 head: &oacc_head, tail: &oacc_tail, ctx);
11785
11786 /* Add OpenACC partitioning and reduction markers just before the loop. */
11787 if (oacc_head)
11788 gimple_seq_add_seq (&body, oacc_head);
11789
11790 lower_omp_for_lastprivate (fd: &fd, body_p: &body, dlist: &dlist, clist: &clist, ctx);
11791
11792 if (gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR)
11793 for (tree c = gimple_omp_for_clauses (gs: stmt); c; c = OMP_CLAUSE_CHAIN (c))
11794 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11795 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11796 {
11797 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
11798 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
11799 OMP_CLAUSE_LINEAR_STEP (c)
11800 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
11801 ctx);
11802 }
11803
11804 if ((ctx->scan_inclusive || ctx->scan_exclusive)
11805 && gimple_omp_for_kind (g: stmt) == GF_OMP_FOR_KIND_FOR)
11806 lower_omp_for_scan (body_p: &body, dlist: &dlist, stmt, fd: &fd, ctx);
11807 else
11808 {
11809 gimple_seq_add_stmt (&body, stmt);
11810 gimple_seq_add_seq (&body, gimple_omp_body (gs: stmt));
11811 }
11812
11813 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
11814 fd.loop.v));
11815
11816 /* After the loop, add exit clauses. */
11817 lower_reduction_clauses (clauses: gimple_omp_for_clauses (gs: stmt), stmt_seqp: &body, clist: &clist, ctx);
11818
11819 if (clist)
11820 {
11821 tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_START);
11822 gcall *g = gimple_build_call (fndecl, 0);
11823 gimple_seq_add_stmt (&body, g);
11824 gimple_seq_add_seq (&body, clist);
11825 fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ATOMIC_END);
11826 g = gimple_build_call (fndecl, 0);
11827 gimple_seq_add_stmt (&body, g);
11828 }
11829
11830 if (ctx->cancellable)
11831 gimple_seq_add_stmt (&body, gimple_build_label (label: ctx->cancel_label));
11832
11833 gimple_seq_add_seq (&body, dlist);
11834
11835 if (rclauses)
11836 {
11837 gimple_seq_add_seq (&tred_ilist, body);
11838 body = tred_ilist;
11839 }
11840
11841 body = maybe_catch_exception (body);
11842
11843 /* Region exit marker goes at the end of the loop body. */
11844 gimple *g = gimple_build_omp_return (fd.have_nowait);
11845 gimple_seq_add_stmt (&body, g);
11846
11847 gimple_seq_add_seq (&body, tred_dlist);
11848
11849 maybe_add_implicit_barrier_cancel (ctx, omp_return: g, body: &body);
11850
11851 if (rclauses)
11852 OMP_CLAUSE_DECL (rclauses) = rtmp;
11853
11854 /* Add OpenACC joining and reduction markers just after the loop. */
11855 if (oacc_tail)
11856 gimple_seq_add_seq (&body, oacc_tail);
11857
11858 pop_gimplify_context (new_stmt);
11859
11860 gimple_bind_append_vars (bind_stmt: new_stmt, vars: ctx->block_vars);
11861 maybe_remove_omp_member_access_dummy_vars (bind: new_stmt);
11862 BLOCK_VARS (block) = gimple_bind_vars (bind_stmt: new_stmt);
11863 if (BLOCK_VARS (block))
11864 TREE_USED (block) = 1;
11865
11866 gimple_bind_set_body (bind_stmt: new_stmt, seq: body);
11867 gimple_omp_set_body (gs: stmt, NULL);
11868 gimple_omp_for_set_pre_body (gs: stmt, NULL);
11869}
11870
11871/* Callback for walk_stmts. Check if the current statement only contains
11872 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11873
11874static tree
11875check_combined_parallel (gimple_stmt_iterator *gsi_p,
11876 bool *handled_ops_p,
11877 struct walk_stmt_info *wi)
11878{
11879 int *info = (int *) wi->info;
11880 gimple *stmt = gsi_stmt (i: *gsi_p);
11881
11882 *handled_ops_p = true;
11883 switch (gimple_code (g: stmt))
11884 {
11885 WALK_SUBSTMTS;
11886
11887 case GIMPLE_DEBUG:
11888 break;
11889 case GIMPLE_OMP_FOR:
11890 case GIMPLE_OMP_SECTIONS:
11891 *info = *info == 0 ? 1 : -1;
11892 break;
11893 default:
11894 *info = -1;
11895 break;
11896 }
11897 return NULL;
11898}
11899
11900struct omp_taskcopy_context
11901{
11902 /* This field must be at the beginning, as we do "inheritance": Some
11903 callback functions for tree-inline.cc (e.g., omp_copy_decl)
11904 receive a copy_body_data pointer that is up-casted to an
11905 omp_context pointer. */
11906 copy_body_data cb;
11907 omp_context *ctx;
11908};
11909
11910static tree
11911task_copyfn_copy_decl (tree var, copy_body_data *cb)
11912{
11913 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11914
11915 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11916 return create_tmp_var (TREE_TYPE (var));
11917
11918 return var;
11919}
11920
11921static tree
11922task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11923{
11924 tree name, new_fields = NULL, type, f;
11925
11926 type = lang_hooks.types.make_type (RECORD_TYPE);
11927 name = DECL_NAME (TYPE_NAME (orig_type));
11928 name = build_decl (gimple_location (g: tcctx->ctx->stmt),
11929 TYPE_DECL, name, type);
11930 TYPE_NAME (type) = name;
11931
11932 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11933 {
11934 tree new_f = copy_node (f);
11935 DECL_CONTEXT (new_f) = type;
11936 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), id: &tcctx->cb);
11937 TREE_CHAIN (new_f) = new_fields;
11938 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11939 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11940 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11941 &tcctx->cb, NULL);
11942 new_fields = new_f;
11943 tcctx->cb.decl_map->put (k: f, v: new_f);
11944 }
11945 TYPE_FIELDS (type) = nreverse (new_fields);
11946 layout_type (type);
11947 return type;
11948}
11949
11950/* Create task copyfn. */
11951
11952static void
11953create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11954{
11955 struct function *child_cfun;
11956 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11957 tree record_type, srecord_type, bind, list;
11958 bool record_needs_remap = false, srecord_needs_remap = false;
11959 splay_tree_node n;
11960 struct omp_taskcopy_context tcctx;
11961 location_t loc = gimple_location (g: task_stmt);
11962 size_t looptempno = 0;
11963
11964 child_fn = gimple_omp_task_copy_fn (gs: task_stmt);
11965 task_cpyfns.safe_push (obj: task_stmt);
11966 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11967 gcc_assert (child_cfun->cfg == NULL);
11968 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11969
11970 /* Reset DECL_CONTEXT on function arguments. */
11971 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11972 DECL_CONTEXT (t) = child_fn;
11973
11974 /* Populate the function. */
11975 push_gimplify_context ();
11976 push_cfun (new_cfun: child_cfun);
11977
11978 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11979 TREE_SIDE_EFFECTS (bind) = 1;
11980 list = NULL;
11981 DECL_SAVED_TREE (child_fn) = bind;
11982 DECL_SOURCE_LOCATION (child_fn) = gimple_location (g: task_stmt);
11983
11984 /* Remap src and dst argument types if needed. */
11985 record_type = ctx->record_type;
11986 srecord_type = ctx->srecord_type;
11987 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11988 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11989 {
11990 record_needs_remap = true;
11991 break;
11992 }
11993 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11994 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11995 {
11996 srecord_needs_remap = true;
11997 break;
11998 }
11999
12000 if (record_needs_remap || srecord_needs_remap)
12001 {
12002 memset (s: &tcctx, c: '\0', n: sizeof (tcctx));
12003 tcctx.cb.src_fn = ctx->cb.src_fn;
12004 tcctx.cb.dst_fn = child_fn;
12005 tcctx.cb.src_node = cgraph_node::get (decl: tcctx.cb.src_fn);
12006 gcc_checking_assert (tcctx.cb.src_node);
12007 tcctx.cb.dst_node = tcctx.cb.src_node;
12008 tcctx.cb.src_cfun = ctx->cb.src_cfun;
12009 tcctx.cb.copy_decl = task_copyfn_copy_decl;
12010 tcctx.cb.eh_lp_nr = 0;
12011 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
12012 tcctx.cb.decl_map = new hash_map<tree, tree>;
12013 tcctx.ctx = ctx;
12014
12015 if (record_needs_remap)
12016 record_type = task_copyfn_remap_type (tcctx: &tcctx, orig_type: record_type);
12017 if (srecord_needs_remap)
12018 srecord_type = task_copyfn_remap_type (tcctx: &tcctx, orig_type: srecord_type);
12019 }
12020 else
12021 tcctx.cb.decl_map = NULL;
12022
12023 arg = DECL_ARGUMENTS (child_fn);
12024 TREE_TYPE (arg) = build_pointer_type (record_type);
12025 sarg = DECL_CHAIN (arg);
12026 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
12027
12028 /* First pass: initialize temporaries used in record_type and srecord_type
12029 sizes and field offsets. */
12030 if (tcctx.cb.decl_map)
12031 for (c = gimple_omp_task_clauses (gs: task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12032 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12033 {
12034 tree *p;
12035
12036 decl = OMP_CLAUSE_DECL (c);
12037 p = tcctx.cb.decl_map->get (k: decl);
12038 if (p == NULL)
12039 continue;
12040 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12041 sf = (tree) n->value;
12042 sf = *tcctx.cb.decl_map->get (k: sf);
12043 src = build_simple_mem_ref_loc (loc, sarg);
12044 src = omp_build_component_ref (obj: src, field: sf);
12045 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12046 append_to_statement_list (t, &list);
12047 }
12048
12049 /* Second pass: copy shared var pointers and copy construct non-VLA
12050 firstprivate vars. */
12051 for (c = gimple_omp_task_clauses (gs: task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12052 switch (OMP_CLAUSE_CODE (c))
12053 {
12054 splay_tree_key key;
12055 case OMP_CLAUSE_SHARED:
12056 decl = OMP_CLAUSE_DECL (c);
12057 key = (splay_tree_key) decl;
12058 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12059 key = (splay_tree_key) &DECL_UID (decl);
12060 n = splay_tree_lookup (ctx->field_map, key);
12061 if (n == NULL)
12062 break;
12063 f = (tree) n->value;
12064 if (tcctx.cb.decl_map)
12065 f = *tcctx.cb.decl_map->get (k: f);
12066 n = splay_tree_lookup (ctx->sfield_map, key);
12067 sf = (tree) n->value;
12068 if (tcctx.cb.decl_map)
12069 sf = *tcctx.cb.decl_map->get (k: sf);
12070 src = build_simple_mem_ref_loc (loc, sarg);
12071 src = omp_build_component_ref (obj: src, field: sf);
12072 dst = build_simple_mem_ref_loc (loc, arg);
12073 dst = omp_build_component_ref (obj: dst, field: f);
12074 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12075 append_to_statement_list (t, &list);
12076 break;
12077 case OMP_CLAUSE_REDUCTION:
12078 case OMP_CLAUSE_IN_REDUCTION:
12079 decl = OMP_CLAUSE_DECL (c);
12080 if (TREE_CODE (decl) == MEM_REF)
12081 {
12082 decl = TREE_OPERAND (decl, 0);
12083 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12084 decl = TREE_OPERAND (decl, 0);
12085 if (TREE_CODE (decl) == INDIRECT_REF
12086 || TREE_CODE (decl) == ADDR_EXPR)
12087 decl = TREE_OPERAND (decl, 0);
12088 }
12089 key = (splay_tree_key) decl;
12090 n = splay_tree_lookup (ctx->field_map, key);
12091 if (n == NULL)
12092 break;
12093 f = (tree) n->value;
12094 if (tcctx.cb.decl_map)
12095 f = *tcctx.cb.decl_map->get (k: f);
12096 n = splay_tree_lookup (ctx->sfield_map, key);
12097 sf = (tree) n->value;
12098 if (tcctx.cb.decl_map)
12099 sf = *tcctx.cb.decl_map->get (k: sf);
12100 src = build_simple_mem_ref_loc (loc, sarg);
12101 src = omp_build_component_ref (obj: src, field: sf);
12102 if (decl != OMP_CLAUSE_DECL (c)
12103 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12104 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12105 src = build_simple_mem_ref_loc (loc, src);
12106 dst = build_simple_mem_ref_loc (loc, arg);
12107 dst = omp_build_component_ref (obj: dst, field: f);
12108 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12109 append_to_statement_list (t, &list);
12110 break;
12111 case OMP_CLAUSE__LOOPTEMP_:
12112 /* Fields for first two _looptemp_ clauses are initialized by
12113 GOMP_taskloop*, the rest are handled like firstprivate. */
12114 if (looptempno < 2)
12115 {
12116 looptempno++;
12117 break;
12118 }
12119 /* FALLTHRU */
12120 case OMP_CLAUSE__REDUCTEMP_:
12121 case OMP_CLAUSE_FIRSTPRIVATE:
12122 decl = OMP_CLAUSE_DECL (c);
12123 if (is_variable_sized (expr: decl))
12124 break;
12125 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12126 if (n == NULL)
12127 break;
12128 f = (tree) n->value;
12129 if (tcctx.cb.decl_map)
12130 f = *tcctx.cb.decl_map->get (k: f);
12131 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12132 if (n != NULL)
12133 {
12134 sf = (tree) n->value;
12135 if (tcctx.cb.decl_map)
12136 sf = *tcctx.cb.decl_map->get (k: sf);
12137 src = build_simple_mem_ref_loc (loc, sarg);
12138 src = omp_build_component_ref (obj: src, field: sf);
12139 if (use_pointer_for_field (decl, NULL)
12140 || omp_privatize_by_reference (decl))
12141 src = build_simple_mem_ref_loc (loc, src);
12142 }
12143 else
12144 src = decl;
12145 dst = build_simple_mem_ref_loc (loc, arg);
12146 dst = omp_build_component_ref (obj: dst, field: f);
12147 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12148 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12149 else
12150 {
12151 if (ctx->allocate_map)
12152 if (tree *allocatorp = ctx->allocate_map->get (k: decl))
12153 {
12154 tree allocator = *allocatorp;
12155 HOST_WIDE_INT ialign = 0;
12156 if (TREE_CODE (allocator) == TREE_LIST)
12157 {
12158 ialign = tree_to_uhwi (TREE_VALUE (allocator));
12159 allocator = TREE_PURPOSE (allocator);
12160 }
12161 if (TREE_CODE (allocator) != INTEGER_CST)
12162 {
12163 n = splay_tree_lookup (ctx->sfield_map,
12164 (splay_tree_key) allocator);
12165 allocator = (tree) n->value;
12166 if (tcctx.cb.decl_map)
12167 allocator = *tcctx.cb.decl_map->get (k: allocator);
12168 tree a = build_simple_mem_ref_loc (loc, sarg);
12169 allocator = omp_build_component_ref (obj: a, field: allocator);
12170 }
12171 allocator = fold_convert (pointer_sized_int_node, allocator);
12172 tree a = builtin_decl_explicit (fncode: BUILT_IN_GOMP_ALLOC);
12173 tree align = build_int_cst (size_type_node,
12174 MAX (ialign,
12175 DECL_ALIGN_UNIT (decl)));
12176 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12177 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12178 allocator);
12179 ptr = fold_convert (TREE_TYPE (dst), ptr);
12180 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12181 append_to_statement_list (t, &list);
12182 dst = build_simple_mem_ref_loc (loc, dst);
12183 }
12184 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12185 }
12186 append_to_statement_list (t, &list);
12187 break;
12188 case OMP_CLAUSE_PRIVATE:
12189 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12190 break;
12191 decl = OMP_CLAUSE_DECL (c);
12192 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12193 f = (tree) n->value;
12194 if (tcctx.cb.decl_map)
12195 f = *tcctx.cb.decl_map->get (k: f);
12196 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12197 if (n != NULL)
12198 {
12199 sf = (tree) n->value;
12200 if (tcctx.cb.decl_map)
12201 sf = *tcctx.cb.decl_map->get (k: sf);
12202 src = build_simple_mem_ref_loc (loc, sarg);
12203 src = omp_build_component_ref (obj: src, field: sf);
12204 if (use_pointer_for_field (decl, NULL))
12205 src = build_simple_mem_ref_loc (loc, src);
12206 }
12207 else
12208 src = decl;
12209 dst = build_simple_mem_ref_loc (loc, arg);
12210 dst = omp_build_component_ref (obj: dst, field: f);
12211 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12212 append_to_statement_list (t, &list);
12213 break;
12214 default:
12215 break;
12216 }
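/* E.g. (sketch) for "#pragma omp task shared(x) firstprivate(y)", the
   OMP_CLAUSE_SHARED case above emits "dst->x = src->x;" (copying the
   pointer field), while OMP_CLAUSE_FIRSTPRIVATE goes through
   lang_hooks.decls.omp_clause_copy_ctor, which degenerates to
   "dst->y = src->y;" for trivially copyable types.  */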
12217
12218 /* Last pass: handle VLA firstprivates. */
12219 if (tcctx.cb.decl_map)
12220 for (c = gimple_omp_task_clauses (gs: task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12221 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12222 {
12223 tree ind, ptr, df;
12224
12225 decl = OMP_CLAUSE_DECL (c);
12226 if (!is_variable_sized (expr: decl))
12227 continue;
12228 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12229 if (n == NULL)
12230 continue;
12231 f = (tree) n->value;
12232 f = *tcctx.cb.decl_map->get (k: f);
12233 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12234 ind = DECL_VALUE_EXPR (decl);
12235 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12236 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12237 n = splay_tree_lookup (ctx->sfield_map,
12238 (splay_tree_key) TREE_OPERAND (ind, 0));
12239 sf = (tree) n->value;
12240 sf = *tcctx.cb.decl_map->get (k: sf);
12241 src = build_simple_mem_ref_loc (loc, sarg);
12242 src = omp_build_component_ref (obj: src, field: sf);
12243 src = build_simple_mem_ref_loc (loc, src);
12244 dst = build_simple_mem_ref_loc (loc, arg);
12245 dst = omp_build_component_ref (obj: dst, field: f);
12246 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12247 append_to_statement_list (t, &list);
12248 n = splay_tree_lookup (ctx->field_map,
12249 (splay_tree_key) TREE_OPERAND (ind, 0));
12250 df = (tree) n->value;
12251 df = *tcctx.cb.decl_map->get (k: df);
12252 ptr = build_simple_mem_ref_loc (loc, arg);
12253 ptr = omp_build_component_ref (obj: ptr, field: df);
12254 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12255 build_fold_addr_expr_loc (loc, dst));
12256 append_to_statement_list (t, &list);
12257 }
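/* Sketch for a VLA V whose DECL_VALUE_EXPR is "*V.p": the loop above
   copy-constructs the data into the destination record and then stores

     dst->V.p_field = &dst->V_data_field;

   (made-up field names) so that the task body's "*V.p" dereference
   finds the private copy.  */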
12258
12259 t = build1 (RETURN_EXPR, void_type_node, NULL);
12260 append_to_statement_list (t, &list);
12261
12262 if (tcctx.cb.decl_map)
12263 delete tcctx.cb.decl_map;
12264 pop_gimplify_context (NULL);
12265 BIND_EXPR_BODY (bind) = list;
12266 pop_cfun ();
12267}
12268
12269static void
12270lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12271{
12272 tree c, clauses;
12273 gimple *g;
12274 size_t cnt[5] = { 0, 0, 0, 0, 0 }, idx = 2, i;
12275
12276 clauses = omp_find_clause (clauses: *pclauses, kind: OMP_CLAUSE_DEPEND);
12277 gcc_assert (clauses);
12278 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12279 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12280 switch (OMP_CLAUSE_DEPEND_KIND (c))
12281 {
12282 case OMP_CLAUSE_DEPEND_LAST:
12283 /* Lowering already done at gimplification. */
12284 return;
12285 case OMP_CLAUSE_DEPEND_IN:
12286 cnt[2]++;
12287 break;
12288 case OMP_CLAUSE_DEPEND_OUT:
12289 case OMP_CLAUSE_DEPEND_INOUT:
12290 cnt[0]++;
12291 break;
12292 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12293 cnt[1]++;
12294 break;
12295 case OMP_CLAUSE_DEPEND_DEPOBJ:
12296 cnt[3]++;
12297 break;
12298 case OMP_CLAUSE_DEPEND_INOUTSET:
12299 cnt[4]++;
12300 break;
12301 default:
12302 gcc_unreachable ();
12303 }
12304 if (cnt[1] || cnt[3] || cnt[4])
12305 idx = 5;
12306 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3] + cnt[4];
12307 size_t inoutidx = total + idx;
12308 tree type = build_array_type_nelts (ptr_type_node, total + idx + 2 * cnt[4]);
12309 tree array = create_tmp_var (type);
12310 TREE_ADDRESSABLE (array) = 1;
12311 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12312 NULL_TREE);
12313 if (idx == 5)
12314 {
12315 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12316 gimple_seq_add_stmt (iseq, g);
12317 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12318 NULL_TREE);
12319 }
12320 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12321 gimple_seq_add_stmt (iseq, g);
12322 for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12323 {
12324 r = build4 (ARRAY_REF, ptr_type_node, array,
12325 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12326 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12327 gimple_seq_add_stmt (iseq, g);
12328 }
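/* Resulting layout (sketch).  Legacy format (IDX == 2):

     array[0] = total number of depend addresses
     array[1] = number of out/inout addresses
     array[2..] = the addresses, out/inout entries first

   Extended format (IDX == 5) starts with a 0 marker, then the total,
   then the out/inout, mutexinoutset and in counts; depobj and inoutset
   entries follow the other addresses, each inoutset entry being a
   pointer to a trailing { address, GOMP_DEPEND_INOUTSET } pair filled
   in below.  */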
12329 for (i = 0; i < 5; i++)
12330 {
12331 if (cnt[i] == 0)
12332 continue;
12333 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12334 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12335 continue;
12336 else
12337 {
12338 switch (OMP_CLAUSE_DEPEND_KIND (c))
12339 {
12340 case OMP_CLAUSE_DEPEND_IN:
12341 if (i != 2)
12342 continue;
12343 break;
12344 case OMP_CLAUSE_DEPEND_OUT:
12345 case OMP_CLAUSE_DEPEND_INOUT:
12346 if (i != 0)
12347 continue;
12348 break;
12349 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12350 if (i != 1)
12351 continue;
12352 break;
12353 case OMP_CLAUSE_DEPEND_DEPOBJ:
12354 if (i != 3)
12355 continue;
12356 break;
12357 case OMP_CLAUSE_DEPEND_INOUTSET:
12358 if (i != 4)
12359 continue;
12360 break;
12361 default:
12362 gcc_unreachable ();
12363 }
12364 tree t = OMP_CLAUSE_DECL (c);
12365 if (i == 4)
12366 {
12367 t = build4 (ARRAY_REF, ptr_type_node, array,
12368 size_int (inoutidx), NULL_TREE, NULL_TREE);
12369 t = build_fold_addr_expr (t);
12370 inoutidx += 2;
12371 }
12372 t = fold_convert (ptr_type_node, t);
12373 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12374 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12375 NULL_TREE, NULL_TREE);
12376 g = gimple_build_assign (r, t);
12377 gimple_seq_add_stmt (iseq, g);
12378 }
12379 }
12380 if (cnt[4])
12381 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12382 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12383 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_INOUTSET)
12384 {
12385 tree t = OMP_CLAUSE_DECL (c);
12386 t = fold_convert (ptr_type_node, t);
12387 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12388 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12389 NULL_TREE, NULL_TREE);
12390 g = gimple_build_assign (r, t);
12391 gimple_seq_add_stmt (iseq, g);
12392 t = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
12393 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12394 NULL_TREE, NULL_TREE);
12395 g = gimple_build_assign (r, t);
12396 gimple_seq_add_stmt (iseq, g);
12397 }
12398
12399 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12400 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12401 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12402 OMP_CLAUSE_CHAIN (c) = *pclauses;
12403 *pclauses = c;
12404 tree clobber = build_clobber (type);
12405 g = gimple_build_assign (array, clobber);
12406 gimple_seq_add_stmt (oseq, g);
12407}
12408
12409/* Lower the OpenMP parallel or task directive in the current statement
12410 in GSI_P. CTX holds context information for the directive. */
12411
12412static void
12413lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12414{
12415 tree clauses;
12416 tree child_fn, t;
12417 gimple *stmt = gsi_stmt (i: *gsi_p);
12418 gbind *par_bind, *bind, *dep_bind = NULL;
12419 gimple_seq par_body;
12420 location_t loc = gimple_location (g: stmt);
12421
12422 clauses = gimple_omp_taskreg_clauses (gs: stmt);
12423 if (gimple_code (g: stmt) == GIMPLE_OMP_TASK
12424 && gimple_omp_task_taskwait_p (g: stmt))
12425 {
12426 par_bind = NULL;
12427 par_body = NULL;
12428 }
12429 else
12430 {
12431 par_bind
12432 = as_a <gbind *> (p: gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)));
12433 par_body = gimple_bind_body (gs: par_bind);
12434 }
12435 child_fn = ctx->cb.dst_fn;
12436 if (gimple_code (g: stmt) == GIMPLE_OMP_PARALLEL
12437 && !gimple_omp_parallel_combined_p (g: stmt))
12438 {
12439 struct walk_stmt_info wi;
12440 int ws_num = 0;
12441
12442 memset (s: &wi, c: 0, n: sizeof (wi));
12443 wi.info = &ws_num;
12444 wi.val_only = true;
12445 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12446 if (ws_num == 1)
12447 gimple_omp_parallel_set_combined_p (g: stmt, combined_p: true);
12448 }
12449 gimple_seq dep_ilist = NULL;
12450 gimple_seq dep_olist = NULL;
12451 if (gimple_code (g: stmt) == GIMPLE_OMP_TASK
12452 && omp_find_clause (clauses, kind: OMP_CLAUSE_DEPEND))
12453 {
12454 push_gimplify_context ();
12455 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12456 lower_depend_clauses (pclauses: gimple_omp_task_clauses_ptr (gs: stmt),
12457 iseq: &dep_ilist, oseq: &dep_olist);
12458 }
12459
12460 if (gimple_code (g: stmt) == GIMPLE_OMP_TASK
12461 && gimple_omp_task_taskwait_p (g: stmt))
12462 {
12463 if (dep_bind)
12464 {
12465 gsi_replace (gsi_p, dep_bind, true);
12466 gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_ilist);
12467 gimple_bind_add_stmt (bind_stmt: dep_bind, stmt);
12468 gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_olist);
12469 pop_gimplify_context (dep_bind);
12470 }
12471 return;
12472 }
12473
12474 if (ctx->srecord_type)
12475 create_task_copyfn (task_stmt: as_a <gomp_task *> (p: stmt), ctx);
12476
12477 gimple_seq tskred_ilist = NULL;
12478 gimple_seq tskred_olist = NULL;
12479 if ((is_task_ctx (ctx)
12480 && gimple_omp_task_taskloop_p (g: ctx->stmt)
12481 && omp_find_clause (clauses: gimple_omp_task_clauses (gs: ctx->stmt),
12482 kind: OMP_CLAUSE_REDUCTION))
12483 || (is_parallel_ctx (ctx)
12484 && omp_find_clause (clauses: gimple_omp_parallel_clauses (gs: stmt),
12485 kind: OMP_CLAUSE__REDUCTEMP_)))
12486 {
12487 if (dep_bind == NULL)
12488 {
12489 push_gimplify_context ();
12490 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12491 }
12492 lower_omp_task_reductions (ctx, code: is_task_ctx (ctx) ? OMP_TASKLOOP
12493 : OMP_PARALLEL,
12494 clauses: gimple_omp_taskreg_clauses (gs: ctx->stmt),
12495 start: &tskred_ilist, end: &tskred_olist);
12496 }
12497
12498 push_gimplify_context ();
12499
12500 gimple_seq par_olist = NULL;
12501 gimple_seq par_ilist = NULL;
12502 gimple_seq par_rlist = NULL;
12503 lower_rec_input_clauses (clauses, ilist: &par_ilist, dlist: &par_olist, ctx, NULL);
12504 lower_omp (&par_body, ctx);
12505 if (gimple_code (g: stmt) != GIMPLE_OMP_TASK)
12506 lower_reduction_clauses (clauses, stmt_seqp: &par_rlist, NULL, ctx);
12507
12508 /* Declare all the variables created by mapping and the variables
12509 declared in the scope of the parallel body. */
12510 record_vars_into (ctx->block_vars, child_fn);
12511 maybe_remove_omp_member_access_dummy_vars (bind: par_bind);
12512 record_vars_into (gimple_bind_vars (bind_stmt: par_bind), child_fn);
12513
12514 if (ctx->record_type)
12515 {
12516 ctx->sender_decl
12517 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12518 : ctx->record_type, ".omp_data_o");
12519 DECL_NAMELESS (ctx->sender_decl) = 1;
12520 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12521 gimple_omp_taskreg_set_data_arg (gs: stmt, data_arg: ctx->sender_decl);
12522 }
12523
12524 gimple_seq olist = NULL;
12525 gimple_seq ilist = NULL;
12526 lower_send_clauses (clauses, ilist: &ilist, olist: &olist, ctx);
12527 lower_send_shared_vars (ilist: &ilist, olist: &olist, ctx);
12528
12529 if (ctx->record_type)
12530 {
12531 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12532 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12533 clobber));
12534 }
12535
12536 /* Once all the expansions are done, sequence all the different
12537 fragments inside gimple_omp_body. */
12538
12539 gimple_seq new_body = NULL;
12540
12541 if (ctx->record_type)
12542 {
12543 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12544 /* fixup_child_record_type might have changed receiver_decl's type. */
12545 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12546 gimple_seq_add_stmt (&new_body,
12547 gimple_build_assign (ctx->receiver_decl, t));
12548 }
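/* In effect (sketch): the encountering thread fills .omp_data_o and
   passes its address to the runtime (e.g. GOMP_parallel); the receiver
   assignment built above amounts to

     .omp_data_i = (struct .omp_data_s *) &.omp_data_o;

   after which the body accesses shared variables through .omp_data_i.  */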
12549
12550 gimple_seq_add_seq (&new_body, par_ilist);
12551 gimple_seq_add_seq (&new_body, par_body);
12552 gimple_seq_add_seq (&new_body, par_rlist);
12553 if (ctx->cancellable)
12554 gimple_seq_add_stmt (&new_body, gimple_build_label (label: ctx->cancel_label));
12555 gimple_seq_add_seq (&new_body, par_olist);
12556 new_body = maybe_catch_exception (body: new_body);
12557 if (gimple_code (g: stmt) == GIMPLE_OMP_TASK)
12558 gimple_seq_add_stmt (&new_body,
12559 gimple_build_omp_continue (integer_zero_node,
12560 integer_zero_node));
12561 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12562 gimple_omp_set_body (gs: stmt, body: new_body);
12563
12564 if (dep_bind && gimple_bind_block (bind_stmt: par_bind) == NULL_TREE)
12565 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12566 else
12567 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (bind_stmt: par_bind));
12568 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12569 gimple_bind_add_seq (bind_stmt: bind, seq: ilist);
12570 gimple_bind_add_stmt (bind_stmt: bind, stmt);
12571 gimple_bind_add_seq (bind_stmt: bind, seq: olist);
12572
12573 pop_gimplify_context (NULL);
12574
12575 if (dep_bind)
12576 {
12577 gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_ilist);
12578 gimple_bind_add_seq (bind_stmt: dep_bind, seq: tskred_ilist);
12579 gimple_bind_add_stmt (bind_stmt: dep_bind, stmt: bind);
12580 gimple_bind_add_seq (bind_stmt: dep_bind, seq: tskred_olist);
12581 gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_olist);
12582 pop_gimplify_context (dep_bind);
12583 }
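/* Resulting nesting (sketch) when depend or task-reduction clauses are
   present:

     dep_bind {
       dep_ilist; tskred_ilist;
       bind { ilist; GIMPLE_OMP_PARALLEL/TASK { new_body }; olist; }
       tskred_olist; dep_olist;
     }  */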
12584}
12585
12586/* Lower the GIMPLE_OMP_TARGET in the current statement
12587 in GSI_P. CTX holds context information for the directive. */
12588
12589static void
12590lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12591{
12592 tree clauses;
12593 tree child_fn, t, c;
12594 gomp_target *stmt = as_a <gomp_target *> (p: gsi_stmt (i: *gsi_p));
12595 gbind *tgt_bind, *bind, *dep_bind = NULL;
12596 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12597 location_t loc = gimple_location (g: stmt);
12598 bool offloaded, data_region;
12599 unsigned int map_cnt = 0;
12600 tree in_reduction_clauses = NULL_TREE;
12601
12602 offloaded = is_gimple_omp_offloaded (stmt);
12603 switch (gimple_omp_target_kind (g: stmt))
12604 {
12605 case GF_OMP_TARGET_KIND_REGION:
12606 tree *p, *q;
12607 q = &in_reduction_clauses;
12608 for (p = gimple_omp_target_clauses_ptr (gs: stmt); *p; )
12609 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12610 {
12611 *q = *p;
12612 q = &OMP_CLAUSE_CHAIN (*q);
12613 *p = OMP_CLAUSE_CHAIN (*p);
12614 }
12615 else
12616 p = &OMP_CLAUSE_CHAIN (*p);
12617 *q = NULL_TREE;
12618 *p = in_reduction_clauses;
12619 /* FALLTHRU */
12620 case GF_OMP_TARGET_KIND_UPDATE:
12621 case GF_OMP_TARGET_KIND_ENTER_DATA:
12622 case GF_OMP_TARGET_KIND_EXIT_DATA:
12623 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12624 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12625 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12626 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12627 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12628 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12629 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12630 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12631 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12632 data_region = false;
12633 break;
12634 case GF_OMP_TARGET_KIND_DATA:
12635 case GF_OMP_TARGET_KIND_OACC_DATA:
12636 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12637 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12638 data_region = true;
12639 break;
12640 default:
12641 gcc_unreachable ();
12642 }
12643
12644 /* Ensure that requires map is written via output_offload_tables, even if only
12645 'target (enter/exit) data' is used in the translation unit. */
12646 if (ENABLE_OFFLOADING && (omp_requires_mask & OMP_REQUIRES_TARGET_USED))
12647 g->have_offload = true;
12648
12649 clauses = gimple_omp_target_clauses (gs: stmt);
12650
12651 gimple_seq dep_ilist = NULL;
12652 gimple_seq dep_olist = NULL;
12653 bool has_depend = omp_find_clause (clauses, kind: OMP_CLAUSE_DEPEND) != NULL_TREE;
12654 if (has_depend || in_reduction_clauses)
12655 {
12656 push_gimplify_context ();
12657 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12658 if (has_depend)
12659 lower_depend_clauses (pclauses: gimple_omp_target_clauses_ptr (gs: stmt),
12660 iseq: &dep_ilist, oseq: &dep_olist);
12661 if (in_reduction_clauses)
12662 lower_rec_input_clauses (clauses: in_reduction_clauses, ilist: &dep_ilist, dlist: &dep_olist,
12663 ctx, NULL);
12664 }
12665
12666 tgt_bind = NULL;
12667 tgt_body = NULL;
12668 if (offloaded)
12669 {
12670 tgt_bind = gimple_seq_first_stmt_as_a_bind (s: gimple_omp_body (gs: stmt));
12671 tgt_body = gimple_bind_body (gs: tgt_bind);
12672 }
12673 else if (data_region)
12674 tgt_body = gimple_omp_body (gs: stmt);
12675 child_fn = ctx->cb.dst_fn;
12676
12677 push_gimplify_context ();
12678 fplist = NULL;
12679
12680 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12681 switch (OMP_CLAUSE_CODE (c))
12682 {
12683 tree var, x;
12684
12685 default:
12686 break;
12687 case OMP_CLAUSE_MAP:
12688#if CHECKING_P
12689 /* First check what we're prepared to handle in the following. */
12690 switch (OMP_CLAUSE_MAP_KIND (c))
12691 {
12692 case GOMP_MAP_ALLOC:
12693 case GOMP_MAP_TO:
12694 case GOMP_MAP_FROM:
12695 case GOMP_MAP_TOFROM:
12696 case GOMP_MAP_POINTER:
12697 case GOMP_MAP_TO_PSET:
12698 case GOMP_MAP_DELETE:
12699 case GOMP_MAP_RELEASE:
12700 case GOMP_MAP_ALWAYS_TO:
12701 case GOMP_MAP_ALWAYS_FROM:
12702 case GOMP_MAP_ALWAYS_TOFROM:
12703 case GOMP_MAP_FORCE_PRESENT:
12704 case GOMP_MAP_ALWAYS_PRESENT_FROM:
12705 case GOMP_MAP_ALWAYS_PRESENT_TO:
12706 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
12707
12708 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12709 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12710 case GOMP_MAP_STRUCT:
12711 case GOMP_MAP_ALWAYS_POINTER:
12712 case GOMP_MAP_ATTACH:
12713 case GOMP_MAP_DETACH:
12714 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12715 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12716 break;
12717 case GOMP_MAP_IF_PRESENT:
12718 case GOMP_MAP_FORCE_ALLOC:
12719 case GOMP_MAP_FORCE_TO:
12720 case GOMP_MAP_FORCE_FROM:
12721 case GOMP_MAP_FORCE_TOFROM:
12722 case GOMP_MAP_FORCE_DEVICEPTR:
12723 case GOMP_MAP_DEVICE_RESIDENT:
12724 case GOMP_MAP_LINK:
12725 case GOMP_MAP_FORCE_DETACH:
12726 gcc_assert (is_gimple_omp_oacc (stmt));
12727 break;
12728 default:
12729 gcc_unreachable ();
12730 }
12731#endif
12732 /* FALLTHRU */
12733 case OMP_CLAUSE_TO:
12734 case OMP_CLAUSE_FROM:
12735 oacc_firstprivate:
12736 var = OMP_CLAUSE_DECL (c);
12737 if (!DECL_P (var))
12738 {
12739 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12740 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12741 && (OMP_CLAUSE_MAP_KIND (c)
12742 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12743 map_cnt++;
12744 continue;
12745 }
12746
12747 if (DECL_SIZE (var)
12748 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12749 {
12750 tree var2 = DECL_VALUE_EXPR (var);
12751 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12752 var2 = TREE_OPERAND (var2, 0);
12753 gcc_assert (DECL_P (var2));
12754 var = var2;
12755 }
12756
12757 if (offloaded
12758 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12759 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12760 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12761 {
12762 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12763 {
12764 if (is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl: var, ctx))
12765 && varpool_node::get_create (decl: var)->offloadable)
12766 continue;
12767
12768 tree type = build_pointer_type (TREE_TYPE (var));
12769 tree new_var = lookup_decl (var, ctx);
12770 x = create_tmp_var_raw (type, get_name (new_var));
12771 gimple_add_tmp_var (x);
12772 x = build_simple_mem_ref (x);
12773 SET_DECL_VALUE_EXPR (new_var, x);
12774 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12775 }
12776 continue;
12777 }
12778
12779 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12780 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12781 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12782 && is_omp_target (stmt))
12783 {
12784 gcc_assert (maybe_lookup_field (c, ctx));
12785 map_cnt++;
12786 continue;
12787 }
12788
12789 if (!maybe_lookup_field (var, ctx))
12790 continue;
12791
12792 /* Don't remap compute constructs' reduction variables, because the
12793 intermediate result must be local to each gang. */
12794 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12795 && is_gimple_omp_oacc (stmt: ctx->stmt)
12796 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12797 {
12798 x = build_receiver_ref (var, by_ref: true, ctx);
12799 tree new_var = lookup_decl (var, ctx);
12800
12801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12802 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12803 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12804 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12805 x = build_simple_mem_ref (x);
12806 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12807 {
12808 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12809 if (omp_privatize_by_reference (decl: new_var)
12810 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12811 || DECL_BY_REFERENCE (var)))
12812 {
12813 /* Create a local object to hold the instance
12814 value. */
12815 tree type = TREE_TYPE (TREE_TYPE (new_var));
12816 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12817 tree inst = create_tmp_var (type, id);
12818 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12819 x = build_fold_addr_expr (inst);
12820 }
12821 gimplify_assign (new_var, x, &fplist);
12822 }
12823 else if (DECL_P (new_var))
12824 {
12825 SET_DECL_VALUE_EXPR (new_var, x);
12826 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12827 }
12828 else
12829 gcc_unreachable ();
12830 }
12831 map_cnt++;
12832 break;
12833
12834 case OMP_CLAUSE_FIRSTPRIVATE:
12835 omp_firstprivate_recv:
12836 gcc_checking_assert (offloaded);
12837 if (is_gimple_omp_oacc (stmt: ctx->stmt))
12838 {
12839 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12840 gcc_checking_assert (!is_oacc_kernels (ctx));
12841 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12842 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12843
12844 goto oacc_firstprivate;
12845 }
12846 map_cnt++;
12847 var = OMP_CLAUSE_DECL (c);
12848 if (!omp_privatize_by_reference (decl: var)
12849 && !is_gimple_reg_type (TREE_TYPE (var)))
12850 {
12851 tree new_var = lookup_decl (var, ctx);
12852 if (is_variable_sized (expr: var))
12853 {
12854 tree pvar = DECL_VALUE_EXPR (var);
12855 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12856 pvar = TREE_OPERAND (pvar, 0);
12857 gcc_assert (DECL_P (pvar));
12858 tree new_pvar = lookup_decl (var: pvar, ctx);
12859 x = build_fold_indirect_ref (new_pvar);
12860 TREE_THIS_NOTRAP (x) = 1;
12861 }
12862 else
12863 x = build_receiver_ref (var, by_ref: true, ctx);
12864 SET_DECL_VALUE_EXPR (new_var, x);
12865 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12866 }
12867 /* Fortran array descriptors: firstprivate of data + attach. */
12868 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
12869 && lang_hooks.decls.omp_array_data (var, true))
12870 map_cnt += 2;
12871 break;
12872
12873 case OMP_CLAUSE_PRIVATE:
12874 gcc_checking_assert (offloaded);
12875 if (is_gimple_omp_oacc (stmt: ctx->stmt))
12876 {
12877 /* No 'private' clauses on OpenACC 'kernels'. */
12878 gcc_checking_assert (!is_oacc_kernels (ctx));
12879 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12880 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12881
12882 break;
12883 }
12884 var = OMP_CLAUSE_DECL (c);
12885 if (is_variable_sized (expr: var))
12886 {
12887 tree new_var = lookup_decl (var, ctx);
12888 tree pvar = DECL_VALUE_EXPR (var);
12889 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12890 pvar = TREE_OPERAND (pvar, 0);
12891 gcc_assert (DECL_P (pvar));
12892 tree new_pvar = lookup_decl (var: pvar, ctx);
12893 x = build_fold_indirect_ref (new_pvar);
12894 TREE_THIS_NOTRAP (x) = 1;
12895 SET_DECL_VALUE_EXPR (new_var, x);
12896 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12897 }
12898 break;
12899
12900 case OMP_CLAUSE_USE_DEVICE_PTR:
12901 case OMP_CLAUSE_USE_DEVICE_ADDR:
12902 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12903 case OMP_CLAUSE_IS_DEVICE_PTR:
12904 var = OMP_CLAUSE_DECL (c);
12905 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12906 {
12907 while (TREE_CODE (var) == INDIRECT_REF
12908 || TREE_CODE (var) == ARRAY_REF)
12909 var = TREE_OPERAND (var, 0);
12910 if (lang_hooks.decls.omp_array_data (var, true))
12911 goto omp_firstprivate_recv;
12912 }
12913 map_cnt++;
12914 if (is_variable_sized (expr: var))
12915 {
12916 tree new_var = lookup_decl (var, ctx);
12917 tree pvar = DECL_VALUE_EXPR (var);
12918 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12919 pvar = TREE_OPERAND (pvar, 0);
12920 gcc_assert (DECL_P (pvar));
12921 tree new_pvar = lookup_decl (var: pvar, ctx);
12922 x = build_fold_indirect_ref (new_pvar);
12923 TREE_THIS_NOTRAP (x) = 1;
12924 SET_DECL_VALUE_EXPR (new_var, x);
12925 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12926 }
12927 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12928 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12929 && !omp_privatize_by_reference (decl: var)
12930 && !omp_is_allocatable_or_ptr (decl: var)
12931 && !lang_hooks.decls.omp_array_data (var, true))
12932 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12933 {
12934 tree new_var = lookup_decl (var, ctx);
12935 tree type = build_pointer_type (TREE_TYPE (var));
12936 x = create_tmp_var_raw (type, get_name (new_var));
12937 gimple_add_tmp_var (x);
12938 x = build_simple_mem_ref (x);
12939 SET_DECL_VALUE_EXPR (new_var, x);
12940 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12941 }
12942 else
12943 {
12944 tree new_var = lookup_decl (var, ctx);
12945 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12946 gimple_add_tmp_var (x);
12947 SET_DECL_VALUE_EXPR (new_var, x);
12948 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12949 }
12950 break;
12951 }
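/* Note that in the cases above the original variable is not rewritten
   directly; instead its DECL_VALUE_EXPR is redirected to a temporary or
   to a dereference of the received pointer, so uses in the lowered body
   transparently pick up the device-side value.  */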
12952
12953 if (offloaded)
12954 {
12955 target_nesting_level++;
12956 lower_omp (&tgt_body, ctx);
12957 target_nesting_level--;
12958 }
12959 else if (data_region)
12960 lower_omp (&tgt_body, ctx);
12961
12962 if (offloaded)
12963 {
12964 /* Declare all the variables created by mapping and the variables
12965 declared in the scope of the target body. */
12966 record_vars_into (ctx->block_vars, child_fn);
12967 maybe_remove_omp_member_access_dummy_vars (bind: tgt_bind);
12968 record_vars_into (gimple_bind_vars (bind_stmt: tgt_bind), child_fn);
12969 }
12970
12971 olist = NULL;
12972 ilist = NULL;
12973 if (ctx->record_type)
12974 {
12975 ctx->sender_decl
12976 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12977 DECL_NAMELESS (ctx->sender_decl) = 1;
12978 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12979 t = make_tree_vec (3);
12980 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12981 TREE_VEC_ELT (t, 1)
12982 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12983 ".omp_data_sizes");
12984 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12985 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12986 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12987 tree tkind_type = short_unsigned_type_node;
12988 int talign_shift = 8;
12989 TREE_VEC_ELT (t, 2)
12990 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12991 ".omp_data_kinds");
12992 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12993 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12994 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12995 gimple_omp_target_set_data_arg (omp_target_stmt: stmt, data_arg: t);
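/* Sketch of what is being assembled: for

     #pragma omp target map(tofrom: a)

   the runtime eventually receives roughly

     GOMP_target_ext (dev, fn, 1, &.omp_data_arr, &.omp_data_sizes,
                      &.omp_data_kinds, ...);

   i.e. one entry per mapped object: its host address, its size in
   bytes, and its map kind with the alignment encoded in the upper
   bits.  */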
12996
12997 vec<constructor_elt, va_gc> *vsize;
12998 vec<constructor_elt, va_gc> *vkind;
12999 vec_alloc (v&: vsize, nelems: map_cnt);
13000 vec_alloc (v&: vkind, nelems: map_cnt);
13001 unsigned int map_idx = 0;
13002
13003 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13004 switch (OMP_CLAUSE_CODE (c))
13005 {
13006 tree ovar, nc, s, purpose, var, x, type;
13007 unsigned int talign;
13008
13009 default:
13010 break;
13011
13012 case OMP_CLAUSE_MAP:
13013 case OMP_CLAUSE_TO:
13014 case OMP_CLAUSE_FROM:
13015 oacc_firstprivate_map:
13016 nc = c;
13017 ovar = OMP_CLAUSE_DECL (c);
13018 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13019 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13020 || (OMP_CLAUSE_MAP_KIND (c)
13021 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13022 break;
13023 if (!DECL_P (ovar))
13024 {
13025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13026 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13027 {
13028 nc = OMP_CLAUSE_CHAIN (c);
13029 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13030 == get_base_address (ovar));
13031 ovar = OMP_CLAUSE_DECL (nc);
13032 }
13033 else
13034 {
13035 tree x = build_sender_ref (var: ovar, ctx);
13036 tree v = ovar;
13037 if (in_reduction_clauses
13038 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13039 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13040 {
13041 v = unshare_expr (v);
13042 tree *p = &v;
13043 while (handled_component_p (t: *p)
13044 || TREE_CODE (*p) == INDIRECT_REF
13045 || TREE_CODE (*p) == ADDR_EXPR
13046 || TREE_CODE (*p) == MEM_REF
13047 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13048 p = &TREE_OPERAND (*p, 0);
13049 tree d = *p;
13050 if (is_variable_sized (expr: d))
13051 {
13052 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13053 d = DECL_VALUE_EXPR (d);
13054 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13055 d = TREE_OPERAND (d, 0);
13056 gcc_assert (DECL_P (d));
13057 }
13058 splay_tree_key key
13059 = (splay_tree_key) &DECL_CONTEXT (d);
13060 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13061 key)->value;
13062 if (d == *p)
13063 *p = nd;
13064 else
13065 *p = build_fold_indirect_ref (nd);
13066 }
13067 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13068 gimplify_assign (x, v, &ilist);
13069 nc = NULL_TREE;
13070 }
13071 }
13072 else
13073 {
13074 if (DECL_SIZE (ovar)
13075 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13076 {
13077 tree ovar2 = DECL_VALUE_EXPR (ovar);
13078 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13079 ovar2 = TREE_OPERAND (ovar2, 0);
13080 gcc_assert (DECL_P (ovar2));
13081 ovar = ovar2;
13082 }
13083 if (!maybe_lookup_field (var: ovar, ctx)
13084 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13085 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13086 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13087 continue;
13088 }
13089
13090 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13091 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13092 talign = DECL_ALIGN_UNIT (ovar);
13093
13094 var = NULL_TREE;
13095 if (nc)
13096 {
13097 if (in_reduction_clauses
13098 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13099 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13100 {
13101 tree d = ovar;
13102 if (is_variable_sized (expr: d))
13103 {
13104 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13105 d = DECL_VALUE_EXPR (d);
13106 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13107 d = TREE_OPERAND (d, 0);
13108 gcc_assert (DECL_P (d));
13109 }
13110 splay_tree_key key
13111 = (splay_tree_key) &DECL_CONTEXT (d);
13112 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13113 key)->value;
13114 if (d == ovar)
13115 var = nd;
13116 else
13117 var = build_fold_indirect_ref (nd);
13118 }
13119 else
13120 var = lookup_decl_in_outer_ctx (decl: ovar, ctx);
13121 }
13122 if (nc
13123 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13124 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13125 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13126 && is_omp_target (stmt))
13127 {
13128 x = build_sender_ref (var: c, ctx);
13129 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13130 }
13131 else if (nc)
13132 {
13133 x = build_sender_ref (var: ovar, ctx);
13134
13135 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13136 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13137 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13138 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13139 {
13140 gcc_assert (offloaded);
13141 tree avar
13142 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13143 mark_addressable (avar);
13144 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13145 talign = DECL_ALIGN_UNIT (avar);
13146 avar = build_fold_addr_expr (avar);
13147 gimplify_assign (x, avar, &ilist);
13148 }
13149 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13150 {
13151 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13152 if (!omp_privatize_by_reference (decl: var))
13153 {
13154 if (is_gimple_reg (var)
13155 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13156 suppress_warning (var);
13157 var = build_fold_addr_expr (var);
13158 }
13159 else
13160 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13161 gimplify_assign (x, var, &ilist);
13162 }
13163 else if (is_gimple_reg (var))
13164 {
13165 gcc_assert (offloaded);
13166 tree avar = create_tmp_var (TREE_TYPE (var));
13167 mark_addressable (avar);
13168 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13169 if (GOMP_MAP_COPY_TO_P (map_kind)
13170 || map_kind == GOMP_MAP_POINTER
13171 || map_kind == GOMP_MAP_TO_PSET
13172 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13173 {
13174 /* If we need to initialize a temporary
13175 with VAR because it is not addressable, and
13176 the variable hasn't been initialized yet, then
13177 we'll get a warning for the store to avar.
13178 Don't warn in that case, the mapping might
13179 be implicit. */
13180 suppress_warning (var, OPT_Wuninitialized);
13181 gimplify_assign (avar, var, &ilist);
13182 }
13183 avar = build_fold_addr_expr (avar);
13184 gimplify_assign (x, avar, &ilist);
13185 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13186 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13187 && !TYPE_READONLY (TREE_TYPE (var)))
13188 {
13189 x = unshare_expr (x);
13190 x = build_simple_mem_ref (x);
13191 gimplify_assign (var, x, &olist);
13192 }
13193 }
13194 else
13195 {
13196 /* While MAP is handled explicitly by the FE,
13197 for 'target update', only the identifier is passed. */
13198 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13199 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13200 && (omp_is_allocatable_or_ptr (decl: var)
13201 && omp_check_optional_argument (decl: var, for_present_check: false)))
13202 var = build_fold_indirect_ref (var);
13203 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13204 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13205 || (!omp_is_allocatable_or_ptr (decl: var)
13206 && !omp_check_optional_argument (decl: var, for_present_check: false)))
13207 var = build_fold_addr_expr (var);
13208 gimplify_assign (x, var, &ilist);
13209 }
13210 }
13211 s = NULL_TREE;
13212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13213 {
13214 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13215 s = TREE_TYPE (ovar);
13216 if (TREE_CODE (s) == REFERENCE_TYPE
13217 || omp_check_optional_argument (decl: ovar, for_present_check: false))
13218 s = TREE_TYPE (s);
13219 s = TYPE_SIZE_UNIT (s);
13220 }
13221 else
13222 s = OMP_CLAUSE_SIZE (c);
13223 if (s == NULL_TREE)
13224 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13225 s = fold_convert (size_type_node, s);
13226 purpose = size_int (map_idx++);
13227 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13228 if (TREE_CODE (s) != INTEGER_CST)
13229 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13230
13231 unsigned HOST_WIDE_INT tkind, tkind_zero;
13232 switch (OMP_CLAUSE_CODE (c))
13233 {
13234 case OMP_CLAUSE_MAP:
13235 tkind = OMP_CLAUSE_MAP_KIND (c);
13236 tkind_zero = tkind;
13237 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13238 switch (tkind)
13239 {
13240 case GOMP_MAP_ALLOC:
13241 case GOMP_MAP_IF_PRESENT:
13242 case GOMP_MAP_TO:
13243 case GOMP_MAP_FROM:
13244 case GOMP_MAP_TOFROM:
13245 case GOMP_MAP_ALWAYS_TO:
13246 case GOMP_MAP_ALWAYS_FROM:
13247 case GOMP_MAP_ALWAYS_TOFROM:
13248 case GOMP_MAP_ALWAYS_PRESENT_TO:
13249 case GOMP_MAP_ALWAYS_PRESENT_FROM:
13250 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
13251 case GOMP_MAP_RELEASE:
13252 case GOMP_MAP_FORCE_TO:
13253 case GOMP_MAP_FORCE_FROM:
13254 case GOMP_MAP_FORCE_TOFROM:
13255 case GOMP_MAP_FORCE_PRESENT:
13256 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13257 break;
13258 case GOMP_MAP_DELETE:
13259 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13260 default:
13261 break;
13262 }
13263 if (tkind_zero != tkind)
13264 {
13265 if (integer_zerop (s))
13266 tkind = tkind_zero;
13267 else if (integer_nonzerop (s))
13268 tkind_zero = tkind;
13269 }
13270 if (tkind_zero == tkind
13271 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13272 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13273 & ~GOMP_MAP_IMPLICIT)
13274 == 0))
13275 {
13276 /* If this is an implicit map and the GOMP_MAP_IMPLICIT
13277 bits do not collide with other special bit encodings,
13278 turn the GOMP_MAP_IMPLICIT flag on for the runtime
13279 to see. */
13280 tkind |= GOMP_MAP_IMPLICIT;
13281 tkind_zero = tkind;
13282 }
13283 break;
13284 case OMP_CLAUSE_FIRSTPRIVATE:
13285 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13286 tkind = GOMP_MAP_TO;
13287 tkind_zero = tkind;
13288 break;
13289 case OMP_CLAUSE_TO:
13290 tkind
13291 = (OMP_CLAUSE_MOTION_PRESENT (c)
13292 ? GOMP_MAP_ALWAYS_PRESENT_TO : GOMP_MAP_TO);
13293 tkind_zero = tkind;
13294 break;
13295 case OMP_CLAUSE_FROM:
13296 tkind
13297 = (OMP_CLAUSE_MOTION_PRESENT (c)
13298 ? GOMP_MAP_ALWAYS_PRESENT_FROM : GOMP_MAP_FROM);
13299 tkind_zero = tkind;
13300 break;
13301 default:
13302 gcc_unreachable ();
13303 }
13304 gcc_checking_assert (tkind
13305 < (HOST_WIDE_INT_C (1U) << talign_shift));
13306 gcc_checking_assert (tkind_zero
13307 < (HOST_WIDE_INT_C (1U) << talign_shift));
13308 talign = ceil_log2 (x: talign);
13309 tkind |= talign << talign_shift;
13310 tkind_zero |= talign << talign_shift;
13311 gcc_checking_assert (tkind
13312 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13313 gcc_checking_assert (tkind_zero
13314 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13315 if (tkind == tkind_zero)
13316 x = build_int_cstu (type: tkind_type, tkind);
13317 else
13318 {
13319 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13320 x = build3 (COND_EXPR, tkind_type,
13321 fold_build2 (EQ_EXPR, boolean_type_node,
13322 unshare_expr (s), size_zero_node),
13323 build_int_cstu (type: tkind_type, tkind_zero),
13324 build_int_cstu (type: tkind_type, tkind));
13325 }
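/* Example encoding (sketch): GOMP_MAP_TOFROM of an 8-byte-aligned
   object yields tkind | (ceil_log2 (8) << talign_shift), i.e. the low
   byte holds the map kind and the upper bits of the short hold the
   alignment exponent 3.  */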
13326 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13327 if (nc && nc != c)
13328 c = nc;
13329 break;
13330
13331 case OMP_CLAUSE_FIRSTPRIVATE:
13332 omp_has_device_addr_descr:
13333 if (is_gimple_omp_oacc (stmt: ctx->stmt))
13334 goto oacc_firstprivate_map;
13335 ovar = OMP_CLAUSE_DECL (c);
13336 if (omp_privatize_by_reference (decl: ovar))
13337 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13338 else
13339 talign = DECL_ALIGN_UNIT (ovar);
13340 var = lookup_decl_in_outer_ctx (decl: ovar, ctx);
13341 x = build_sender_ref (var: ovar, ctx);
13342 tkind = GOMP_MAP_FIRSTPRIVATE;
13343 type = TREE_TYPE (ovar);
13344 if (omp_privatize_by_reference (decl: ovar))
13345 type = TREE_TYPE (type);
13346 if ((INTEGRAL_TYPE_P (type)
13347 && TYPE_PRECISION (type) <= POINTER_SIZE)
13348 || TREE_CODE (type) == POINTER_TYPE)
13349 {
13350 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13351 tree t = var;
13352 if (omp_privatize_by_reference (decl: var))
13353 t = build_simple_mem_ref (var);
13354 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13355 suppress_warning (var);
13356 if (TREE_CODE (type) != POINTER_TYPE)
13357 t = fold_convert (pointer_sized_int_node, t);
13358 t = fold_convert (TREE_TYPE (x), t);
13359 gimplify_assign (x, t, &ilist);
13360 }
13361 else if (omp_privatize_by_reference (decl: var))
13362 gimplify_assign (x, var, &ilist);
13363 else if (is_gimple_reg (var))
13364 {
13365 tree avar = create_tmp_var (TREE_TYPE (var));
13366 mark_addressable (avar);
13367 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13368 suppress_warning (var);
13369 gimplify_assign (avar, var, &ilist);
13370 avar = build_fold_addr_expr (avar);
13371 gimplify_assign (x, avar, &ilist);
13372 }
13373 else
13374 {
13375 var = build_fold_addr_expr (var);
13376 gimplify_assign (x, var, &ilist);
13377 }
13378 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13379 s = size_int (0);
13380 else if (omp_privatize_by_reference (decl: ovar))
13381 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13382 else
13383 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13384 s = fold_convert (size_type_node, s);
13385 purpose = size_int (map_idx++);
13386 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13387 if (TREE_CODE (s) != INTEGER_CST)
13388 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13389
13390 gcc_checking_assert (tkind
13391 < (HOST_WIDE_INT_C (1U) << talign_shift));
13392 talign = ceil_log2 (x: talign);
13393 tkind |= talign << talign_shift;
13394 gcc_checking_assert (tkind
13395 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13396 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13397 build_int_cstu (tkind_type, tkind));
13398 /* Fortran array descriptors: firstprivate of data + attach. */
13399 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13400 && lang_hooks.decls.omp_array_data (ovar, true))
13401 {
13402 tree not_null_lb, null_lb, after_lb;
13403 tree var1, var2, size1, size2;
13404 tree present = omp_check_optional_argument (decl: ovar, for_present_check: true);
13405 if (present)
13406 {
13407 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13408 not_null_lb = create_artificial_label (clause_loc);
13409 null_lb = create_artificial_label (clause_loc);
13410 after_lb = create_artificial_label (clause_loc);
13411 gimple_seq seq = NULL;
13412 present = force_gimple_operand (present, &seq, true,
13413 NULL_TREE);
13414 gimple_seq_add_seq (&ilist, seq);
13415 gimple_seq_add_stmt (&ilist,
13416 gimple_build_cond_from_tree (present,
13417 not_null_lb, null_lb));
13418 gimple_seq_add_stmt (&ilist,
13419 gimple_build_label (label: not_null_lb));
13420 }
13421 var1 = lang_hooks.decls.omp_array_data (var, false);
13422 size1 = lang_hooks.decls.omp_array_size (var, &ilist);
13423 var2 = build_fold_addr_expr (x);
13424 if (!POINTER_TYPE_P (TREE_TYPE (var)))
13425 var = build_fold_addr_expr (var);
13426 size2 = fold_build2 (POINTER_DIFF_EXPR, ssizetype,
13427 build_fold_addr_expr (var1), var);
13428 size2 = fold_convert (sizetype, size2);
13429 if (present)
13430 {
13431 tree tmp = create_tmp_var (TREE_TYPE (var1));
13432 gimplify_assign (tmp, var1, &ilist);
13433 var1 = tmp;
13434 tmp = create_tmp_var (TREE_TYPE (var2));
13435 gimplify_assign (tmp, var2, &ilist);
13436 var2 = tmp;
13437 tmp = create_tmp_var (TREE_TYPE (size1));
13438 gimplify_assign (tmp, size1, &ilist);
13439 size1 = tmp;
13440 tmp = create_tmp_var (TREE_TYPE (size2));
13441 gimplify_assign (tmp, size2, &ilist);
13442 size2 = tmp;
13443 gimple_seq_add_stmt (&ilist, gimple_build_goto (dest: after_lb));
13444 gimple_seq_add_stmt (&ilist, gimple_build_label (label: null_lb));
13445 gimplify_assign (var1, null_pointer_node, &ilist);
13446 gimplify_assign (var2, null_pointer_node, &ilist);
13447 gimplify_assign (size1, size_zero_node, &ilist);
13448 gimplify_assign (size2, size_zero_node, &ilist);
13449 gimple_seq_add_stmt (&ilist, gimple_build_label (label: after_lb));
13450 }
13451 x = build_sender_ref (key: (splay_tree_key) &DECL_NAME (ovar), ctx);
13452 gimplify_assign (x, var1, &ilist);
13453 tkind = GOMP_MAP_FIRSTPRIVATE;
13454 talign = DECL_ALIGN_UNIT (ovar);
13455 talign = ceil_log2 (x: talign);
13456 tkind |= talign << talign_shift;
13457 gcc_checking_assert (tkind
13458 <= tree_to_uhwi (
13459 TYPE_MAX_VALUE (tkind_type)));
13460 purpose = size_int (map_idx++);
13461 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size1);
13462 if (TREE_CODE (size1) != INTEGER_CST)
13463 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13464 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13465 build_int_cstu (tkind_type, tkind));
13466 x = build_sender_ref (key: (splay_tree_key) &DECL_UID (ovar), ctx);
13467 gimplify_assign (x, var2, &ilist);
13468 tkind = GOMP_MAP_ATTACH;
13469 purpose = size_int (map_idx++);
13470 CONSTRUCTOR_APPEND_ELT (vsize, purpose, size2);
13471 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13472 build_int_cstu (tkind_type, tkind));
13473 }
13474 break;
13475
13476 case OMP_CLAUSE_USE_DEVICE_PTR:
13477 case OMP_CLAUSE_USE_DEVICE_ADDR:
13478 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13479 case OMP_CLAUSE_IS_DEVICE_PTR:
13480 ovar = OMP_CLAUSE_DECL (c);
13481 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13482 {
13483 if (lang_hooks.decls.omp_array_data (ovar, true))
13484 goto omp_has_device_addr_descr;
13485 while (TREE_CODE (ovar) == INDIRECT_REF
13486 || TREE_CODE (ovar) == ARRAY_REF)
13487 ovar = TREE_OPERAND (ovar, 0);
13488 }
13489 var = lookup_decl_in_outer_ctx (decl: ovar, ctx);
13490
13491 if (lang_hooks.decls.omp_array_data (ovar, true))
13492 {
13493 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13494 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13495 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13496 x = build_sender_ref (key: (splay_tree_key) &DECL_NAME (ovar), ctx);
13497 }
13498 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13499 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13500 {
13501 tkind = GOMP_MAP_USE_DEVICE_PTR;
13502 x = build_sender_ref (key: (splay_tree_key) &DECL_UID (ovar), ctx);
13503 }
13504 else
13505 {
13506 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13507 x = build_sender_ref (var: ovar, ctx);
13508 }
13509
13510 if (is_gimple_omp_oacc (stmt: ctx->stmt))
13511 {
13512 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13513
13514 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13515 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13516 }
13517
13518 type = TREE_TYPE (ovar);
13519 if (lang_hooks.decls.omp_array_data (ovar, true))
13520 var = lang_hooks.decls.omp_array_data (var, false);
13521 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13522 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13523 && !omp_privatize_by_reference (decl: ovar)
13524 && !omp_is_allocatable_or_ptr (decl: ovar))
13525 || TREE_CODE (type) == ARRAY_TYPE)
13526 var = build_fold_addr_expr (var);
13527 else
13528 {
13529 if (omp_privatize_by_reference (decl: ovar)
13530 || omp_check_optional_argument (decl: ovar, for_present_check: false)
13531 || omp_is_allocatable_or_ptr (decl: ovar))
13532 {
13533 type = TREE_TYPE (type);
13534 if (POINTER_TYPE_P (type)
13535 && TREE_CODE (type) != ARRAY_TYPE
13536 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13537 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13538 && !omp_is_allocatable_or_ptr (decl: ovar))
13539 || (omp_privatize_by_reference (decl: ovar)
13540 && omp_is_allocatable_or_ptr (decl: ovar))))
13541 var = build_simple_mem_ref (var);
13542 var = fold_convert (TREE_TYPE (x), var);
13543 }
13544 }
13545 tree present;
13546 present = omp_check_optional_argument (decl: ovar, for_present_check: true);
13547 if (present)
13548 {
13549 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13550 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13551 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13552 tree new_x = unshare_expr (x);
13553 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13554 fb_rvalue);
13555 gcond *cond = gimple_build_cond_from_tree (present,
13556 notnull_label,
13557 null_label);
13558 gimple_seq_add_stmt (&ilist, cond);
13559 gimple_seq_add_stmt (&ilist, gimple_build_label (label: null_label));
13560 gimplify_assign (new_x, null_pointer_node, &ilist);
13561 gimple_seq_add_stmt (&ilist, gimple_build_goto (dest: opt_arg_label));
13562 gimple_seq_add_stmt (&ilist,
13563 gimple_build_label (label: notnull_label));
13564 gimplify_assign (x, var, &ilist);
13565 gimple_seq_add_stmt (&ilist,
13566 gimple_build_label (label: opt_arg_label));
13567 }
13568 else
13569 gimplify_assign (x, var, &ilist);
13570 s = size_int (0);
13571 purpose = size_int (map_idx++);
13572 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13573 gcc_checking_assert (tkind
13574 < (HOST_WIDE_INT_C (1U) << talign_shift));
13575 gcc_checking_assert (tkind
13576 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13577 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13578 build_int_cstu (tkind_type, tkind));
13579 break;
13580 }
13581
13582 gcc_assert (map_idx == map_cnt);
13583
13584 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13585 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13586 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13587 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13588 for (int i = 1; i <= 2; i++)
13589 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13590 {
13591 gimple_seq initlist = NULL;
13592 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13593 TREE_VEC_ELT (t, i)),
13594 &initlist, true, NULL_TREE);
13595 gimple_seq_add_seq (&ilist, initlist);
13596
13597 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13598 gimple_seq_add_stmt (&olist,
13599 gimple_build_assign (TREE_VEC_ELT (t, i),
13600 clobber));
13601 }
13602 else if (omp_maybe_offloaded_ctx (ctx: ctx->outer))
13603 {
13604 tree id = get_identifier ("omp declare target");
13605 tree decl = TREE_VEC_ELT (t, i);
13606 DECL_ATTRIBUTES (decl)
13607 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13608 varpool_node *node = varpool_node::get (decl);
13609 if (node)
13610 {
13611 node->offloadable = 1;
13612 if (ENABLE_OFFLOADING)
13613 {
13614 g->have_offload = true;
13615 vec_safe_push (v&: offload_vars, obj: t);
13616 }
13617 }
13618 }
13619
13620 tree clobber = build_clobber (ctx->record_type);
13621 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13622 clobber));
13623 }
13624
13625 /* Once all the expansions are done, sequence all the different
13626 fragments inside gimple_omp_body. */
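  /* The order assembled below is: initialization of receiver_decl from
     sender_decl (when offloaded), the firstprivate sequence FPLIST, the
     per-clause receive/privatization code, the OpenACC fork sequence,
     the target body, the OpenACC join sequence and, when offloaded, the
     final OMP return.  */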
13627
13628 new_body = NULL;
13629
13630 if (offloaded
13631 && ctx->record_type)
13632 {
13633 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13634 /* fixup_child_record_type might have changed receiver_decl's type. */
13635 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13636 gimple_seq_add_stmt (&new_body,
13637 gimple_build_assign (ctx->receiver_decl, t));
13638 }
13639 gimple_seq_add_seq (&new_body, fplist);
13640
13641 if (offloaded || data_region)
13642 {
13643 tree prev = NULL_TREE;
13644 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13645 switch (OMP_CLAUSE_CODE (c))
13646 {
13647 tree var, x;
13648 default:
13649 break;
13650 case OMP_CLAUSE_FIRSTPRIVATE:
13651 omp_firstprivatize_data_region:
13652 if (is_gimple_omp_oacc (stmt: ctx->stmt))
13653 break;
13654 var = OMP_CLAUSE_DECL (c);
13655 if (omp_privatize_by_reference (decl: var)
13656 || is_gimple_reg_type (TREE_TYPE (var)))
13657 {
13658 tree new_var = lookup_decl (var, ctx);
13659 tree type;
13660 type = TREE_TYPE (var);
13661 if (omp_privatize_by_reference (decl: var))
13662 type = TREE_TYPE (type);
13663 if ((INTEGRAL_TYPE_P (type)
13664 && TYPE_PRECISION (type) <= POINTER_SIZE)
13665 || TREE_CODE (type) == POINTER_TYPE)
13666 {
13667 x = build_receiver_ref (var, by_ref: false, ctx);
13668 if (TREE_CODE (type) != POINTER_TYPE)
13669 x = fold_convert (pointer_sized_int_node, x);
13670 x = fold_convert (type, x);
13671 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13672 fb_rvalue);
13673 if (omp_privatize_by_reference (decl: var))
13674 {
13675 tree v = create_tmp_var_raw (type, get_name (var));
13676 gimple_add_tmp_var (v);
13677 TREE_ADDRESSABLE (v) = 1;
13678 gimple_seq_add_stmt (&new_body,
13679 gimple_build_assign (v, x));
13680 x = build_fold_addr_expr (v);
13681 }
13682 gimple_seq_add_stmt (&new_body,
13683 gimple_build_assign (new_var, x));
13684 }
13685 else
13686 {
13687 bool by_ref = !omp_privatize_by_reference (decl: var);
13688 x = build_receiver_ref (var, by_ref, ctx);
13689 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13690 fb_rvalue);
13691 gimple_seq_add_stmt (&new_body,
13692 gimple_build_assign (new_var, x));
13693 }
13694 }
13695 else if (is_variable_sized (expr: var))
13696 {
13697 tree pvar = DECL_VALUE_EXPR (var);
13698 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13699 pvar = TREE_OPERAND (pvar, 0);
13700 gcc_assert (DECL_P (pvar));
13701 tree new_var = lookup_decl (var: pvar, ctx);
13702 x = build_receiver_ref (var, by_ref: false, ctx);
13703 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13704 gimple_seq_add_stmt (&new_body,
13705 gimple_build_assign (new_var, x));
13706 }
13707 break;
13708 case OMP_CLAUSE_PRIVATE:
13709 if (is_gimple_omp_oacc (stmt: ctx->stmt))
13710 break;
13711 var = OMP_CLAUSE_DECL (c);
13712 if (omp_privatize_by_reference (decl: var))
13713 {
13714 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13715 tree new_var = lookup_decl (var, ctx);
13716 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13717 if (TREE_CONSTANT (x))
13718 {
13719 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13720 get_name (var));
13721 gimple_add_tmp_var (x);
13722 TREE_ADDRESSABLE (x) = 1;
13723 x = build_fold_addr_expr_loc (clause_loc, x);
13724 }
13725 else
13726 break;
13727
13728 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13729 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13730 gimple_seq_add_stmt (&new_body,
13731 gimple_build_assign (new_var, x));
13732 }
13733 break;
13734 case OMP_CLAUSE_USE_DEVICE_PTR:
13735 case OMP_CLAUSE_USE_DEVICE_ADDR:
13736 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13737 case OMP_CLAUSE_IS_DEVICE_PTR:
13738 tree new_var;
13739 gimple_seq assign_body;
13740 bool is_array_data;
13741 bool do_optional_check;
13742 assign_body = NULL;
13743 do_optional_check = false;
13744 var = OMP_CLAUSE_DECL (c);
13745 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13746 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR && is_array_data)
13747 goto omp_firstprivatize_data_region;
13748
13749 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13750 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13751 x = build_sender_ref (key: is_array_data
13752 ? (splay_tree_key) &DECL_NAME (var)
13753 : (splay_tree_key) &DECL_UID (var), ctx);
13754 else
13755 {
13756 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13757 {
13758 while (TREE_CODE (var) == INDIRECT_REF
13759 || TREE_CODE (var) == ARRAY_REF)
13760 var = TREE_OPERAND (var, 0);
13761 }
13762 x = build_receiver_ref (var, by_ref: false, ctx);
13763 }
13764
13765 if (is_array_data)
13766 {
13767 bool is_ref = omp_privatize_by_reference (decl: var);
13768 do_optional_check = true;
13769 /* First, we copy the descriptor data from the host; then
13770 we update its data to point to the target address. */
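	      /* I.e., a sketch of the two steps below:
		   <new descriptor> = <incoming descriptor>;
		   <new descriptor>.data = <target data address>;  */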
13771 new_var = lookup_decl (var, ctx);
13772 new_var = DECL_VALUE_EXPR (new_var);
13773 tree v = new_var;
13774 tree v2 = var;
13775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
13776 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR)
13777 v2 = maybe_lookup_decl_in_outer_ctx (decl: var, ctx);
13778
13779 if (is_ref)
13780 {
13781 v2 = build_fold_indirect_ref (v2);
13782 v = create_tmp_var_raw (TREE_TYPE (v2), get_name (var));
13783 gimple_add_tmp_var (v);
13784 TREE_ADDRESSABLE (v) = 1;
13785 gimplify_assign (v, v2, &assign_body);
13786 tree rhs = build_fold_addr_expr (v);
13787 gimple_seq_add_stmt (&assign_body,
13788 gimple_build_assign (new_var, rhs));
13789 }
13790 else
13791 gimplify_assign (new_var, v2, &assign_body);
13792
13793 v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13794 gcc_assert (v2);
13795 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13796 gimple_seq_add_stmt (&assign_body,
13797 gimple_build_assign (v2, x));
13798 }
13799 else if (is_variable_sized (expr: var))
13800 {
13801 tree pvar = DECL_VALUE_EXPR (var);
13802 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13803 pvar = TREE_OPERAND (pvar, 0);
13804 gcc_assert (DECL_P (pvar));
13805 new_var = lookup_decl (var: pvar, ctx);
13806 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13807 gimple_seq_add_stmt (&assign_body,
13808 gimple_build_assign (new_var, x));
13809 }
13810 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13811 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13812 && !omp_privatize_by_reference (decl: var)
13813 && !omp_is_allocatable_or_ptr (decl: var))
13814 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13815 {
13816 new_var = lookup_decl (var, ctx);
13817 new_var = DECL_VALUE_EXPR (new_var);
13818 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13819 new_var = TREE_OPERAND (new_var, 0);
13820 gcc_assert (DECL_P (new_var));
13821 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13822 gimple_seq_add_stmt (&assign_body,
13823 gimple_build_assign (new_var, x));
13824 }
13825 else
13826 {
13827 tree type = TREE_TYPE (var);
13828 new_var = lookup_decl (var, ctx);
13829 if (omp_privatize_by_reference (decl: var))
13830 {
13831 type = TREE_TYPE (type);
13832 if (POINTER_TYPE_P (type)
13833 && TREE_CODE (type) != ARRAY_TYPE
13834 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13835 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13836 || (omp_privatize_by_reference (decl: var)
13837 && omp_is_allocatable_or_ptr (decl: var))))
13838 {
13839 tree v = create_tmp_var_raw (type, get_name (var));
13840 gimple_add_tmp_var (v);
13841 TREE_ADDRESSABLE (v) = 1;
13842 x = fold_convert (type, x);
13843 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13844 fb_rvalue);
13845 gimple_seq_add_stmt (&assign_body,
13846 gimple_build_assign (v, x));
13847 x = build_fold_addr_expr (v);
13848 do_optional_check = true;
13849 }
13850 }
13851 new_var = DECL_VALUE_EXPR (new_var);
13852 x = fold_convert (TREE_TYPE (new_var), x);
13853 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13854 gimple_seq_add_stmt (&assign_body,
13855 gimple_build_assign (new_var, x));
13856 }
13857 tree present;
13858 present = ((do_optional_check
13859 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13860 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13861 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), for_present_check: true)
13862 : NULL_TREE);
13863 if (present)
13864 {
13865 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13866 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13867 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13868 glabel *null_glabel = gimple_build_label (label: null_label);
13869 glabel *notnull_glabel = gimple_build_label (label: notnull_label);
13870 ggoto *opt_arg_ggoto = gimple_build_goto (dest: opt_arg_label);
13871 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13872 fb_rvalue);
13873 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13874 fb_rvalue);
13875 gcond *cond = gimple_build_cond_from_tree (present,
13876 notnull_label,
13877 null_label);
13878 gimple_seq_add_stmt (&new_body, cond);
13879 gimple_seq_add_stmt (&new_body, null_glabel);
13880 gimplify_assign (new_var, null_pointer_node, &new_body);
13881 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13882 gimple_seq_add_stmt (&new_body, notnull_glabel);
13883 gimple_seq_add_seq (&new_body, assign_body);
13884 gimple_seq_add_stmt (&new_body,
13885 gimple_build_label (label: opt_arg_label));
13886 }
13887 else
13888 gimple_seq_add_seq (&new_body, assign_body);
13889 break;
13890 }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second
	 pass, so that firstprivate vars that may hold an OMP_CLAUSE_SIZE
	 have already been handled; similarly OMP_CLAUSE_PRIVATE for VLAs
	 or references to VLAs.  */
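      /* For example (hypothetical user code): for "#pragma omp target
	 map(tofrom: p[0:n])" with a pointer P, the gimplifier emits the
	 data map followed by a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P
	 whose OMP_CLAUSE_SIZE holds the bias; the loop below rebuilds the
	 private P from the received address minus that bias.  */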
13895 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13896 switch (OMP_CLAUSE_CODE (c))
13897 {
13898 tree var;
13899 default:
13900 break;
13901 case OMP_CLAUSE_MAP:
13902 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13903 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13904 {
13905 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13906 poly_int64 offset = 0;
13907 gcc_assert (prev);
13908 var = OMP_CLAUSE_DECL (c);
13909 if (DECL_P (var)
13910 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13911 && is_global_var (t: maybe_lookup_decl_in_outer_ctx (decl: var,
13912 ctx))
13913 && varpool_node::get_create (decl: var)->offloadable)
13914 break;
13915 if (TREE_CODE (var) == INDIRECT_REF
13916 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13917 var = TREE_OPERAND (var, 0);
13918 if (TREE_CODE (var) == COMPONENT_REF)
13919 {
13920 var = get_addr_base_and_unit_offset (var, &offset);
13921 gcc_assert (var != NULL_TREE && DECL_P (var));
13922 }
13923 else if (DECL_SIZE (var)
13924 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13925 {
13926 tree var2 = DECL_VALUE_EXPR (var);
13927 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13928 var2 = TREE_OPERAND (var2, 0);
13929 gcc_assert (DECL_P (var2));
13930 var = var2;
13931 }
13932 tree new_var = lookup_decl (var, ctx), x;
13933 tree type = TREE_TYPE (new_var);
13934 bool is_ref;
13935 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13936 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13937 == COMPONENT_REF))
13938 {
13939 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13940 is_ref = true;
13941 new_var = build2 (MEM_REF, type,
13942 build_fold_addr_expr (new_var),
13943 build_int_cst (build_pointer_type (type),
13944 offset));
13945 }
13946 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13947 {
13948 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13949 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13950 new_var = build2 (MEM_REF, type,
13951 build_fold_addr_expr (new_var),
13952 build_int_cst (build_pointer_type (type),
13953 offset));
13954 }
13955 else
13956 is_ref = omp_privatize_by_reference (decl: var);
13957 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13958 is_ref = false;
13959 bool ref_to_array = false;
13960 bool ref_to_ptr = false;
13961 if (is_ref)
13962 {
13963 type = TREE_TYPE (type);
13964 if (TREE_CODE (type) == ARRAY_TYPE)
13965 {
13966 type = build_pointer_type (type);
13967 ref_to_array = true;
13968 }
13969 }
13970 else if (TREE_CODE (type) == ARRAY_TYPE)
13971 {
13972 tree decl2 = DECL_VALUE_EXPR (new_var);
13973 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13974 decl2 = TREE_OPERAND (decl2, 0);
13975 gcc_assert (DECL_P (decl2));
13976 new_var = decl2;
13977 type = TREE_TYPE (new_var);
13978 }
13979 else if (TREE_CODE (type) == REFERENCE_TYPE
13980 && TREE_CODE (TREE_TYPE (type)) == POINTER_TYPE)
13981 {
13982 type = TREE_TYPE (type);
13983 ref_to_ptr = true;
13984 }
13985 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), by_ref: false, ctx);
13986 x = fold_convert_loc (clause_loc, type, x);
13987 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13988 {
13989 tree bias = OMP_CLAUSE_SIZE (c);
13990 if (DECL_P (bias))
13991 bias = lookup_decl (var: bias, ctx);
13992 bias = fold_convert_loc (clause_loc, sizetype, bias);
13993 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13994 bias);
13995 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13996 TREE_TYPE (x), x, bias);
13997 }
13998 if (ref_to_array)
13999 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14000 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14001 if ((is_ref && !ref_to_array)
14002 || ref_to_ptr)
14003 {
14004 tree t = create_tmp_var_raw (type, get_name (var));
14005 gimple_add_tmp_var (t);
14006 TREE_ADDRESSABLE (t) = 1;
14007 gimple_seq_add_stmt (&new_body,
14008 gimple_build_assign (t, x));
14009 x = build_fold_addr_expr_loc (clause_loc, t);
14010 }
14011 gimple_seq_add_stmt (&new_body,
14012 gimple_build_assign (new_var, x));
14013 prev = NULL_TREE;
14014 }
14015 else if (OMP_CLAUSE_CHAIN (c)
14016 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
14017 == OMP_CLAUSE_MAP
14018 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14019 == GOMP_MAP_FIRSTPRIVATE_POINTER
14020 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
14021 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
14022 prev = c;
14023 break;
14024 case OMP_CLAUSE_PRIVATE:
14025 var = OMP_CLAUSE_DECL (c);
14026 if (is_variable_sized (expr: var))
14027 {
14028 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14029 tree new_var = lookup_decl (var, ctx);
14030 tree pvar = DECL_VALUE_EXPR (var);
14031 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
14032 pvar = TREE_OPERAND (pvar, 0);
14033 gcc_assert (DECL_P (pvar));
14034 tree new_pvar = lookup_decl (var: pvar, ctx);
14035 tree atmp = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN);
14036 tree al = size_int (DECL_ALIGN (var));
14037 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
14038 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14039 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
14040 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14041 gimple_seq_add_stmt (&new_body,
14042 gimple_build_assign (new_pvar, x));
14043 }
14044 else if (omp_privatize_by_reference (decl: var)
14045 && !is_gimple_omp_oacc (stmt: ctx->stmt))
14046 {
14047 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
14048 tree new_var = lookup_decl (var, ctx);
14049 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
14050 if (TREE_CONSTANT (x))
14051 break;
14052 else
14053 {
14054 tree atmp
14055 = builtin_decl_explicit (fncode: BUILT_IN_ALLOCA_WITH_ALIGN);
14056 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
14057 tree al = size_int (TYPE_ALIGN (rtype));
14058 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
14059 }
14060
14061 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
14062 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
14063 gimple_seq_add_stmt (&new_body,
14064 gimple_build_assign (new_var, x));
14065 }
14066 break;
14067 }
14068
14069 gimple_seq fork_seq = NULL;
14070 gimple_seq join_seq = NULL;
14071
14072 if (offloaded && is_gimple_omp_oacc (stmt: ctx->stmt))
14073 {
14074 /* If there are reductions on the offloaded region itself, treat
14075 them as a dummy GANG loop. */
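      /* The resulting reduction sequences land in FORK_SEQ and JOIN_SEQ,
	 which are added around the offloaded body below, just as they
	 would be for a real gang loop.  */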
14076 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
14077
14078 gcall *private_marker = lower_oacc_private_marker (ctx);
14079
14080 if (private_marker)
14081 gimple_call_set_arg (gs: private_marker, index: 2, arg: level);
14082
14083 lower_oacc_reductions (loc: gimple_location (g: ctx->stmt), clauses, level,
14084 inner: false, NULL, private_marker, NULL, fork_seq: &fork_seq,
14085 join_seq: &join_seq, ctx);
14086 }
14087
14088 gimple_seq_add_seq (&new_body, fork_seq);
14089 gimple_seq_add_seq (&new_body, tgt_body);
14090 gimple_seq_add_seq (&new_body, join_seq);
14091
14092 if (offloaded)
14093 {
14094 new_body = maybe_catch_exception (body: new_body);
14095 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
14096 }
14097 gimple_omp_set_body (gs: stmt, body: new_body);
14098 }
14099
14100 bind = gimple_build_bind (NULL, NULL,
14101 tgt_bind ? gimple_bind_block (bind_stmt: tgt_bind)
14102 : NULL_TREE);
14103 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14104 gimple_bind_add_seq (bind_stmt: bind, seq: ilist);
14105 gimple_bind_add_stmt (bind_stmt: bind, stmt);
14106 gimple_bind_add_seq (bind_stmt: bind, seq: olist);
14107
14108 pop_gimplify_context (NULL);
14109
14110 if (dep_bind)
14111 {
14112 gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_ilist);
14113 gimple_bind_add_stmt (bind_stmt: dep_bind, stmt: bind);
14114 gimple_bind_add_seq (bind_stmt: dep_bind, seq: dep_olist);
14115 pop_gimplify_context (dep_bind);
14116 }
14117}
14118
14119/* Expand code for an OpenMP teams directive. */
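/* A sketch of the code generated below (illustrative names; the real
   temporaries are compiler-generated):

     first = 1;
   retry:
     if (GOMP_teams4 (num_teams_lower, num_teams_upper, thread_limit,
		      first))
       {
	 first = 0;
	 <teams body, data sharing and reductions>;
	 goto retry;
       }

   i.e. the body is re-executed for as long as GOMP_teams4 returns
   true.  */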
14120
14121static void
14122lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14123{
14124 gomp_teams *teams_stmt = as_a <gomp_teams *> (p: gsi_stmt (i: *gsi_p));
14125 push_gimplify_context ();
14126
14127 tree block = make_node (BLOCK);
14128 gbind *bind = gimple_build_bind (NULL, NULL, block);
14129 gsi_replace (gsi_p, bind, true);
14130 gimple_seq bind_body = NULL;
14131 gimple_seq dlist = NULL;
14132 gimple_seq olist = NULL;
14133
14134 tree num_teams = omp_find_clause (clauses: gimple_omp_teams_clauses (gs: teams_stmt),
14135 kind: OMP_CLAUSE_NUM_TEAMS);
14136 tree num_teams_lower = NULL_TREE;
14137 if (num_teams == NULL_TREE)
14138 num_teams = build_int_cst (unsigned_type_node, 0);
14139 else
14140 {
14141 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14142 if (num_teams_lower)
14143 {
14144 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14145 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14146 fb_rvalue);
14147 }
14148 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14149 num_teams = fold_convert (unsigned_type_node, num_teams);
14150 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14151 }
14152 if (num_teams_lower == NULL_TREE)
14153 num_teams_lower = num_teams;
14154 tree thread_limit = omp_find_clause (clauses: gimple_omp_teams_clauses (gs: teams_stmt),
14155 kind: OMP_CLAUSE_THREAD_LIMIT);
14156 if (thread_limit == NULL_TREE)
14157 thread_limit = build_int_cst (unsigned_type_node, 0);
14158 else
14159 {
14160 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14161 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14162 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14163 fb_rvalue);
14164 }
14165 location_t loc = gimple_location (g: teams_stmt);
14166 tree decl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TEAMS4);
14167 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14168 tree first = create_tmp_var (rettype);
14169 gimple_seq_add_stmt (&bind_body,
14170 gimple_build_assign (first, build_one_cst (rettype)));
14171 tree llabel = create_artificial_label (loc);
14172 gimple_seq_add_stmt (&bind_body, gimple_build_label (label: llabel));
14173 gimple *call
14174 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14175 first);
14176 gimple_set_location (g: call, location: loc);
14177 tree temp = create_tmp_var (rettype);
14178 gimple_call_set_lhs (gs: call, lhs: temp);
14179 gimple_seq_add_stmt (&bind_body, call);
14180
14181 tree tlabel = create_artificial_label (loc);
14182 tree flabel = create_artificial_label (loc);
14183 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14184 tlabel, flabel);
14185 gimple_seq_add_stmt (&bind_body, cond);
14186 gimple_seq_add_stmt (&bind_body, gimple_build_label (label: tlabel));
14187 gimple_seq_add_stmt (&bind_body,
14188 gimple_build_assign (first, build_zero_cst (rettype)));
14189
14190 lower_rec_input_clauses (clauses: gimple_omp_teams_clauses (gs: teams_stmt),
14191 ilist: &bind_body, dlist: &dlist, ctx, NULL);
14192 lower_omp (gimple_omp_body_ptr (gs: teams_stmt), ctx);
14193 lower_reduction_clauses (clauses: gimple_omp_teams_clauses (gs: teams_stmt), stmt_seqp: &olist,
14194 NULL, ctx);
14195 gimple_seq_add_stmt (&bind_body, teams_stmt);
14196
14197 gimple_seq_add_seq (&bind_body, gimple_omp_body (gs: teams_stmt));
14198 gimple_omp_set_body (gs: teams_stmt, NULL);
14199 gimple_seq_add_seq (&bind_body, olist);
14200 gimple_seq_add_seq (&bind_body, dlist);
14201 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14202 gimple_seq_add_stmt (&bind_body, gimple_build_goto (dest: llabel));
14203 gimple_seq_add_stmt (&bind_body, gimple_build_label (label: flabel));
14204 gimple_bind_set_body (bind_stmt: bind, seq: bind_body);
14205
14206 pop_gimplify_context (bind);
14207
14208 gimple_bind_append_vars (bind_stmt: bind, vars: ctx->block_vars);
14209 BLOCK_VARS (block) = ctx->block_vars;
14210 if (BLOCK_VARS (block))
14211 TREE_USED (block) = 1;
14212}
14213
/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is being invoked
   outside of an OMP context, but with make_addressable_vars set.  */
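/* For example, a decl remapped for data sharing has its DECL_VALUE_EXPR
   point at a field of the receiver struct; any statement still mentioning
   the decl directly must be regimplified so that the value expression is
   expanded.  */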
14217
14218static tree
14219lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14220 void *data)
14221{
14222 tree t = *tp;
14223
14224 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14225 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14226 && data == NULL
14227 && DECL_HAS_VALUE_EXPR_P (t))
14228 return t;
14229
14230 if (make_addressable_vars
14231 && DECL_P (t)
14232 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14233 return t;
14234
14235 /* If a global variable has been privatized, TREE_CONSTANT on
14236 ADDR_EXPR might be wrong. */
14237 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14238 recompute_tree_invariant_for_addr_expr (t);
14239
14240 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14241 return NULL_TREE;
14242}
14243
14244/* Data to be communicated between lower_omp_regimplify_operands and
14245 lower_omp_regimplify_operands_p. */
14246
14247struct lower_omp_regimplify_operands_data
14248{
14249 omp_context *ctx;
14250 vec<tree> *decls;
14251};
14252
/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and temporarily adjust their
   DECL_VALUE_EXPRs if needed.  */
14256
14257static tree
14258lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14259 void *data)
14260{
14261 tree t = omp_member_access_dummy_var (decl: *tp);
14262 if (t)
14263 {
14264 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14265 lower_omp_regimplify_operands_data *ldata
14266 = (lower_omp_regimplify_operands_data *) wi->info;
14267 tree o = maybe_lookup_decl (var: t, ctx: ldata->ctx);
14268 if (o != t)
14269 {
14270 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14271 ldata->decls->safe_push (obj: *tp);
14272 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), from: t, to: o);
14273 SET_DECL_VALUE_EXPR (*tp, v);
14274 }
14275 }
14276 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14277 return NULL_TREE;
14278}
14279
14280/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14281 of omp_member_access_dummy_var vars during regimplification. */
14282
14283static void
14284lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14285 gimple_stmt_iterator *gsi_p)
14286{
14287 auto_vec<tree, 10> decls;
14288 if (ctx)
14289 {
14290 struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
14292 struct lower_omp_regimplify_operands_data data;
14293 data.ctx = ctx;
14294 data.decls = &decls;
14295 wi.info = &data;
14296 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14297 }
14298 gimple_regimplify_operands (stmt, gsi_p);
14299 while (!decls.is_empty ())
14300 {
14301 tree t = decls.pop ();
14302 tree v = decls.pop ();
14303 SET_DECL_VALUE_EXPR (t, v);
14304 }
14305}
14306
14307static void
14308lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14309{
14310 gimple *stmt = gsi_stmt (i: *gsi_p);
14311 struct walk_stmt_info wi;
14312 gcall *call_stmt;
14313
14314 if (gimple_has_location (g: stmt))
14315 input_location = gimple_location (g: stmt);
14316
14317 if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));
14319
14320 /* If we have issued syntax errors, avoid doing any heavy lifting.
14321 Just replace the OMP directives with a NOP to avoid
14322 confusing RTL expansion. */
14323 if (seen_error () && is_gimple_omp (stmt))
14324 {
14325 gsi_replace (gsi_p, gimple_build_nop (), true);
14326 return;
14327 }
14328
14329 switch (gimple_code (g: stmt))
14330 {
14331 case GIMPLE_COND:
14332 {
14333 gcond *cond_stmt = as_a <gcond *> (p: stmt);
14334 if ((ctx || make_addressable_vars)
14335 && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
14336 lower_omp_regimplify_p,
14337 ctx ? NULL : &wi, NULL)
14338 || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
14339 lower_omp_regimplify_p,
14340 ctx ? NULL : &wi, NULL)))
14341 lower_omp_regimplify_operands (ctx, stmt: cond_stmt, gsi_p);
14342 }
14343 break;
14344 case GIMPLE_CATCH:
14345 lower_omp (gimple_catch_handler_ptr (catch_stmt: as_a <gcatch *> (p: stmt)), ctx);
14346 break;
14347 case GIMPLE_EH_FILTER:
14348 lower_omp (gimple_eh_filter_failure_ptr (gs: stmt), ctx);
14349 break;
14350 case GIMPLE_TRY:
14351 lower_omp (gimple_try_eval_ptr (gs: stmt), ctx);
14352 lower_omp (gimple_try_cleanup_ptr (gs: stmt), ctx);
14353 break;
14354 case GIMPLE_ASSUME:
14355 lower_omp (gimple_assume_body_ptr (gs: stmt), ctx);
14356 break;
14357 case GIMPLE_TRANSACTION:
14358 lower_omp (gimple_transaction_body_ptr (transaction_stmt: as_a <gtransaction *> (p: stmt)),
14359 ctx);
14360 break;
14361 case GIMPLE_BIND:
14362 if (ctx && is_gimple_omp_oacc (stmt: ctx->stmt))
14363 {
14364 tree vars = gimple_bind_vars (bind_stmt: as_a <gbind *> (p: stmt));
14365 oacc_privatization_scan_decl_chain (ctx, decls: vars);
14366 }
14367 lower_omp (gimple_bind_body_ptr (bind_stmt: as_a <gbind *> (p: stmt)), ctx);
14368 maybe_remove_omp_member_access_dummy_vars (bind: as_a <gbind *> (p: stmt));
14369 break;
14370 case GIMPLE_OMP_PARALLEL:
14371 case GIMPLE_OMP_TASK:
14372 ctx = maybe_lookup_ctx (stmt);
14373 gcc_assert (ctx);
14374 if (ctx->cancellable)
14375 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14376 lower_omp_taskreg (gsi_p, ctx);
14377 break;
14378 case GIMPLE_OMP_FOR:
14379 ctx = maybe_lookup_ctx (stmt);
14380 gcc_assert (ctx);
14381 if (ctx->cancellable)
14382 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14383 lower_omp_for (gsi_p, ctx);
14384 break;
14385 case GIMPLE_OMP_SECTIONS:
14386 ctx = maybe_lookup_ctx (stmt);
14387 gcc_assert (ctx);
14388 if (ctx->cancellable)
14389 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
14390 lower_omp_sections (gsi_p, ctx);
14391 break;
14392 case GIMPLE_OMP_SCOPE:
14393 ctx = maybe_lookup_ctx (stmt);
14394 gcc_assert (ctx);
14395 lower_omp_scope (gsi_p, ctx);
14396 break;
14397 case GIMPLE_OMP_SINGLE:
14398 ctx = maybe_lookup_ctx (stmt);
14399 gcc_assert (ctx);
14400 lower_omp_single (gsi_p, ctx);
14401 break;
14402 case GIMPLE_OMP_STRUCTURED_BLOCK:
14403 /* We have already done error checking at this point, so these nodes
14404 can be completely removed and replaced with their body. */
14405 ctx = maybe_lookup_ctx (stmt);
14406 gcc_assert (ctx);
14407 lower_omp (gimple_omp_body_ptr (gs: stmt), ctx);
14408 gsi_replace_with_seq (gsi_p, gimple_omp_body (gs: stmt), true);
14409 break;
14410 case GIMPLE_OMP_MASTER:
14411 case GIMPLE_OMP_MASKED:
14412 ctx = maybe_lookup_ctx (stmt);
14413 gcc_assert (ctx);
14414 lower_omp_master (gsi_p, ctx);
14415 break;
14416 case GIMPLE_OMP_TASKGROUP:
14417 ctx = maybe_lookup_ctx (stmt);
14418 gcc_assert (ctx);
14419 lower_omp_taskgroup (gsi_p, ctx);
14420 break;
14421 case GIMPLE_OMP_ORDERED:
14422 ctx = maybe_lookup_ctx (stmt);
14423 gcc_assert (ctx);
14424 lower_omp_ordered (gsi_p, ctx);
14425 break;
14426 case GIMPLE_OMP_SCAN:
14427 ctx = maybe_lookup_ctx (stmt);
14428 gcc_assert (ctx);
14429 lower_omp_scan (gsi_p, ctx);
14430 break;
14431 case GIMPLE_OMP_CRITICAL:
14432 ctx = maybe_lookup_ctx (stmt);
14433 gcc_assert (ctx);
14434 lower_omp_critical (gsi_p, ctx);
14435 break;
14436 case GIMPLE_OMP_ATOMIC_LOAD:
14437 if ((ctx || make_addressable_vars)
14438 && walk_tree (gimple_omp_atomic_load_rhs_ptr (
14439 as_a <gomp_atomic_load *> (stmt)),
14440 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
14441 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14442 break;
14443 case GIMPLE_OMP_TARGET:
14444 ctx = maybe_lookup_ctx (stmt);
14445 gcc_assert (ctx);
14446 lower_omp_target (gsi_p, ctx);
14447 break;
14448 case GIMPLE_OMP_TEAMS:
14449 ctx = maybe_lookup_ctx (stmt);
14450 gcc_assert (ctx);
14451 if (gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt)))
14452 lower_omp_taskreg (gsi_p, ctx);
14453 else
14454 lower_omp_teams (gsi_p, ctx);
14455 break;
14456 case GIMPLE_CALL:
14457 tree fndecl;
14458 call_stmt = as_a <gcall *> (p: stmt);
14459 fndecl = gimple_call_fndecl (gs: call_stmt);
14460 if (fndecl
14461 && fndecl_built_in_p (node: fndecl, klass: BUILT_IN_NORMAL))
14462 switch (DECL_FUNCTION_CODE (decl: fndecl))
14463 {
14464 case BUILT_IN_GOMP_BARRIER:
14465 if (ctx == NULL)
14466 break;
14467 /* FALLTHRU */
14468 case BUILT_IN_GOMP_CANCEL:
14469 case BUILT_IN_GOMP_CANCELLATION_POINT:
14470 omp_context *cctx;
14471 cctx = ctx;
14472 if (gimple_code (g: cctx->stmt) == GIMPLE_OMP_SECTION)
14473 cctx = cctx->outer;
14474 gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
14475 if (!cctx->cancellable)
14476 {
14477 if (DECL_FUNCTION_CODE (decl: fndecl)
14478 == BUILT_IN_GOMP_CANCELLATION_POINT)
14479 {
14480 stmt = gimple_build_nop ();
14481 gsi_replace (gsi_p, stmt, false);
14482 }
14483 break;
14484 }
14485 if (DECL_FUNCTION_CODE (decl: fndecl) == BUILT_IN_GOMP_BARRIER)
14486 {
14487 fndecl = builtin_decl_explicit (fncode: BUILT_IN_GOMP_BARRIER_CANCEL);
14488 gimple_call_set_fndecl (gs: call_stmt, decl: fndecl);
14489 gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
14490 }
14491 tree lhs;
14492 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
14493 gimple_call_set_lhs (gs: call_stmt, lhs);
14494 tree fallthru_label;
14495 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
14496 gimple *g;
14497 g = gimple_build_label (label: fallthru_label);
14498 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14499 g = gimple_build_cond (NE_EXPR, lhs,
14500 fold_convert (TREE_TYPE (lhs),
14501 boolean_false_node),
14502 cctx->cancel_label, fallthru_label);
14503 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14504 break;
14505 default:
14506 break;
14507 }
14508 goto regimplify;
14509
14510 case GIMPLE_ASSIGN:
14511 for (omp_context *up = ctx; up; up = up->outer)
14512 {
14513 if (gimple_code (g: up->stmt) == GIMPLE_OMP_ORDERED
14514 || gimple_code (g: up->stmt) == GIMPLE_OMP_CRITICAL
14515 || gimple_code (g: up->stmt) == GIMPLE_OMP_TASKGROUP
14516 || gimple_code (g: up->stmt) == GIMPLE_OMP_SCOPE
14517 || gimple_code (g: up->stmt) == GIMPLE_OMP_SECTION
14518 || gimple_code (g: up->stmt) == GIMPLE_OMP_SCAN
14519 || (gimple_code (g: up->stmt) == GIMPLE_OMP_TARGET
14520 && (gimple_omp_target_kind (g: up->stmt)
14521 == GF_OMP_TARGET_KIND_DATA)))
14522 continue;
14523 else if (!up->lastprivate_conditional_map)
14524 break;
14525 tree lhs = get_base_address (t: gimple_assign_lhs (gs: stmt));
14526 if (TREE_CODE (lhs) == MEM_REF
14527 && DECL_P (TREE_OPERAND (lhs, 0))
14528 && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
14529 0))) == REFERENCE_TYPE)
14530 lhs = TREE_OPERAND (lhs, 0);
14531 if (DECL_P (lhs))
14532 if (tree *v = up->lastprivate_conditional_map->get (k: lhs))
14533 {
14534 tree clauses;
14535 if (up->combined_into_simd_safelen1)
14536 {
14537 up = up->outer;
14538 if (gimple_code (g: up->stmt) == GIMPLE_OMP_SCAN)
14539 up = up->outer;
14540 }
14541 if (gimple_code (g: up->stmt) == GIMPLE_OMP_FOR)
14542 clauses = gimple_omp_for_clauses (gs: up->stmt);
14543 else
14544 clauses = gimple_omp_sections_clauses (gs: up->stmt);
14545 tree c = omp_find_clause (clauses, kind: OMP_CLAUSE__CONDTEMP_);
14546 if (!OMP_CLAUSE__CONDTEMP__ITER (c))
14547 c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
14548 kind: OMP_CLAUSE__CONDTEMP_);
14549 gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
14550 gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
14551 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
14552 }
14553 }
14554 /* FALLTHRU */
14555
14556 default:
14557 regimplify:
14558 if ((ctx || make_addressable_vars)
14559 && walk_gimple_op (stmt, lower_omp_regimplify_p,
14560 ctx ? NULL : &wi))
14561 {
	  /* Just remove clobbers.  This should happen only if we have
	     "privatized" local addressable variables in SIMD regions;
	     the clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF-based clobber would create worse code than we get
	     with the clobber dropped.  */
14568 if (gimple_clobber_p (s: stmt))
14569 {
14570 gsi_replace (gsi_p, gimple_build_nop (), true);
14571 break;
14572 }
14573 lower_omp_regimplify_operands (ctx, stmt, gsi_p);
14574 }
14575 break;
14576 }
14577}
14578
14579static void
14580lower_omp (gimple_seq *body, omp_context *ctx)
14581{
14582 location_t saved_location = input_location;
14583 gimple_stmt_iterator gsi;
14584 for (gsi = gsi_start (seq&: *body); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
14585 lower_omp_1 (gsi_p: &gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now.  */
14588 if (target_nesting_level || taskreg_nesting_level)
14589 for (gsi = gsi_start (seq&: *body); !gsi_end_p (i: gsi); gsi_next (i: &gsi))
14590 fold_stmt (&gsi);
14591 input_location = saved_location;
14592}
14593
14594/* Main entry point. */
14595
14596static unsigned int
14597execute_lower_omp (void)
14598{
14599 gimple_seq body;
14600 int i;
14601 omp_context *ctx;
14602
14603 /* This pass always runs, to provide PROP_gimple_lomp.
14604 But often, there is nothing to do. */
14605 if (flag_openacc == 0 && flag_openmp == 0
14606 && flag_openmp_simd == 0)
14607 return 0;
14608
14609 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
14610 delete_omp_context);
14611
14612 body = gimple_body (current_function_decl);
14613
14614 scan_omp (body_p: &body, NULL);
14615 gcc_assert (taskreg_nesting_level == 0);
14616 FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
14617 finish_taskreg_scan (ctx);
14618 taskreg_contexts.release ();
14619
14620 if (all_contexts->root)
14621 {
14622 if (make_addressable_vars)
14623 push_gimplify_context ();
14624 lower_omp (body: &body, NULL);
14625 if (make_addressable_vars)
14626 pop_gimplify_context (NULL);
14627 }
14628
14629 if (all_contexts)
14630 {
14631 splay_tree_delete (all_contexts);
14632 all_contexts = NULL;
14633 }
14634 BITMAP_FREE (make_addressable_vars);
14635 BITMAP_FREE (global_nonaddressable_vars);
14636
  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization: they
     aren't needed for debug info or anything else, they have already been
     replaced everywhere in the IL, and they cause problems with LTO.  */
14641 if (DECL_ARGUMENTS (current_function_decl)
14642 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
14643 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
14644 == POINTER_TYPE))
14645 remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
14646
14647 for (auto task_stmt : task_cpyfns)
14648 finalize_task_copyfn (task_stmt);
14649 task_cpyfns.release ();
14650 return 0;
14651}
14652
14653namespace {
14654
const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
14667
14668class pass_lower_omp : public gimple_opt_pass
14669{
14670public:
14671 pass_lower_omp (gcc::context *ctxt)
14672 : gimple_opt_pass (pass_data_lower_omp, ctxt)
14673 {}
14674
14675 /* opt_pass methods: */
14676 unsigned int execute (function *) final override
14677 {
14678 return execute_lower_omp ();
14679 }
14680
14681}; // class pass_lower_omp
14682
14683} // anon namespace
14684
14685gimple_opt_pass *
14686make_pass_lower_omp (gcc::context *ctxt)
14687{
14688 return new pass_lower_omp (ctxt);
14689}
14690
14691/* The following is a utility to diagnose structured block violations.
14692 It is not part of the "omplower" pass, as that's invoked too late. It
14693 should be invoked by the respective front ends after gimplification. */
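/* For example, given hypothetical user code such as

     #pragma omp parallel
     {
       goto out;
     }
     out:;

   the goto is diagnosed ("invalid branch to/from OpenMP structured
   block") and replaced by a GIMPLE_NOP.  */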
14694
14695static splay_tree all_labels;
14696
14697/* Check for mismatched contexts and generate an error if needed. Return
14698 true if an error is detected. */
14699
14700static bool
14701diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
14702 gimple *branch_ctx, gimple *label_ctx)
14703{
14704 gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
14705 gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
14706
14707 if (label_ctx == branch_ctx)
14708 return false;
14709
14710 const char* kind = NULL;
14711
14712 if (flag_openacc)
14713 {
14714 if ((branch_ctx && is_gimple_omp_oacc (stmt: branch_ctx))
14715 || (label_ctx && is_gimple_omp_oacc (stmt: label_ctx)))
14716 {
14717 gcc_checking_assert (kind == NULL);
14718 kind = "OpenACC";
14719 }
14720 }
14721 if (kind == NULL)
14722 {
14723 gcc_checking_assert (flag_openmp || flag_openmp_simd);
14724 kind = "OpenMP";
14725 }
14726
14727 /* Previously we kept track of the label's entire context in diagnose_sb_[12]
14728 so we could traverse it and issue a correct "exit" or "enter" error
14729 message upon a structured block violation.
14730
14731 We built the context by building a list with tree_cons'ing, but there is
14732 no easy counterpart in gimple tuples. It seems like far too much work
14733 for issuing exit/enter error messages. If someone really misses the
14734 distinct error message... patches welcome. */
14735
14736#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
14740 if (branch_ctx == NULL)
14741 exit_p = false;
14742 else
14743 {
14744 while (label_ctx)
14745 {
14746 if (TREE_VALUE (label_ctx) == branch_ctx)
14747 {
14748 exit_p = false;
14749 break;
14750 }
14751 label_ctx = TREE_CHAIN (label_ctx);
14752 }
14753 }
14754
14755 if (exit_p)
14756 error ("invalid exit from %s structured block", kind);
14757 else
14758 error ("invalid entry to %s structured block", kind);
14759#endif
14760
14761 /* If it's obvious we have an invalid entry, be specific about the error. */
14762 if (branch_ctx == NULL)
14763 error ("invalid entry to %s structured block", kind);
14764 else
14765 {
14766 /* Otherwise, be vague and lazy, but efficient. */
14767 error ("invalid branch to/from %s structured block", kind);
14768 }
14769
14770 gsi_replace (gsi_p, gimple_build_nop (), false);
14771 return true;
14772}
14773
14774/* Pass 1: Create a minimal tree of structured blocks, and record
14775 where each label is found. */
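/* For example, a label defined inside "#pragma omp single" is recorded
   with the GIMPLE_OMP_SINGLE statement as its context, while a label
   outside of any construct is recorded with a NULL context.  */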
14776
14777static tree
14778diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14779 struct walk_stmt_info *wi)
14780{
14781 gimple *context = (gimple *) wi->info;
14782 gimple *inner_context;
14783 gimple *stmt = gsi_stmt (i: *gsi_p);
14784
14785 *handled_ops_p = true;
14786
14787 switch (gimple_code (g: stmt))
14788 {
14789 WALK_SUBSTMTS;
14790
14791 case GIMPLE_OMP_PARALLEL:
14792 case GIMPLE_OMP_TASK:
14793 case GIMPLE_OMP_SCOPE:
14794 case GIMPLE_OMP_SECTIONS:
14795 case GIMPLE_OMP_SINGLE:
14796 case GIMPLE_OMP_SECTION:
14797 case GIMPLE_OMP_STRUCTURED_BLOCK:
14798 case GIMPLE_OMP_MASTER:
14799 case GIMPLE_OMP_MASKED:
14800 case GIMPLE_OMP_ORDERED:
14801 case GIMPLE_OMP_SCAN:
14802 case GIMPLE_OMP_CRITICAL:
14803 case GIMPLE_OMP_TARGET:
14804 case GIMPLE_OMP_TEAMS:
14805 case GIMPLE_OMP_TASKGROUP:
14806 /* The minimal context here is just the current OMP construct. */
14807 inner_context = stmt;
14808 wi->info = inner_context;
14809 walk_gimple_seq (gimple_omp_body (gs: stmt), diagnose_sb_1, NULL, wi);
14810 wi->info = context;
14811 break;
14812
14813 case GIMPLE_OMP_FOR:
14814 inner_context = stmt;
14815 wi->info = inner_context;
14816 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14817 walk them. */
14818 walk_gimple_seq (gimple_omp_for_pre_body (gs: stmt),
14819 diagnose_sb_1, NULL, wi);
14820 walk_gimple_seq (gimple_omp_body (gs: stmt), diagnose_sb_1, NULL, wi);
14821 wi->info = context;
14822 break;
14823
14824 case GIMPLE_LABEL:
14825 splay_tree_insert (all_labels,
14826 (splay_tree_key) gimple_label_label (
14827 gs: as_a <glabel *> (p: stmt)),
14828 (splay_tree_value) context);
14829 break;
14830
14831 default:
14832 break;
14833 }
14834
14835 return NULL_TREE;
14836}
14837
14838/* Pass 2: Check each branch and see if its context differs from that of
14839 the destination label's context. */
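/* Note that a GIMPLE_RETURN seen inside any construct is always
   diagnosed: its destination, the function exit, has a NULL label
   context, which cannot match the non-NULL branch context.  */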
14840
14841static tree
14842diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14843 struct walk_stmt_info *wi)
14844{
14845 gimple *context = (gimple *) wi->info;
14846 splay_tree_node n;
14847 gimple *stmt = gsi_stmt (i: *gsi_p);
14848
14849 *handled_ops_p = true;
14850
14851 switch (gimple_code (g: stmt))
14852 {
14853 WALK_SUBSTMTS;
14854
14855 case GIMPLE_OMP_PARALLEL:
14856 case GIMPLE_OMP_TASK:
14857 case GIMPLE_OMP_SCOPE:
14858 case GIMPLE_OMP_SECTIONS:
14859 case GIMPLE_OMP_SINGLE:
14860 case GIMPLE_OMP_SECTION:
14861 case GIMPLE_OMP_STRUCTURED_BLOCK:
14862 case GIMPLE_OMP_MASTER:
14863 case GIMPLE_OMP_MASKED:
14864 case GIMPLE_OMP_ORDERED:
14865 case GIMPLE_OMP_SCAN:
14866 case GIMPLE_OMP_CRITICAL:
14867 case GIMPLE_OMP_TARGET:
14868 case GIMPLE_OMP_TEAMS:
14869 case GIMPLE_OMP_TASKGROUP:
14870 wi->info = stmt;
14871 walk_gimple_seq_mod (gimple_omp_body_ptr (gs: stmt), diagnose_sb_2, NULL, wi);
14872 wi->info = context;
14873 break;
14874
14875 case GIMPLE_OMP_FOR:
14876 wi->info = stmt;
14877 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14878 walk them. */
14879 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (gs: stmt),
14880 diagnose_sb_2, NULL, wi);
14881 walk_gimple_seq_mod (gimple_omp_body_ptr (gs: stmt), diagnose_sb_2, NULL, wi);
14882 wi->info = context;
14883 break;
14884
14885 case GIMPLE_COND:
14886 {
14887 gcond *cond_stmt = as_a <gcond *> (p: stmt);
14888 tree lab = gimple_cond_true_label (gs: cond_stmt);
14889 if (lab)
14890 {
14891 n = splay_tree_lookup (all_labels,
14892 (splay_tree_key) lab);
14893 diagnose_sb_0 (gsi_p, branch_ctx: context,
14894 label_ctx: n ? (gimple *) n->value : NULL);
14895 }
14896 lab = gimple_cond_false_label (gs: cond_stmt);
14897 if (lab)
14898 {
14899 n = splay_tree_lookup (all_labels,
14900 (splay_tree_key) lab);
14901 diagnose_sb_0 (gsi_p, branch_ctx: context,
14902 label_ctx: n ? (gimple *) n->value : NULL);
14903 }
14904 }
14905 break;
14906
14907 case GIMPLE_GOTO:
14908 {
14909 tree lab = gimple_goto_dest (gs: stmt);
14910 if (TREE_CODE (lab) != LABEL_DECL)
14911 break;
14912
14913 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14914 diagnose_sb_0 (gsi_p, branch_ctx: context, label_ctx: n ? (gimple *) n->value : NULL);
14915 }
14916 break;
14917
14918 case GIMPLE_SWITCH:
14919 {
14920 gswitch *switch_stmt = as_a <gswitch *> (p: stmt);
14921 unsigned int i;
14922 for (i = 0; i < gimple_switch_num_labels (gs: switch_stmt); ++i)
14923 {
14924 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
14925 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
14926 if (n && diagnose_sb_0 (gsi_p, branch_ctx: context, label_ctx: (gimple *) n->value))
14927 break;
14928 }
14929 }
14930 break;
14931
14932 case GIMPLE_RETURN:
14933 diagnose_sb_0 (gsi_p, branch_ctx: context, NULL);
14934 break;
14935
14936 default:
14937 break;
14938 }
14939
14940 return NULL_TREE;
14941}
14942
14943static unsigned int
14944diagnose_omp_structured_block_errors (void)
14945{
14946 struct walk_stmt_info wi;
14947 gimple_seq body = gimple_body (current_function_decl);
14948
14949 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14950
  memset (&wi, 0, sizeof (wi));
14952 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14953
  memset (&wi, 0, sizeof (wi));
14955 wi.want_locations = true;
14956 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14957
14958 gimple_set_body (current_function_decl, body);
14959
14960 splay_tree_delete (all_labels);
14961 all_labels = NULL;
14962
14963 return 0;
14964}
14965
14966namespace {
14967
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
14980
14981class pass_diagnose_omp_blocks : public gimple_opt_pass
14982{
14983public:
14984 pass_diagnose_omp_blocks (gcc::context *ctxt)
14985 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14986 {}
14987
14988 /* opt_pass methods: */
14989 bool gate (function *) final override
14990 {
14991 return flag_openacc || flag_openmp || flag_openmp_simd;
14992 }
14993 unsigned int execute (function *) final override
14994 {
14995 return diagnose_omp_structured_block_errors ();
14996 }
14997
14998}; // class pass_diagnose_omp_blocks
14999
15000} // anon namespace
15001
15002gimple_opt_pass *
15003make_pass_diagnose_omp_blocks (gcc::context *ctxt)
15004{
15005 return new pass_diagnose_omp_blocks (ctxt);
15006}
15007
15008
15009#include "gt-omp-low.h"