/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2023 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "diagnostic.h"		/* For errorcount.  */
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

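/* Illustrative note (not from the original sources): entries in a
   gimplify_omp_ctx's splay tree combine one data-sharing class with
   modifier flags.  For instance, a temporary created inside a parallel
   region is recorded as GOVD_LOCAL | GOVD_SEEN (see gimple_add_tmp_var
   below), and a map(always, to: v) clause would be recorded roughly as
   GOVD_MAP | GOVD_MAP_ALWAYS_TO | GOVD_EXPLICIT.  */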

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,				     /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,	     /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,	     /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,	     /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,	     /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2, /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* w/ Fortran's target attr, implicit mapping, only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  If NOT_GIMPLE_REG, mark it as such.  */

static tree
lookup_tmp_var (tree val, bool is_formal, bool not_gimple_reg)
{
  tree ret;

  /* We cannot mark a formal temporary with DECL_NOT_GIMPLE_REG_P.  */
  gcc_assert (!is_formal || !not_gimple_reg);

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    {
      ret = create_tmp_from_val (val);
      DECL_NOT_GIMPLE_REG_P (ret) = not_gimple_reg;
    }
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

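/* For instance (illustrative only): when the same GENERIC subexpression,
   say a + b, is gimplified twice as a formal temporary at -O1 or above,
   the second lookup_tmp_var call finds the hash table entry created for
   the first one, so both uses share a single "D.NNNN = a + b" temporary
   rather than creating two.  */
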
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa, bool not_gimple_reg)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal, not_gimple_reg);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true, false);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa, false);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

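/* A small illustration (not from the original sources): if a front-end
   builds a single ARRAY_REF node A[i] and reuses it for both operands of
   "A[i] = A[i] + 1", the walk below encounters the node twice; the first
   visit sets TREE_VISITED, and the second visit replaces the shared
   reference with a fresh copy, so each occurrence can later be gimplified
   in-place independently.  */
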
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}

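/* For example (illustrative only): a STATEMENT_LIST of the form
   { DEBUG_BEGIN_STMT; DEBUG_BEGIN_STMT; x = 1; } reports the location
   of "x = 1", and does so recursively if "x = 1" is itself such a
   list.  */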

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

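/* Illustrative example (not from the original sources): for a GNU
   statement expression such as "({ int i = f (); i + 1; })" used as a
   value, the BIND_EXPR wrapper has type int; voidify_wrapper_expr
   rewrites the final value statement "i + 1" into roughly
   "retval = i + 1", gives every intervening wrapper void_type_node,
   and returns the new "retval" temporary.  */
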
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

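/* The two calls built above show up in GIMPLE dumps roughly as:

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   with the save placed at the start of a bind and the restore in its
   try-finally cleanup (see gimplify_bind_expr below).  */
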
/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of the variable DECL.  The call is
   inserted at the location identified by the iterator IT; the BEFORE flag
   selects whether it goes before or after that point.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES, poisoning or
   unpoisoning them depending on the POISON flag.  The created statements
   are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

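/* In -fsanitize=address dumps the internal calls built above appear
   roughly as:

     .ASAN_MARK (POISON, &x, 4);
     .ASAN_MARK (UNPOISON, &x, 4);

   (shown here for a hypothetical 4-byte variable x leaving and
   re-entering scope).  */
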
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  tree attr;

	  if (flag_openmp
	      && !is_global_var (t)
	      && DECL_CONTEXT (t) == current_function_decl
	      && TREE_USED (t)
	      && (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
		 != NULL_TREE)
	    {
	      gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
	      tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
	      tree align = TREE_VALUE (TREE_VALUE (attr));
	      /* Allocate directives that appear in a target region must
		 specify an allocator clause unless a requires directive with
		 the dynamic_allocators clause is present in the same
		 compilation unit.  */
	      bool missing_dyn_alloc = false;
	      if (alloc == NULL_TREE
		  && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
		      == 0))
		{
		  /* This comes too early for omp_discover_declare_target...,
		     but should at least catch the most common cases.  */
		  missing_dyn_alloc
		    = cgraph_node::get (current_function_decl)->offloadable;
		  for (struct gimplify_omp_ctx *ctx2 = ctx;
		       ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
		    if (ctx2->code == OMP_TARGET)
		      missing_dyn_alloc = true;
		}
	      if (missing_dyn_alloc)
		error_at (DECL_SOURCE_LOCATION (t),
			  "%<allocate%> directive for %qD inside a target "
			  "region must specify an %<allocator%> clause", t);
	      /* Skip for omp_default_mem_alloc (= 1),
		 unless align is present.  */
	      else if (!errorcount
		       && (align != NULL_TREE
			   || alloc == NULL_TREE
			   || !integer_onep (alloc)))
		{
		  /* Fortran might already use a pointer type internally;
		     use that pointer except for type(C_ptr) and type(C_funptr);
		     note that normal proc pointers are rejected.  */
		  tree type = TREE_TYPE (t);
		  tree tmp, v;
		  if (lang_GNU_Fortran ()
		      && POINTER_TYPE_P (type)
		      && TREE_TYPE (type) != void_type_node
		      && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
		    {
		      type = TREE_TYPE (type);
		      v = t;
		    }
		  else
		    {
		      tmp = build_pointer_type (type);
		      v = create_tmp_var (tmp, get_name (t));
		      DECL_IGNORED_P (v) = 0;
		      DECL_ATTRIBUTES (v)
			= tree_cons (get_identifier ("omp allocate var"),
				     build_tree_list (NULL_TREE, t),
				     remove_attribute ("omp allocate",
						       DECL_ATTRIBUTES (t)));
		      tmp = build_fold_indirect_ref (v);
		      TREE_THIS_NOTRAP (tmp) = 1;
		      SET_DECL_VALUE_EXPR (t, tmp);
		      DECL_HAS_VALUE_EXPR_P (t) = 1;
		    }
		  tree sz = TYPE_SIZE_UNIT (type);
		  /* The size to use in Fortran might not match TYPE_SIZE_UNIT;
		     hence, for some decls, a size variable is saved in the
		     attributes; use it, if available.  */
		  if (TREE_CHAIN (TREE_VALUE (attr))
		      && TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))
		      && TREE_PURPOSE (
			   TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)))))
		    {
		      sz = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
		      sz = TREE_PURPOSE (sz);
		    }
		  if (alloc == NULL_TREE)
		    alloc = build_zero_cst (ptr_type_node);
		  if (align == NULL_TREE)
		    align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (t));
		  else
		    align = build_int_cst (size_type_node,
					   MAX (tree_to_uhwi (align),
						DECL_ALIGN_UNIT (t)));
		  location_t loc = DECL_SOURCE_LOCATION (t);
		  tmp = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tmp = build_call_expr_loc (loc, tmp, 3, align, sz, alloc);
		  tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					 fold_convert (TREE_TYPE (v), tmp));
		  gcc_assert (BIND_EXPR_BODY (bind_expr) != NULL_TREE);
		  /* Ensure that either TREE_CHAIN (TREE_VALUE (attr)) is set
		     and GOMP_FREE added here or that DECL_HAS_VALUE_EXPR_P (t)
		     is set, used in a condition much further below.  */
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (t)
			      || TREE_CHAIN (TREE_VALUE (attr)));
		  if (TREE_CHAIN (TREE_VALUE (attr)))
		    {
		      /* Fortran is special as it does not properly nest
			 declarations in blocks.  And as there is no
			 initializer, there is also no expression to look for.
			 Hence, the FE makes the statement list of the
			 try-finally block available.  We can put the GOMP_alloc
			 at the top, unless an allocator or size expression
			 requires to put it afterward; note that the size is
			 always later in generated code; for strings, no
			 size expr but still an expr might be available.
			 As LTO does not handle a statement list, 'sl' has
			 to be removed; done so by removing the attribute.  */
		      DECL_ATTRIBUTES (t)
			= remove_attribute ("omp allocate",
					    DECL_ATTRIBUTES (t));
		      tree sl = TREE_PURPOSE (TREE_CHAIN (TREE_VALUE (attr)));
		      tree_stmt_iterator e = tsi_start (sl);
		      tree needle = NULL_TREE;
		      if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			{
			  needle = TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr)));
			  needle = (TREE_VALUE (needle) ? TREE_VALUE (needle)
							: sz);
			}
		      else if (TREE_CHAIN (TREE_CHAIN (TREE_VALUE (attr))))
			needle = sz;
		      else if (DECL_P (alloc) && DECL_ARTIFICIAL (alloc))
			needle = alloc;

		      if (needle != NULL_TREE)
			{
			  while (!tsi_end_p (e))
			    {
			      if (*e == needle
				  || (TREE_CODE (*e) == MODIFY_EXPR
				      && TREE_OPERAND (*e, 0) == needle))
				break;
			      ++e;
			    }
			  gcc_assert (!tsi_end_p (e));
			}
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);

		      /* As the cleanup is in BIND_EXPR_BODY, GOMP_free is added
			 here; for C/C++ it will be added in the 'cleanup'
			 section after gimplification.  But Fortran already has
			 a try-finally block.  */
		      sl = TREE_VALUE (TREE_CHAIN (TREE_VALUE (attr)));
		      e = tsi_last (sl);
		      tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
		      tmp = build_call_expr_loc (EXPR_LOCATION (*e), tmp, 2, v,
						 build_zero_cst (ptr_type_node));
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		      tmp = build_clobber (TREE_TYPE (v), CLOBBER_EOL);
		      tmp = fold_build2_loc (loc, MODIFY_EXPR, TREE_TYPE (v), v,
					     fold_convert (TREE_TYPE (v), tmp));
		      ++e;
		      tsi_link_after (&e, tmp, TSI_SAME_STMT);
		    }
		  else
		    {
		      gcc_assert (TREE_CODE (BIND_EXPR_BODY (bind_expr))
				  == STATEMENT_LIST);
		      tree_stmt_iterator e;
		      e = tsi_start (BIND_EXPR_BODY (bind_expr));
		      while (!tsi_end_p (e))
			{
			  if ((TREE_CODE (*e) == DECL_EXPR
			       && TREE_OPERAND (*e, 0) == t)
			      || (TREE_CODE (*e) == CLEANUP_POINT_EXPR
				  && (TREE_CODE (TREE_OPERAND (*e, 0))
				      == DECL_EXPR)
				  && (TREE_OPERAND (TREE_OPERAND (*e, 0), 0)
				      == t)))
			    break;
			  ++e;
			}
		      gcc_assert (!tsi_end_p (e));
		      tsi_link_before (&e, tmp, TSI_SAME_STMT);
		    }
		}
	    }

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (flag_openmp
	      && DECL_HAS_VALUE_EXPR_P (t)
	      && TREE_USED (t)
	      && lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
	    {
	      /* For Fortran, TREE_CHAIN (TREE_VALUE (attr)) is set, which
		 causes that the GOMP_free call is already added above;
		 and "omp allocate" is removed from DECL_ATTRIBUTES.  */
	      tree v = TREE_OPERAND (DECL_VALUE_EXPR (t), 0);
	      tree tmp = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
	      tmp = build_call_expr_loc (end_locus, tmp, 2, v,
					 build_zero_cst (ptr_type_node));
	      gimplify_and_add (tmp, &cleanup);
	      gimple *clobber_stmt;
	      tmp = build_clobber (TREE_TYPE (v), CLOBBER_EOL);
	      clobber_stmt = gimple_build_assign (v, tmp);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (INDIRECT_REF_P (key))
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}


/* Maybe add an early return predict statement to the PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, predict the early return as
     not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

1789/* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1790 GIMPLE value, it is assigned to a new temporary and the statement is
1791 re-written to return the temporary.
1792
1793 PRE_P points to the sequence where side effects that must happen before
1794 STMT should be stored. */
1795
1796static enum gimplify_status
1797gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1798{
1799 greturn *ret;
1800 tree ret_expr = TREE_OPERAND (stmt, 0);
1801 tree result_decl, result;
1802
1803 if (ret_expr == error_mark_node)
1804 return GS_ERROR;
1805
1806 if (!ret_expr
1807 || TREE_CODE (ret_expr) == RESULT_DECL)
1808 {
1809 maybe_add_early_return_predict_stmt (pre_p);
1810 greturn *ret = gimple_build_return (ret_expr);
1811 copy_warning (ret, stmt);
1812 gimplify_seq_add_stmt (seq_p: pre_p, gs: ret);
1813 return GS_ALL_DONE;
1814 }
1815
1816 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1817 result_decl = NULL_TREE;
1818 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1819 {
1820 /* Used in C++ for handling EH cleanup of the return value if a local
1821 cleanup throws. Assume the front-end knows what it's doing. */
1822 result_decl = DECL_RESULT (current_function_decl);
1823 /* But crash if we end up trying to modify ret_expr below. */
1824 ret_expr = NULL_TREE;
1825 }
1826 else
1827 {
1828 result_decl = TREE_OPERAND (ret_expr, 0);
1829
1830 /* See through a return by reference. */
1831 if (INDIRECT_REF_P (result_decl))
1832 result_decl = TREE_OPERAND (result_decl, 0);
1833
1834 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1835 || TREE_CODE (ret_expr) == INIT_EXPR)
1836 && TREE_CODE (result_decl) == RESULT_DECL);
1837 }
1838
1839 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1840 Recall that aggregate_value_p is FALSE for any aggregate type that is
1841 returned in registers. If we're returning values in registers, then
1842 we don't want to extend the lifetime of the RESULT_DECL, particularly
1843 across another call. In addition, for those aggregates for which
1844 hard_function_value generates a PARALLEL, we'll die during normal
1845 expansion of structure assignments; there's special code in expand_return
1846 to handle this case that does not exist in expand_expr. */
1847 if (!result_decl)
1848 result = NULL_TREE;
1849 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1850 {
1851 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1852 {
1853 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1854 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1855 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1856 should be effectively allocated by the caller, i.e. all calls to
1857 this function must be subject to the Return Slot Optimization. */
1858 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1859 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1860 }
1861 result = result_decl;
1862 }
1863 else if (gimplify_ctxp->return_temp)
1864 result = gimplify_ctxp->return_temp;
1865 else
1866 {
1867 result = create_tmp_reg (TREE_TYPE (result_decl));
1868
1869 /* ??? With complex control flow (usually involving abnormal edges),
1870 we can wind up warning about an uninitialized value for this. Due
1871 to how this variable is constructed and initialized, this is never
1872 true. Give up and never warn. */
1873 suppress_warning (result, OPT_Wuninitialized);
1874
1875 gimplify_ctxp->return_temp = result;
1876 }
1877
1878 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1879 Then gimplify the whole thing. */
1880 if (result != result_decl)
1881 TREE_OPERAND (ret_expr, 0) = result;
1882
1883 gimplify_and_add (TREE_OPERAND (stmt, 0), seq_p: pre_p);
1884
1885 maybe_add_early_return_predict_stmt (pre_p);
1886 ret = gimple_build_return (result);
1887 copy_warning (ret, stmt);
1888 gimplify_seq_add_stmt (seq_p: pre_p, gs: ret);
1889
1890 return GS_ALL_DONE;
1891}
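
/* Schematically, a GENERIC return of a non-trivial expression such as

     return a + b;

   is therefore rewritten into something like

     retval.0 = a + b;
     return retval.0;

   (the temporary name is illustrative); the single temporary recorded in
   gimplify_ctxp->return_temp is shared by all return statements in the
   function.  */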

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
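
/* As a sketch (names are illustrative), a declaration such as

     int a[n];

   is turned into roughly

     a.1 = __builtin_alloca_with_align (SIZE, ALIGN);

   with DECL_VALUE_EXPR (a) set to *a.1, so every later use of A is
   rewritten as an indirection through the temporary pointer.  */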

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (DECL_NAME (decl));
  else
    {
      char decl_name_anonymous[3 + (HOST_BITS_PER_INT + 2) / 3];
      sprintf (decl_name_anonymous, "D.%u", DECL_UID (decl));
      decl_name = build_string_literal (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
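
/* E.g. for 'int i;' compiled with -ftrivial-auto-var-init=zero this emits,
   approximately,

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   The internal call is kept until RTL expansion so that uninitialized-use
   warnings can still see that I had no real initializer.  */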

/* Generate padding initialization for automatic variable DECL.
   C guarantees that brace-initialization with fewer initializers than
   members of an aggregate will initialize the rest of the aggregate
   as if it were static initialization.  In turn, static initialization
   guarantees that padding is initialized to zero.  So, we always
   initialize padding to zeroes regardless of INIT_TYPE.
   To do the padding initialization, we insert a call to
   __builtin_clear_padding (&decl, 0, for_auto_init = true).
   Note, we add an additional dummy argument for __builtin_clear_padding,
   'for_auto_init', to distinguish whether this call is for automatic
   variable initialization or not.  */

static void
gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
				      gimple_seq *seq_p)
{
  tree addr_of_decl = NULL_TREE;
  tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);

  if (is_vla)
    {
      /* The temporary address variable for this vla should be
	 created in gimplify_vla_decl.  */
      gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
      gcc_assert (INDIRECT_REF_P (DECL_VALUE_EXPR (decl)));
      addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
    }
  else
    {
      mark_addressable (decl);
      addr_of_decl = build_fold_addr_expr (decl);
    }

  gimple *call = gimple_build_call (fn, 2, addr_of_decl,
				    build_one_cst (TREE_TYPE (addr_of_decl)));
  gimplify_seq_add_stmt (seq_p, call);
}
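
/* E.g. for a padded 'struct { char c; int i; } s;' this appends,
   schematically,

     __builtin_clear_padding (&s, 1);

   where the second argument is the dummy 'for_auto_init' flag built
   above with build_one_cst.  */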

/* Return true if DECL needs to be automatically initialized by the
   compiler.  */
static bool
is_var_need_auto_init (tree decl)
{
  if (auto_var_p (decl)
      && (TREE_CODE (decl) != VAR_DECL
	  || !DECL_HARD_REGISTER (decl))
      && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
      && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
      && !OPAQUE_TYPE_P (TREE_TYPE (decl))
      && !is_empty_type (TREE_TYPE (decl)))
    return true;
  return false;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;
      /* Check whether a decl has a FE-created VALUE_EXPR here BEFORE
	 gimplify_vla_decl creates a VALUE_EXPR for a vla decl.
	 If the decl has a VALUE_EXPR that was created by the FE (usually
	 the C++ FE), it's a proxy variable, and the FE has already
	 initialized its VALUE_EXPR, so we should not initialize it again.  */
      bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);

      poly_uint64 size;
      if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && maybe_gt (size,
			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp
	  /* GNAT introduces temporaries to hold return values of calls in
	     initializers of variables defined in other units, so the
	     declaration of the variable is discarded completely.  We do not
	     want to issue poison calls for such dropped variables.  */
	  && (DECL_SEEN_IN_BIND_EXPR_P (decl)
	      || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	      /* Clear TREE_READONLY if we really have an initialization.  */
	      if (!DECL_INITIAL (decl)
		  && !omp_privatize_by_reference (decl))
		TREE_READONLY (decl) = 0;
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
      /* When there is no explicit initializer, if the user requested it,
	 we should insert an artificial initializer for this automatic
	 variable.  */
      else if (is_var_need_auto_init (decl)
	       && !decl_had_value_expr_p)
	{
	  gimple_add_init_for_auto_var (decl,
					flag_auto_var_init,
					seq_p);
	  /* The expansion of a call to the above .DEFERRED_INIT will apply
	     block initialization to the whole space covered by this variable.
	     As a result, all the padding will be initialized to zeroes
	     for zero initialization and 0xFE byte-repeatable patterns for
	     pattern initialization.
	     In order to make the padding zeroes for pattern init, we
	     should add a call to __builtin_clear_padding to clear the
	     padding to zero, compatible with Clang.
	     We cannot insert this call if the variable is a gimple register
	     since __builtin_clear_padding will take the address of the
	     variable.  As a result, if a long double/_Complex long double
	     variable is spilled to the stack later, its padding is 0xFE.  */
	  if (flag_auto_var_init == AUTO_INIT_PATTERN
	      && !is_gimple_reg (decl)
	      && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
	    gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
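
/* Schematically, 'loop { BODY }' becomes

     start:
       BODY
       goto start;
     exit:

   where the exit label is only emitted if BODY contained an EXIT_EXPR,
   communicated through gimplify_ctxp->exit_label.  */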

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}


/* Emit a warning for the unreachable statement STMT if needed.
   Return the gimple itself when the warning is emitted, otherwise
   return NULL.  */
static gimple *
emit_warn_switch_unreachable (gimple *stmt)
{
  if (gimple_code (stmt) == GIMPLE_GOTO
      && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
      && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
    /* Don't warn for compiler-generated gotos.  These occur
       in Duff's devices, for example.  */
    return NULL;
  else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
	   && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	       || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
		   && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
	       || (is_gimple_assign (stmt)
		   && gimple_assign_single_p (stmt)
		   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		   && gimple_call_internal_p (
			SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
			IFN_DEFERRED_INIT))))
    /* Don't warn for compiler-generated initializations for
       -ftrivial-auto-var-init.
       There are 3 cases:
	 case 1: a call to .DEFERRED_INIT;
	 case 2: a call to __builtin_clear_padding with the 2nd argument
	 present and non-zero;
	 case 3: a gimple assign store right after the call to .DEFERRED_INIT
	 that has the LHS of .DEFERRED_INIT as the RHS as follows:
	   _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
	   i1 = _1.  */
    return NULL;
  else
    warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		"statement will never be executed");
  return stmt;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
					 bool *handled_ops_p,
					 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool unreachable_issued = wi->info != NULL;

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  if (warn_switch_unreachable && !unreachable_issued)
	    wi->info = emit_warn_switch_unreachable (stmt);

	  /* Stop when the auto var init warning is not on.  */
	  if (!warn_trivial_auto_var_init)
	    return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_LABEL:
      /* Stop at the first label.  */
      return integer_zero_node;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      if (warn_trivial_auto_var_init
	  && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
	  && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
	{
	  /* Get the variable name from the 3rd argument of the call.  */
	  tree var_name = gimple_call_arg (stmt, 2);
	  var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
	  const char *var_name_str = TREE_STRING_POINTER (var_name);

	  warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
		      "%qs cannot be initialized with "
		      "%<-ftrivial-auto-var-init%>",
		      var_name_str);
	  break;
	}

      /* Fall through.  */
    default:
      /* Check the first "real" statement (not a decl/lexical scope/...),
	 and issue a warning if needed.  */
      if (warn_switch_unreachable && !unreachable_issued)
	wi->info = emit_warn_switch_unreachable (stmt);
      /* Stop when the auto var init warning is not on.  */
      if (!warn_trivial_auto_var_init)
	return integer_zero_node;
      break;
    }
  return NULL_TREE;
}


/* Possibly warn about unreachable statements between a switch's controlling
   expression and the first case.  Also warn when -ftrivial-auto-var-init
   cannot initialize an auto variable in that situation.
   SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
{
  if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
}
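
/* An illustrative case that -Wswitch-unreachable diagnoses:

     switch (i)
       {
	 foo ();   // warning: statement will never be executed
       case 1:
	 ...
       }
*/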


/* A label entry that pairs a label with a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in the vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;
  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;
  return false;
}

/* Find the last nondebug statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      gcc_unreachable ();

    default:
      return stmt;
    }
}

/* Collect labels that may fall through into LABELS and return the statement
   preceding another case label, or a user-defined label.  Store a location
   useful for giving warnings at *PREVLOC (usually the location of the
   returned statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		*prevloc = bind_loc;
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* A dead label can't fall through.  */
	  if (!UNUSED_LABEL_P (false_lab))
	    {
	      struct label_entry l = { false_lab, if_loc };
	      labels->safe_push (l);
	    }

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* This case is about
	       if (1 != 0) goto <D.2022>; else goto <D.2023>;
	       <D.2022>:
	       n = n + 1; // #1
	       <D.2023>: // #2
	       <D.1988>: // #3
	     where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
	     through to #3.  So set PREV to #1.  */
	  else if (UNUSED_LABEL_P (false_lab))
	    prev = gsi_stmt (*gsi_p);

	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}

/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately break.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	location_t prevloc;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && prevloc != UNKNOWN_LOCATION)
	      warned_p = warning_at (prevloc,
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Warn when a switch case falls through.  */

static void
maybe_warn_implicit_fallthrough (gimple_seq seq)
{
  if (!warn_implicit_fallthrough)
    return;

  /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
  if (!(lang_GNU_C ()
	|| lang_GNU_CXX ()
	|| lang_GNU_OBJC ()))
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
}
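
/* A typical case this diagnoses (illustrative):

     switch (i)
       {
       case 1:
	 foo ();	// warning: this statement may fall through
       case 2:		// note: here
	 bar ();
       }
*/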

/* Callback for walk_gimple_seq.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    {
	      *static_cast<location_t *>(wi->info) = gimple_location (stmt);
	      return integer_zero_node;
	    }

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
		     "a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Expand all FALLTHROUGH () calls in SEQ.  */

static void
expand_FALLTHROUGH (gimple_seq *seq_p)
{
  struct walk_stmt_info wi;
  location_t loc;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &loc;
  walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
  if (wi.callback_result == integer_zero_node)
    /* We've found [[fallthrough]]; at the end of a switch, which the C++
       standard says is ill-formed; see [dcl.attr.fallthrough].  */
    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
	     "a case label or default label");
}
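
/* E.g. in

     case 1:
       foo ();
       __attribute__((fallthrough));
     case 2:

   the attribute reaches the gimplifier as a call to the internal
   function FALLTHROUGH (); it is removed here after checking that it
   really does precede a case or default label.  */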


/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding a SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}

/* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
	      == current_function_decl);

  tree label = LABEL_EXPR_LABEL (*expr_p);
  glabel *label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}

/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  tree label = CASE_LABEL (*expr_p);
  label_stmt = gimple_build_label (label);
  gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
						      NOT_TAKEN));
  else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
    gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
						      TAKEN));

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}
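
/* That is, an EXIT_EXPR <COND> becomes roughly

     if (COND) goto exit_label;

   where EXIT_LABEL is the label gimplify_loop_expr emits after the
   loop body.  */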

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.
     Another possible culprit are size expressions for variably modified
     types which are lost in the FE or not gimplified correctly.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */

static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborate nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     The base expression may contain a statement expression that
     has declarations used in size expressions, so has to be
     gimplified before gimplifying the size expressions.

     So we do this in three steps.  First we deal with variable
     bounds, sizes, and positions, then we gimplify the base and
     ensure it is memory if needed, then we deal with the annotations
     for any variables in the components and any indices, from left
     to right.  */

  bool need_non_reg = false;
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (error_operand_p (TREE_OPERAND (t, 0)))
	return GS_ERROR;

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Deal with the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		TREE_OPERAND (t, 2) = low;
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		}
	    }
	  need_non_reg = true;
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		}
	    }
	  need_non_reg = true;
	}
      else if (!is_gimple_reg_type (TREE_TYPE (t)))
	/* When the result of an operation, in particular a VIEW_CONVERT_EXPR,
	   is a non-register type then require the base object to be a
	   non-register as well.  */
	need_non_reg = true;
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);
  if (ret == GS_ERROR)
    return GS_ERROR;

  /* Step 2a: if we have component references we do not support on
     registers then make sure the base isn't a register.  Of course
     we can only do so if an rvalue is OK.  */
  if (need_non_reg && (fallback & fb_rvalue))
    prepare_gimple_addressable (p, pre_p);

  /* Step 3: gimplify size expressions and the indices and operands of
     ARRAY_REF.  During this loop we also remove any useless conversions.
     If we operate on a register also make sure to properly gimplify
     to individual operations.  */

  bool reg_operations = is_gimple_reg (*p);
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  gcc_assert (!reg_operations);

	  /* Gimplify the low bound and element type size.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);

	  /* Gimplify the dimension.  */
	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  gcc_assert (!reg_operations);

	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				is_gimple_reg, fb_rvalue);
	  ret = MIN (ret, tret);
	}
      else if (reg_operations)
	{
	  tret = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
				is_gimple_val, fb_rvalue);
	  ret = MIN (ret, tret);
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    canonicalize_component_ref (expr_p);

  expr_stack.release ();

  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3560
3561/* Gimplify the self modifying expression pointed to by EXPR_P
3562 (++, --, +=, -=).
3563
3564 PRE_P points to the list where side effects that must happen before
3565 *EXPR_P should be stored.
3566
3567 POST_P points to the list where side effects that must happen after
3568 *EXPR_P should be stored.
3569
3570 WANT_VALUE is nonzero iff we want to use the value of this expression
3571 in another expression.
3572
3573 ARITH_TYPE is the type the computation should be performed in. */
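
/* An informal sketch of the effect: with WANT_VALUE set, "x++" is
   gimplified roughly as

     t1 = x;
     x = t1 + 1;

   and *EXPR_P is replaced by t1, so consumers see the original value.
   Without WANT_VALUE, the (cheaper) prefix form is used and the whole
   expression simply becomes "x = x + 1".  */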

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
              || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
                       fold_build2 (arith_code, arith_type,
                                    fold_convert (arith_type, lhs),
                                    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}

/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
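
/* An informal example: if EXPR has type "char[n]" for a non-constant n,
   *EXPR_P becomes WITH_SIZE_EXPR <EXPR, n'>, where n' is an unshared
   copy of TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs substituted, so
   later passes can still recover the object's size.  */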

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || poly_int_tree_p (size))
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}

/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
   gimplified to an SSA name.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
              bool allow_ssa)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.  */
  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
    test = is_gimple_val, fb = fb_rvalue;
  else
    {
      test = is_gimple_lvalue, fb = fb_either;
      /* Also strip a TARGET_EXPR that would force an extra copy.  */
      if (TREE_CODE (*arg_p) == TARGET_EXPR)
        {
          tree init = TARGET_EXPR_INITIAL (*arg_p);
          if (init
              && !VOID_TYPE_P (TREE_TYPE (init)))
            *arg_p = init;
        }
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (arg_p);

  /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
  /* Make sure arguments have the same location as the function call
     itself.  */
  protected_set_expr_location (*arg_p, call_location);

  /* There is a sequence point before a function call.  Side effects in
     the argument list must occur before the actual call.  So, when
     gimplifying arguments, force gimplify_expr to use an internal
     post queue which is then appended to the end of PRE_P.  */
  return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
}

/* Don't fold inside offloading or taskreg regions: it can break code by
   adding decl references that weren't in the source.  We'll do it during
   the omplower pass instead.  */

static bool
maybe_fold_stmt (gimple_stmt_iterator *gsi)
{
  struct gimplify_omp_ctx *ctx;
  for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
    if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
      return false;
    else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
      return false;
  /* Delay folding of builtins until the IL is in consistent state
     so the diagnostic machinery can do a better job.  */
  if (gimple_call_builtin_p (gsi_stmt (*gsi)))
    return false;
  return fold_stmt (gsi);
}

/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
        return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      if (ifn == IFN_ASSUME)
        {
          if (simple_condition_p (CALL_EXPR_ARG (*expr_p, 0)))
            {
              /* If the [[assume (cond)]]; condition is simple
                 enough and can be evaluated unconditionally
                 without side effects, expand it as
                 if (!cond) __builtin_unreachable ();  */
              tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
              *expr_p = build3 (COND_EXPR, void_type_node,
                                CALL_EXPR_ARG (*expr_p, 0), void_node,
                                build_call_expr_loc (EXPR_LOCATION (*expr_p),
                                                     fndecl, 0));
              return GS_OK;
            }
          /* If not optimizing, ignore the assumptions.  */
          if (!optimize || seen_error ())
            {
              *expr_p = NULL_TREE;
              return GS_ALL_DONE;
            }
          /* Temporarily, until gimple lowering, transform
               .ASSUME (cond);
             into:
               [[assume (guard)]]
               {
                 guard = cond;
               }
             such that gimple lowering can outline the condition into
             a separate function easily.  */
          tree guard = create_tmp_var (boolean_type_node);
          *expr_p = build2 (MODIFY_EXPR, void_type_node, guard,
                            gimple_boolify (CALL_EXPR_ARG (*expr_p, 0)));
          *expr_p = build3 (BIND_EXPR, void_type_node, NULL, *expr_p, NULL);
          push_gimplify_context ();
          gimple_seq body = NULL;
          gimple *g = gimplify_and_return_first (*expr_p, &body);
          pop_gimplify_context (g);
          g = gimple_build_assume (guard, body);
          gimple_set_location (g, loc);
          gimplify_seq_add_stmt (pre_p, g);
          *expr_p = NULL_TREE;
          return GS_ALL_DONE;
        }

      for (i = 0; i < nargs; i++)
        {
          gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                        EXPR_LOCATION (*expr_p));
          vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
        }

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
        /* If the call has been built for a variable-sized object, then we
           want to restore the stack level when the enclosing BIND_EXPR is
           exited to reclaim the allocated space; otherwise, we precisely
           need to do the opposite and preserve the latest stack level.  */
        if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
          gimplify_ctxp->save_stack = true;
        else
          gimplify_ctxp->keep_stack = true;
        break;

      case BUILT_IN_VA_START:
        {
          builtin_va_start_p = true;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }

          if (fold_builtin_next_arg (*expr_p, true))
            {
              *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
              return GS_OK;
            }
          break;
        }

      case BUILT_IN_EH_RETURN:
        cfun->calls_eh_return = true;
        break;

      case BUILT_IN_CLEAR_PADDING:
        if (call_expr_nargs (*expr_p) == 1)
          {
            /* Remember the original type of the argument in an internal
               dummy second argument, as in GIMPLE pointer conversions are
               useless.  Also mark this call as not for automatic
               initialization in the internal dummy third argument.  */
            p = CALL_EXPR_ARG (*expr_p, 0);
            *expr_p
              = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
                                     build_zero_cst (TREE_TYPE (p)));
            return GS_OK;
          }
        break;

      default:
        ;
      }
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
        CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                       is_gimple_call_addr, fb_rvalue);

  if (ret == GS_ERROR)
    return GS_ERROR;

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
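  /* For instance (an informal sketch): inside an always_inline function,
     a call written as "foo (x, __builtin_va_arg_pack ())" is rewritten
     here to "foo (x)" with CALL_EXPR_VA_ARG_PACK set; the packed
     arguments are reinstated when the function is inlined into its
     callers.  */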
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
          && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
        {
          tree call = *expr_p;

          --nargs;
          *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
                                          CALL_EXPR_FN (call),
                                          nargs, CALL_EXPR_ARGP (call));

          /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
                                EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
        CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
        {
          enum gimplify_status t;
          t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
                            EXPR_LOCATION (*expr_p), ! returns_twice);
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
        {
          /* There was a transformation of this call which computes the
             same value, but in a more efficient way.  Return and try
             again.  */
          *expr_p = new_tree;
          return GS_OK;
        }
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
          /* An infinite loop is considered a side effect.  */
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
         have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
                                     CALL_EXPR_FN (*expr_p));

  return ret;
}

/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
                 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

         if (a); else goto no;
         if (b) goto yes; else goto no;
         (no:)  */

      if (false_label_p == NULL)
        false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

         if (a) goto yes;
         if (b) goto yes; else goto no;
         (yes:)  */

      if (true_label_p == NULL)
        true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
                           new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
           && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
         if (a)
           if (b) goto yes; else goto no;
         else
           if (c) goto yes; else goto no;

         Don't do this if one of the arms has void type, which can happen
         in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
         location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
                     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
                                      false_label_p, locus),
                     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
                                      false_label_p, new_locus));
    }
  else
    {
      expr = build3 (COND_EXPR, void_type_node, pred,
                     build_and_jump (true_label_p),
                     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
   any of its leading DEBUG_BEGIN_STMTs and recurse on the subsequent
   statement, if it is the last one.  Otherwise, return NULL.  */

static tree
find_goto (tree expr)
{
  if (!expr)
    return NULL_TREE;

  if (TREE_CODE (expr) == GOTO_EXPR)
    return expr;

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return NULL_TREE;

  tree_stmt_iterator i = tsi_start (expr);

  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    tsi_next (&i);

  if (!tsi_one_before_end_p (i))
    return NULL_TREE;

  return find_goto (tsi_stmt (i));
}

/* Same as find_goto, except that it returns NULL if the destination
   is not a LABEL_DECL.  */

static inline tree
find_goto_label (tree expr)
{
  tree dest = find_goto (expr);
  if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
    return dest;
  return NULL_TREE;
}

/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */
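
/* An informal sketch of the overall effect: "if (a && b) c; else d;"
   ends up roughly as

     if (a); else goto no;
     if (b); else goto no;
     c;
     goto end;
     no: d;
     end:

   with the per-operand gotos produced by shortcut_cond_r above.  */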

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
           if (a && b) then c
         into
           if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the && on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          then_ = shortcut_cond_expr (expr);
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
           if (a || b); else d
         into
           if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the || on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          else_ = shortcut_cond_expr (expr);
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
                            EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          if (rexpr_has_location (last))
            SET_EXPR_LOCATION (t, rexpr_location (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
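
/* Informally: a comparison such as "a < b" that the front end typed as
   int is simply retyped to boolean_type_node, whereas a plain integral
   value used as a condition is wrapped in a conversion to
   boolean_type_node instead.  */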

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_unroll_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
        case annot_expr_parallel_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */
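
/* For example (informally): for "(a && b) ? x : y", the predicate is
   first rewritten to the non-short-circuit TRUTH_AND_EXPR so that
   gimplifying it cannot produce another pure COND_EXPR and loop
   forever; the result is roughly "t = a & b; t ? x : y" with t, x and
   y gimple values.  */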

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_val, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}

/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}

/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

     if (p)                  if (p)
       t1 = a;                 a;
     else          or        else
       t1 = b;                 b;
     t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
         temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
          && !TREE_ADDRESSABLE (type))
        {
          if (gimplify_ctxp->allow_rhs_cond_expr
              /* If either branch has side effects or could trap, it can't be
                 evaluated unconditionally.  */
              && !TREE_SIDE_EFFECTS (then_)
              && !generic_expr_could_trap_p (then_)
              && !TREE_SIDE_EFFECTS (else_)
              && !generic_expr_could_trap_p (else_))
            return gimplify_pure_cond_expr (expr_p, pre_p);

          tmp = create_tmp_var (type, "iftmp");
          result = tmp;
        }

      /* Otherwise, only create and copy references to the values.  */
      else
        {
          type = build_pointer_type (type);

          if (!VOID_TYPE_P (TREE_TYPE (then_)))
            then_ = build_fold_addr_expr_loc (loc, then_);

          if (!VOID_TYPE_P (TREE_TYPE (else_)))
            else_ = build_fold_addr_expr_loc (loc, else_);

          expr
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

          tmp = create_tmp_var (type, "iftmp");
          result = build_simple_mem_ref_loc (loc, tmp);
        }

      /* Build the new then clause, `tmp = then_;'.  But don't build the
         assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
        TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
        TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
        {
          *expr_p = expr;

          /* We can't rely on gimplify_expr to re-gimplify the expanded
             form properly, as cleanups might cause the target labels to be
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
             set up a conditional context.  */
          gimple_push_condition ();
          gimplify_stmt (expr_p, &seq);
          gimple_pop_condition (pre_p);
          gimple_seq_add_seq (pre_p, seq);

          return GS_ALL_DONE;
        }
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
                       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_true)
          || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_false)
          || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
                                 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                 label_false);
  gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
  copy_warning (cond_stmt, COND_EXPR_COND (expr));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
         the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
          && !have_else_clause_p
          && TREE_OPERAND (expr, 2) != NULL_TREE)
        {
          /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
             handling that label_cont == label_true can be only reached
             through fallthrough from { code; }.  */
          if (integer_zerop (COND_EXPR_COND (expr)))
            UNUSED_LABEL_P (label_true) = 1;
          label_cont = label_true;
        }
      else
        {
          bool then_side_effects
            = (TREE_OPERAND (expr, 1)
               && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
          /* For if (...) { code; } else {} or
             if (...) { code; } else goto label; or
             if (...) { code; return; } else { ... }
             label_cont isn't needed.  */
          if (!have_else_clause_p
              && TREE_OPERAND (expr, 2) != NULL_TREE
              && gimple_seq_may_fallthru (seq))
            {
              gimple *g;
              label_cont = create_artificial_label (UNKNOWN_LOCATION);

              /* For if (0) { non-side-effect-code } else { code }
                 tell -Wimplicit-fallthrough handling that label_cont can
                 be only reached through fallthrough from { code }.  */
              if (integer_zerop (COND_EXPR_COND (expr)))
                {
                  UNUSED_LABEL_P (label_true) = 1;
                  if (!then_side_effects)
                    UNUSED_LABEL_P (label_cont) = 1;
                }

              g = gimple_build_goto (label_cont);

              /* GIMPLE_COND's are very low level; they have embedded
                 gotos.  This particular embedded goto should not be marked
                 with the location of the original COND_EXPR, as it would
                 correspond to the COND_EXPR's condition, not the ELSE or the
                 THEN arms.  To avoid marking it with the wrong location, flag
                 it as "no location".  */
              gimple_set_do_not_emit_location (g);

              gimplify_seq_add_stmt (&seq, g);
            }
        }
    }
  if (!have_else_clause_p)
    {
      /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
         tell -Wimplicit-fallthrough handling that label_false can be only
         reached through fallthrough from { code }.  */
      if (integer_nonzerop (COND_EXPR_COND (expr))
          && (TREE_OPERAND (expr, 2) == NULL_TREE
              || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
        UNUSED_LABEL_P (label_false) = 1;
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}

/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);

  /* Do not allow an SSA name as the temporary.  */
  if (is_gimple_reg (*expr_p))
    *expr_p = internal_get_tmp_var (*expr_p, seq_p, NULL, false, false, true);
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */
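
/* Informally: an assignment "a = b" of a large or variable-sized
   aggregate, with SIZE its size in bytes, becomes roughly
     __builtin_memcpy (&a, &b, size);
   and, if WANT_VALUE, *EXPR_P becomes a dereference of the pointer
   returned by memcpy so the result can still be used.  */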

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
  gimple_call_set_alloca_for_var (gs, true);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */
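
/* Informally: a store of an empty CONSTRUCTOR, "x = (T) {}", of SIZE
   bytes becomes roughly
     __builtin_memset (&x, 0, size);
   again dereferencing the returned pointer if WANT_VALUE.  */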

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
              && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
        if (POINTER_TYPE_P (TREE_VALUE (type))
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
            && alias_sets_conflict_p (data->lhs_alias_set,
                                      get_alias_set
                                        (TREE_TYPE (TREE_VALUE (type)))))
          return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}

/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */
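
/* An informal example: for "a = (struct S) { x + a.i, f () };", the
   elements that might read the object being stored to (here "a.i", and
   the call that might reach it through a pointer argument) are
   evaluated into temporaries first, so clearing or piecewise storing
   into A cannot clobber the inputs.  */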

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                            struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
         the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}

/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

     var = lower;
   loop_entry:
     object[var] = value;
     if (var == upper)
       goto loop_exit;
     var = var + 1;
     goto loop_entry;
   loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
                                     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
                               gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
  else
    {
      if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
          != GS_ERROR)
        gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
    }

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
                         gimple_build_cond (EQ_EXPR, var, upper,
                                            loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}

/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */
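
/* An informal example: for "int a[3] = { 1, 2, 3 };" this emits
     a[0] = 1; a[1] = 2; a[2] = 3;
   and when CLEARED is set (say for "int a[100] = { 1 };" after a block
   clear), stores of zero are simply skipped.  */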
5164
5165static void
5166gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
5167 gimple_seq *pre_p, bool cleared)
5168{
5169 tree array_elt_type = NULL;
5170 unsigned HOST_WIDE_INT ix;
5171 tree purpose, value;
5172
5173 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
5174 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
5175
5176 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
5177 {
5178 tree cref;
5179
5180 /* NULL values are created above for gimplification errors. */
5181 if (value == NULL)
5182 continue;
5183
5184 if (cleared && initializer_zerop (value))
5185 continue;
5186
5187 /* ??? Here's to hoping the front end fills in all of the indices,
5188 so we don't have to figure out what's missing ourselves. */
5189 gcc_assert (purpose);
5190
5191 /* Skip zero-sized fields, unless value has side-effects. This can
5192 happen with calls to functions returning a empty type, which
5193 we shouldn't discard. As a number of downstream passes don't
5194 expect sets of empty type fields, we rely on the gimplification of
5195 the MODIFY_EXPR we make below to drop the assignment statement. */
5196 if (!TREE_SIDE_EFFECTS (value)
5197 && TREE_CODE (purpose) == FIELD_DECL
5198 && is_empty_type (TREE_TYPE (purpose)))
5199 continue;
5200
5201 /* If we have a RANGE_EXPR, we have to build a loop to assign the
5202 whole range. */
5203 if (TREE_CODE (purpose) == RANGE_EXPR)
5204 {
5205 tree lower = TREE_OPERAND (purpose, 0);
5206 tree upper = TREE_OPERAND (purpose, 1);
5207
5208 /* If the lower bound is equal to upper, just treat it as if
5209 upper was the index. */
5210 if (simple_cst_equal (lower, upper))
5211 purpose = upper;
5212 else
5213 {
5214 gimplify_init_ctor_eval_range (object, lower, upper, value,
5215 array_elt_type, pre_p, cleared);
5216 continue;
5217 }
5218 }
5219
5220 if (array_elt_type)
5221 {
5222 /* Do not use bitsizetype for ARRAY_REF indices. */
5223 if (TYPE_DOMAIN (TREE_TYPE (object)))
5224 purpose
5225 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
5226 purpose);
5227 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (expr: object),
5228 purpose, NULL_TREE, NULL_TREE);
5229 }
5230 else
5231 {
5232 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
5233 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
5234 unshare_expr (expr: object), purpose, NULL_TREE);
5235 }
5236
5237 if (TREE_CODE (value) == CONSTRUCTOR
5238 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
5239 gimplify_init_ctor_eval (object: cref, CONSTRUCTOR_ELTS (value),
5240 pre_p, cleared);
5241 else
5242 {
5243 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
5244 gimplify_and_add (t: init, seq_p: pre_p);
5245 ggc_free (init);
5246 }
5247 }
5248}
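
/* For illustration (simplified): a nested aggregate initializer such as

     struct S { int x; int y[2]; };
     struct S s = { 1, { 2, 3 } };

   is broken up by gimplify_init_ctor_eval into element-wise stores, roughly

     s.x = 1;
     s.y[0] = 2;
     s.y[1] = 3;

   with zero-valued elements skipped when the whole object has already been
   cleared (CLEARED is true).  */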

/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
                                bool (*gimple_test_f) (tree),
                                fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal, directly
     substitute it.  Check that it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
           && !TREE_THIS_VOLATILE (decl)
           && init
           && (fallback & fb_lvalue) == 0
           && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
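
/* For illustration (simplified): given the C99 compound literal

     int *p = (int []) { 1, 2, 3 };

   the anonymous array decl is emitted via its DECL_EXPR before the current
   statement and the literal is replaced by that decl, roughly

     int D.1234[3] = { 1, 2, 3 };
     int *p = &D.1234[0];

   where D.1234 stands in for the compiler-generated anonymous decl.  */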

/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
        newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
        {
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
          tree decl = DECL_EXPR_DECL (decl_s);
          tree init = DECL_INITIAL (decl);

          if (!TREE_ADDRESSABLE (value)
              && !TREE_ADDRESSABLE (decl)
              && init
              && TREE_CODE (init) == CONSTRUCTOR)
            newval = optimize_compound_literals_in_ctor (init);
        }
      if (newval == value)
        continue;

      if (ctor == orig_ctor)
        {
          ctor = copy_node (orig_ctor);
          CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
          elts = CONSTRUCTOR_ELTS (ctor);
        }
      (*elts)[idx].value = newval;
    }
  return ctor;
}
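
/* For illustration (simplified): in an initializer such as

     struct T t = { (struct S) { 1, 2 } };

   the embedded compound literal can be replaced by its own initializer as
   long as its address is never taken, yielding the equivalent

     struct T t = { { 1, 2 } };  */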

/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                           bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;
  bool cleared = false;
  bool is_empty_ctor = false;
  bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
        return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
        /* Use readonly data for initializers of this or smaller size
           regardless of the num_nonzero_elements / num_unique_nonzero_elements
           ratio.  */
        const HOST_WIDE_INT min_unique_size = 64;
        /* If num_nonzero_elements / num_unique_nonzero_elements ratio
           is smaller than this, use readonly data.  */
        const int unique_nonzero_ratio = 8;
        /* True if a single access of the object must be ensured.  This is the
           case if the target is volatile, the type is non-addressable and
           more than one field needs to be assigned.  */
        const bool ensure_single_access
          = TREE_THIS_VOLATILE (object)
            && !TREE_ADDRESSABLE (type)
            && vec_safe_length (elts) > 1;
        struct gimplify_init_ctor_preeval_data preeval_data;
        HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
        HOST_WIDE_INT num_unique_nonzero_elements;
        bool complete_p, valid_const_initializer;

        /* Aggregate types must lower constructors to initialization of
           individual elements.  The exception is that a CONSTRUCTOR node
           with no elements indicates zero-initialization of the whole.  */
        if (vec_safe_is_empty (elts))
          {
            if (notify_temp_creation)
              return GS_OK;

            /* The var will be initialized and so appear on the lhs of an
               assignment; it can't be TREE_READONLY anymore.  */
            if (VAR_P (object))
              TREE_READONLY (object) = 0;

            is_empty_ctor = true;
            break;
          }

        /* Fetch information about the constructor to direct later processing.
           We might want to make static versions of it in various cases, and
           can only do so if it is known to be a valid constant
           initializer.  */
        valid_const_initializer
          = categorize_ctor_elements (ctor, &num_nonzero_elements,
                                      &num_unique_nonzero_elements,
                                      &num_ctor_elements, &complete_p);

        /* If a const aggregate variable is being initialized, then it
           should never be a loss to promote the variable to be static.  */
        if (valid_const_initializer
            && num_nonzero_elements > 1
            && TREE_READONLY (object)
            && VAR_P (object)
            && !DECL_REGISTER (object)
            && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)
                || DECL_MERGEABLE (object))
            /* For ctors that have many repeated nonzero elements
               represented through RANGE_EXPRs, prefer initializing
               those through runtime loops over copies of large amounts
               of data from readonly data section.  */
            && (num_unique_nonzero_elements
                > num_nonzero_elements / unique_nonzero_ratio
                || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
                    <= (unsigned HOST_WIDE_INT) min_unique_size)))
          {
            if (notify_temp_creation)
              return GS_ERROR;

            DECL_INITIAL (object) = ctor;
            TREE_STATIC (object) = 1;
            if (!DECL_NAME (object))
              DECL_NAME (object) = create_tmp_var_name ("C");
            walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

            /* ??? C++ doesn't automatically append a .<number> to the
               assembler name, and even when it does, it looks at FE private
               data structures to figure out what that number should be,
               which are not set for this variable.  I suppose this is
               important for local statics for inline functions, which aren't
               "local" in the object file sense.  So in order to get a unique
               TU-local symbol, we must invoke the lhd version now.  */
            lhd_set_decl_assembler_name (object);

            *expr_p = NULL_TREE;
            break;
          }

        /* The var will be initialized and so appear on the lhs of an
           assignment; it can't be TREE_READONLY anymore.  */
        if (VAR_P (object) && !notify_temp_creation)
          TREE_READONLY (object) = 0;

        /* If there are "lots" of initialized elements, even discounting
           those that are not address constants (and thus *must* be
           computed at runtime), then partition the constructor into
           constant and non-constant parts.  Block copy the constant
           parts in, then generate code for the non-constant parts.  */
        /* TODO.  There's code in cp/typeck.cc to do this.  */

        if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
          /* store_constructor will ignore the clearing of variable-sized
             objects.  Initializers for such objects must explicitly set
             every field that needs to be set.  */
          cleared = false;
        else if (!complete_p)
          /* If the constructor isn't complete, clear the whole object
             beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

             ??? This ought not to be needed.  For any element not present
             in the initializer, we should simply set it to zero.  Except
             we'd need to *find* the elements that are not present, and that
             requires trickery to avoid quadratic compile-time behavior in
             large cases or excessive memory use in small cases.  */
          cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
        else if (num_ctor_elements - num_nonzero_elements
                 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
                 && num_nonzero_elements < num_ctor_elements / 4)
          /* If there are "lots" of zeros, it's more efficient to clear
             the memory and then set the nonzero elements.  */
          cleared = true;
        else if (ensure_single_access && num_nonzero_elements == 0)
          /* If a single access to the target must be ensured and all elements
             are zero, then it's optimal to clear whatever their number.  */
          cleared = true;
        else
          cleared = false;

        /* If there are "lots" of initialized elements, and all of them
           are valid address constants, then the entire initializer can
           be dropped to memory, and then memcpy'd out.  Don't do this
           for sparse arrays, though, as it's more efficient to follow
           the standard CONSTRUCTOR behavior of memset followed by
           individual element initialization.  Also don't do this for small
           all-zero initializers (which aren't big enough to merit
           clearing), and don't try to make bitwise copies of
           TREE_ADDRESSABLE types.  */
        if (valid_const_initializer
            && complete_p
            && !(cleared || num_nonzero_elements == 0)
            && !TREE_ADDRESSABLE (type))
          {
            HOST_WIDE_INT size = int_size_in_bytes (type);
            unsigned int align;

            /* ??? We can still get unbounded array types, at least
               from the C++ front end.  This seems wrong, but attempt
               to work around it for now.  */
            if (size < 0)
              {
                size = int_size_in_bytes (TREE_TYPE (object));
                if (size >= 0)
                  TREE_TYPE (ctor) = type = TREE_TYPE (object);
              }

            /* Find the maximum alignment we can assume for the object.  */
            /* ??? Make use of DECL_OFFSET_ALIGN.  */
            if (DECL_P (object))
              align = DECL_ALIGN (object);
            else
              align = TYPE_ALIGN (type);

            /* Do a block move either if the size is so small as to make
               each individual move a sub-unit move on average, or if it
               is so large as to make individual moves inefficient.  */
            if (size > 0
                && num_nonzero_elements > 1
                /* For ctors that have many repeated nonzero elements
                   represented through RANGE_EXPRs, prefer initializing
                   those through runtime loops over copies of large amounts
                   of data from readonly data section.  */
                && (num_unique_nonzero_elements
                    > num_nonzero_elements / unique_nonzero_ratio
                    || size <= min_unique_size)
                && (size < num_nonzero_elements
                    || !can_move_by_pieces (size, align)))
              {
                if (notify_temp_creation)
                  return GS_ERROR;

                walk_tree (&ctor, force_labels_r, NULL, NULL);
                ctor = tree_output_constant_def (ctor);
                if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
                  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
                TREE_OPERAND (*expr_p, 1) = ctor;

                /* This is no longer an assignment of a CONSTRUCTOR, but
                   we still may have processing to do on the LHS.  So
                   pretend we didn't do anything here to let that happen.  */
                return GS_UNHANDLED;
              }
          }

        /* If a single access to the target must be ensured and there are
           nonzero elements or the zero elements are not assigned en masse,
           initialize the target from a temporary.  */
        if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
          {
            if (notify_temp_creation)
              return GS_ERROR;

            tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
            TREE_OPERAND (*expr_p, 0) = temp;
            *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                              *expr_p,
                              build2 (MODIFY_EXPR, void_type_node,
                                      object, temp));
            return GS_OK;
          }

        if (notify_temp_creation)
          return GS_OK;

        /* If there are nonzero elements and if needed, pre-evaluate to
           capture elements overlapping with the lhs into temporaries.  We
           must do this before clearing to fetch the values before they are
           zeroed-out.  */
        if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
          {
            preeval_data.lhs_base_decl = get_base_address (object);
            if (!DECL_P (preeval_data.lhs_base_decl))
              preeval_data.lhs_base_decl = NULL;
            preeval_data.lhs_alias_set = get_alias_set (object);

            gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
                                        pre_p, post_p, &preeval_data);
          }

        bool ctor_has_side_effects_p
          = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

        if (cleared)
          {
            /* Zap the CONSTRUCTOR element list, which simplifies this case.
               Note that we still have to gimplify, in order to handle the
               case of variable sized types.  Avoid shared tree structures.  */
            CONSTRUCTOR_ELTS (ctor) = NULL;
            TREE_SIDE_EFFECTS (ctor) = 0;
            object = unshare_expr (object);
            gimplify_stmt (expr_p, pre_p);
          }

        /* If we have not block cleared the object, or if there are nonzero
           elements in the constructor, or if the constructor has side effects,
           add assignments to the individual scalar fields of the object.  */
        if (!cleared
            || num_nonzero_elements > 0
            || ctor_has_side_effects_p)
          gimplify_init_ctor_eval (object, elts, pre_p, cleared);

        *expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
        tree r, i;

        if (notify_temp_creation)
          return GS_OK;

        /* Extract the real and imaginary parts out of the ctor.  */
        gcc_assert (elts->length () == 2);
        r = (*elts)[0].value;
        i = (*elts)[1].value;
        if (r == NULL || i == NULL)
          {
            tree zero = build_zero_cst (TREE_TYPE (type));
            if (r == NULL)
              r = zero;
            if (i == NULL)
              i = zero;
          }

        /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
           represent creation of a complex value.  */
        if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
          {
            ctor = build_complex (type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
          }
        else
          {
            ctor = build2 (COMPLEX_EXPR, type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
                                 pre_p,
                                 post_p,
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
                                 fb_rvalue);
          }
      }
      break;

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT ix;
        constructor_elt *ce;

        if (notify_temp_creation)
          return GS_OK;

        /* Vector types use CONSTRUCTOR all the way through gimple
           compilation as a general initializer.  */
        FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
          {
            enum gimplify_status tret;
            tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
                                  fb_rvalue);
            if (tret == GS_ERROR)
              ret = GS_ERROR;
            else if (TREE_STATIC (ctor)
                     && !initializer_constant_valid_p (ce->value,
                                                       TREE_TYPE (ce->value)))
              TREE_STATIC (ctor) = 0;
          }
        recompute_constructor_flags (ctor);

        /* Go ahead and simplify constant constructors to VECTOR_CST.  */
        if (TREE_CONSTANT (ctor))
          {
            bool constant_p = true;
            tree value;

            /* Even when ctor is constant, it might contain non-*_CST
               elements, such as addresses or trapping values like
               1.0/0.0 - 1.0/0.0.  Such expressions don't belong
               in VECTOR_CST nodes.  */
            FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
              if (!CONSTANT_CLASS_P (value))
                {
                  constant_p = false;
                  break;
                }

            if (constant_p)
              {
                TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
                break;
              }
          }

        if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p
      /* If the type is an empty type, we don't need to emit the
         assignment.  */
      && !is_empty_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
        lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      ret = GS_OK;
    }
  else
    {
      *expr_p = NULL;
      ret = GS_ALL_DONE;
    }

  /* If the user requests to initialize automatic variables, we
     should initialize the padding inside the variable.  Add a call to
     __builtin_clear_padding (&object, 0, for_auto_init = true) to
     initialize the padding of object always to zero regardless of
     INIT_TYPE.  Note, we will not insert this call if the aggregate
     variable has been completely cleared already or it's initialized
     with an empty constructor.  We cannot insert this call if the
     variable is a gimple register since __builtin_clear_padding will take
     the address of the variable.  As a result, if a long double/_Complex long
     double variable will be spilled into stack later, its padding cannot
     be cleared with __builtin_clear_padding.  We should clear its padding
     when it is spilled into memory.  */
  if (is_init_expr
      && !is_gimple_reg (object)
      && clear_padding_type_may_have_padding_p (type)
      && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
          || !AGGREGATE_TYPE_P (type))
      && is_var_need_auto_init (object))
    gimple_add_padding_init_for_auto_var (object, false, pre_p);

  return ret;
}
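
/* For illustration (simplified; the exact outcome depends on target costs
   and optimization options), the heuristics above mean that, e.g.,

     const int tab[4] = { 1, 2, 3, 4 };

   may be promoted to a static constant in readonly data;

     int big[1000] = { [0] = 7 };

   is mostly zero, so the object is block-cleared and then big[0] = 7 is
   stored; and

     int rep[1000] = { [0 ... 999] = 7 };

   has a single unique nonzero value, so it is initialized by a runtime
   loop rather than copied from a large .rodata image.  */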

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}

/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
                          gimple_seq *pre_p, gimple_seq *post_p,
                          bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
        {
        case VAR_DECL:
          /* If we're assigning from a read-only variable initialized with
             a constructor and not volatile, do the direct assignment from
             the constructor, but only if the target is not volatile either
             since this latter assignment might end up being done on a per
             field basis.  However, if the target is volatile and the type
             is aggregate and non-addressable, gimplify_init_constructor
             knows that it needs to ensure a single access to the target
             and it will return GS_OK only in this case.  */
          if (TREE_READONLY (*from_p)
              && DECL_INITIAL (*from_p)
              && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
              && !TREE_THIS_VOLATILE (*from_p)
              && (!TREE_THIS_VOLATILE (*to_p)
                  || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
                      && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
            {
              tree old_from = *from_p;
              enum gimplify_status subret;

              /* Move the constructor into the RHS.  */
              *from_p = unshare_expr (DECL_INITIAL (*from_p));

              /* Let's see if gimplify_init_constructor will need to put
                 it in memory.  */
              subret = gimplify_init_constructor (expr_p, NULL, NULL,
                                                  false, true);
              if (subret == GS_ERROR)
                {
                  /* If so, revert the change.  */
                  *from_p = old_from;
                }
              else
                {
                  ret = GS_OK;
                  changed = true;
                }
            }
          break;
        case INDIRECT_REF:
          if (!TREE_ADDRESSABLE (TREE_TYPE (*from_p)))
            /* If we have code like

                 *(const A*)(A*)&x

               where the type of "x" is a (possibly cv-qualified variant
               of "A"), treat the entire expression as identical to "x".
               This kind of code arises in C++ when an object is bound
               to a const reference, and if "x" is a TARGET_EXPR we want
               to take advantage of the optimization below.  But not if
               the type is TREE_ADDRESSABLE; then C++17 says that the
               TARGET_EXPR needs to be a temporary.  */
            if (tree t
                = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)))
              {
                bool volatile_p = TREE_THIS_VOLATILE (*from_p);
                if (TREE_THIS_VOLATILE (t) != volatile_p)
                  {
                    if (DECL_P (t))
                      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
                                                    build_fold_addr_expr (t));
                    if (REFERENCE_CLASS_P (t))
                      TREE_THIS_VOLATILE (t) = volatile_p;
                  }
                *from_p = t;
                ret = GS_OK;
                changed = true;
              }
          break;

        case TARGET_EXPR:
          {
            /* If we are initializing something from a TARGET_EXPR, strip the
               TARGET_EXPR and initialize it directly, if possible.  This
               can't be done if the initializer is void, since that implies
               that the temporary is set in some non-trivial way.

               ??? What about code that pulls out the temp and uses it
               elsewhere?  I think that such code never uses the TARGET_EXPR
               as an initializer.  If I'm wrong, we'll die because the temp
               won't have any RTL.  In that case, I guess we'll need to
               replace references somehow.  */
            tree init = TARGET_EXPR_INITIAL (*from_p);

            if (init
                && (TREE_CODE (*expr_p) != MODIFY_EXPR
                    || !TARGET_EXPR_NO_ELIDE (*from_p))
                && !VOID_TYPE_P (TREE_TYPE (init)))
              {
                *from_p = init;
                ret = GS_OK;
                changed = true;
              }
          }
          break;

        case COMPOUND_EXPR:
          /* Remove any COMPOUND_EXPR in the RHS so the following cases will
             be caught.  */
          gimplify_compound_expr (from_p, pre_p, true);
          ret = GS_OK;
          changed = true;
          break;

        case CONSTRUCTOR:
          /* If we already made some changes, let the front end have a
             crack at this before we break it down.  */
          if (ret != GS_UNHANDLED)
            break;

          /* If we're initializing from a CONSTRUCTOR, break this into
             individual MODIFY_EXPRs.  */
          ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
                                           false);
          return ret;

        case COND_EXPR:
          /* If we're assigning to a non-register type, push the assignment
             down into the branches.  This is mandatory for ADDRESSABLE types,
             since we cannot generate temporaries for such, but it saves a
             copy in other cases as well.  */
          if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
            {
              /* This code should mirror the code in gimplify_cond_expr.  */
              enum tree_code code = TREE_CODE (*expr_p);
              tree cond = *from_p;
              tree result = *to_p;

              ret = gimplify_expr (&result, pre_p, post_p,
                                   is_gimple_lvalue, fb_lvalue);
              if (ret != GS_ERROR)
                ret = GS_OK;

              /* If we are going to write RESULT more than once, clear
                 TREE_READONLY flag, otherwise we might incorrectly promote
                 the variable to static const and initialize it at compile
                 time in one of the branches.  */
              if (VAR_P (result)
                  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
                  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
                TREE_READONLY (result) = 0;
              if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
                TREE_OPERAND (cond, 1)
                  = build2 (code, void_type_node, result,
                            TREE_OPERAND (cond, 1));
              if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
                TREE_OPERAND (cond, 2)
                  = build2 (code, void_type_node, unshare_expr (result),
                            TREE_OPERAND (cond, 2));

              TREE_TYPE (cond) = void_type_node;
              recalculate_side_effects (cond);

              if (want_value)
                {
                  gimplify_and_add (cond, pre_p);
                  *expr_p = unshare_expr (result);
                }
              else
                *expr_p = cond;
              return ret;
            }
          break;

        case CALL_EXPR:
          /* For calls that return in memory, give *to_p as the CALL_EXPR's
             return slot so that we don't generate a temporary.  */
          if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
              && aggregate_value_p (*from_p, *from_p))
            {
              bool use_target;

              if (!(rhs_predicate_for (*to_p)) (*from_p))
                /* If we need a temporary, *to_p isn't accurate.  */
                use_target = false;
              /* It's OK to use the return slot directly unless it's an
                 NRV.  */
              else if (TREE_CODE (*to_p) == RESULT_DECL
                       && DECL_NAME (*to_p) == NULL_TREE
                       && needs_to_live_in_memory (*to_p))
                use_target = true;
              else if (is_gimple_reg_type (TREE_TYPE (*to_p))
                       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
                /* Don't force regs into memory.  */
                use_target = false;
              else if (TREE_CODE (*expr_p) == INIT_EXPR)
                /* It's OK to use the target directly if it's being
                   initialized.  */
                use_target = true;
              else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
                       != INTEGER_CST)
                /* Always use the target and thus RSO for variable-sized types.
                   GIMPLE cannot deal with a variable-sized assignment
                   embedded in a call statement.  */
                use_target = true;
              else if (TREE_CODE (*to_p) != SSA_NAME
                       && (!is_gimple_variable (*to_p)
                           || needs_to_live_in_memory (*to_p)))
                /* Don't use the original target if it's already addressable;
                   if its address escapes, and the called function uses the
                   NRV optimization, a conforming program could see *to_p
                   change before the called function returns; see c++/19317.
                   When optimizing, the return_slot pass marks more functions
                   as safe after we have escape info.  */
                use_target = false;
              else
                use_target = true;

              if (use_target)
                {
                  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
                  mark_addressable (*to_p);
                }
            }
          break;

        case WITH_SIZE_EXPR:
          /* Likewise for calls that return an aggregate of non-constant size,
             since we would not be able to generate a temporary at all.  */
          if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
            {
              *from_p = TREE_OPERAND (*from_p, 0);
              /* We don't change ret in this case because the
                 WITH_SIZE_EXPR might have been added in
                 gimplify_modify_expr, so returning GS_OK would lead to an
                 infinite loop.  */
              changed = true;
            }
          break;

          /* If we're initializing from a container, push the initialization
             inside it.  */
        case CLEANUP_POINT_EXPR:
        case BIND_EXPR:
        case STATEMENT_LIST:
          {
            tree wrap = *from_p;
            tree t;

            ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
                                 fb_lvalue);
            if (ret != GS_ERROR)
              ret = GS_OK;

            t = voidify_wrapper_expr (wrap, *expr_p);
            gcc_assert (t == *expr_p);

            if (want_value)
              {
                gimplify_and_add (wrap, pre_p);
                *expr_p = unshare_expr (*to_p);
              }
            else
              *expr_p = wrap;
            return GS_OK;
          }

        case NOP_EXPR:
          /* Pull out compound literal expressions from a NOP_EXPR.
             Those are created in the C FE to drop qualifiers during
             lvalue conversion.  */
          if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
              && tree_ssa_useless_type_conversion (*from_p))
            {
              *from_p = TREE_OPERAND (*from_p, 0);
              ret = GS_OK;
              changed = true;
            }
          break;

        case COMPOUND_LITERAL_EXPR:
          {
            tree complit = TREE_OPERAND (*expr_p, 1);
            tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
            tree decl = DECL_EXPR_DECL (decl_s);
            tree init = DECL_INITIAL (decl);

            /* struct T x = (struct T) { 0, 1, 2 } can be optimized
               into struct T x = { 0, 1, 2 } if the address of the
               compound literal has never been taken.  */
            if (!TREE_ADDRESSABLE (complit)
                && !TREE_ADDRESSABLE (decl)
                && init)
              {
                *expr_p = copy_node (*expr_p);
                TREE_OPERAND (*expr_p, 1) = init;
                return GS_OK;
              }
          }
          break;

        default:
          break;
        }
    }
  while (changed);

  return ret;
}
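
/* For illustration (simplified): the COND_EXPR case above rewrites an
   assignment of an aggregate conditional, e.g.

     big = c ? x : y;

   into statement form,

     if (c) big = x; else big = y;

   so that no temporary of the (possibly addressable) aggregate type is
   needed.  */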


/* Return true if T looks like a valid GIMPLE statement.  */

static bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OACC_PARALLEL:
    case OACC_KERNELS:
    case OACC_SERIAL:
    case OACC_DATA:
    case OACC_HOST_DATA:
    case OACC_DECLARE:
    case OACC_UPDATE:
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_CACHE:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OACC_LOOP:
    case OMP_SCAN:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_STRUCTURED_BLOCK:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_TASKGROUP:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
    case OMP_TASKLOOP:
    case OMP_TEAMS:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}


/* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
   a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.

   IMPORTANT NOTE: This promotion is performed by introducing a load of the
   other, unmodified part of the complex object just before the total store.
   As a consequence, if the object is still uninitialized, an undefined value
   will be loaded into a register, which may result in a spurious exception
   if the register is floating-point and the value happens to be a signaling
   NaN for example.  Then the fully-fledged complex operations lowering pass
   followed by a DCE pass are necessary in order to fix things up.  */

static enum gimplify_status
gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
                                   bool want_value)
{
  enum tree_code code, ocode;
  tree lhs, rhs, new_rhs, other, realpart, imagpart;

  lhs = TREE_OPERAND (*expr_p, 0);
  rhs = TREE_OPERAND (*expr_p, 1);
  code = TREE_CODE (lhs);
  lhs = TREE_OPERAND (lhs, 0);

  ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
  other = build1 (ocode, TREE_TYPE (rhs), lhs);
  suppress_warning (other);
  other = get_formal_tmp_var (other, pre_p);

  realpart = code == REALPART_EXPR ? rhs : other;
  imagpart = code == REALPART_EXPR ? other : rhs;

  if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
    new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
  else
    new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);

  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
  *expr_p = (want_value) ? rhs : NULL_TREE;

  return GS_ALL_DONE;
}
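
/* For illustration (simplified): for a complex variable c that can live in
   a register, the partial store

     __real__ c = x;

   is promoted above to the total store

     tmp = __imag__ c;
     c = COMPLEX_EXPR <x, tmp>;

   at the cost of loading the (possibly uninitialized) imaginary part, as
   the IMPORTANT NOTE above explains.  */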

/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
              : varname '=' rhs
              | '*' ID '=' rhs

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  if (error_operand_p (*from_p) || error_operand_p (*to_p))
    return GS_ERROR;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
              || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
        return ret;
      gcc_assert (!want_value);
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
        {
          tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
                                               pre_p, post_p);
          *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
        }
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Convert initialization from an empty variable-size CONSTRUCTOR to
     memset.  */
  if (TREE_TYPE (*from_p) != error_mark_node
      && TYPE_SIZE_UNIT (TREE_TYPE (*from_p))
      && !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (*from_p)))
      && TREE_CODE (*from_p) == CONSTRUCTOR
      && CONSTRUCTOR_NELTS (*from_p) == 0)
    {
      maybe_with_size_expr (from_p);
      gcc_assert (TREE_CODE (*from_p) == WITH_SIZE_EXPR);
      return gimplify_modify_expr_to_memset (expr_p,
                                             TREE_OPERAND (*from_p, 1),
                                             want_value, pre_p);
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for,
     for example, gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
        *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
                                  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For empty types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (is_empty_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types; expand_call
         relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
           && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;
    }

  /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the
     type size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
          && CALL_EXPR_IFN (call) == IFN_VA_ARG)
        {
          int nargs = call_expr_nargs (call);
          tree type = TREE_TYPE (call);
          tree ap = CALL_EXPR_ARG (call, 0);
          tree tag = CALL_EXPR_ARG (call, 1);
          tree aptag = CALL_EXPR_ARG (call, 2);
          tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
                                                       IFN_VA_ARG, type,
                                                       nargs + 1, ap, tag,
                                                       aptag, vlasize);
          TREE_OPERAND (*from_p, 0) = newcall;
        }
    }

  /* Now see if the above changed *from_p to something we handle
     specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
                                  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. one
     that does not involve a call), then we can make things a bit more
     straightforward by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
        return gimplify_modify_expr_to_memset (expr_p, size, want_value,
                                               pre_p);

      if (is_gimple_addressable (from))
        {
          *from_p = from;
          return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
                                                 pre_p);
        }
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
        DECL_NAME (*from_p)
          = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
         instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
        {
          /* Gimplify internal functions created in the FEs.  */
          int nargs = call_expr_nargs (*from_p), i;
          enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
          auto_vec<tree> vargs (nargs);

          for (i = 0; i < nargs; i++)
            {
              gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
                            EXPR_LOCATION (*from_p));
              vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
            }
          call_stmt = gimple_build_call_internal_vec (ifn, vargs);
          gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
          gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
        }
      else
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
          CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
          STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
          tree fndecl = get_callee_fndecl (*from_p);
          if (fndecl
              && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
              && call_expr_nargs (*from_p) == 3)
            call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
                                                    CALL_EXPR_ARG (*from_p, 0),
                                                    CALL_EXPR_ARG (*from_p, 1),
                                                    CALL_EXPR_ARG (*from_p, 2));
          else
            call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
        }
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
        gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
        /* The above is somewhat premature, avoid ICEing later for a
           SSA name w/o a definition.  We may have uses in the GIMPLE IL.
           ??? This doesn't make it a default-def.  */
        SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
        copy_warning (assign, *from_p);
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
                  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
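
/* For illustration (simplified): since a function call is a toplevel
   statement in GIMPLE, an assignment whose RHS is a call, e.g.

     a = foo (b);

   is not emitted as a GIMPLE_ASSIGN with a nested CALL_EXPR; the code above
   instead builds a single GIMPLE_CALL tuple with 'a' as its LHS, as the
   long comment in the middle of the function describes.  */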

/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}
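
/* For illustration (simplified): an equality comparison of two objects of
   the same variable-sized type, as can arise from non-C front ends such as
   Ada, is lowered above to

     memcmp (&op0, &op1, size) == 0

   with SIZE taken from the type's TYPE_SIZE_UNIT after substituting any
   PLACEHOLDER_EXPRs it contains.  */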

/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */

static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}
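
/* For illustration (simplified): a comparison of two aggregates whose mode
   is an integral scalar mode, say a 4-byte struct in SImode, is rewritten
   above as a comparison of the bitwise images,

     VIEW_CONVERT_EXPR<int>(op0) == VIEW_CONVERT_EXPR<int>(op1)

   which avoids a library call for such small fixed-size objects.  */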

/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
        gimplify_compound_expr (sub_p, pre_p, false);
      else
        gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}
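
/* For illustration (simplified): gimplifying the comma expression

     x = (a++, b++, a + b);

   emits the side effects of the non-final operands as statements,

     a = a + 1;
     b = b + 1;

   and rewrites the COMPOUND_EXPR to its final operand 'a + b'.  */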

/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  if (val && TREE_TYPE (val) == error_mark_node)
    return GS_ERROR;

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression.  It is
         being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
        {
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                               is_gimple_stmt, fb_none);
          val = NULL;
        }
      else
        /* The temporary may not be an SSA name as later abnormal and EH
           control flow may invalidate use/def domination.  When in SSA
           form then assume there are no such issues and SAVE_EXPRs only
           appear via GENERIC foldings.  */
        val = get_initialized_tmp_var (val, pre_p, post_p,
                                       gimple_in_ssa_p (cfun));

      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}
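
/* For illustration (simplified): a SAVE_EXPR guarantees single evaluation,
   so when SAVE_EXPR <n + 1> appears several times, e.g. in the bounds of a
   variable-length array type, the code above emits

     tmp = n + 1;

   once and resolves every occurrence of the SAVE_EXPR to 'tmp'.  */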

/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
              : ...
              | '&' varname
              ...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
         While the front end folds away '&*ptr' into 'ptr', these
         expressions may be generated internally by the compiler (e.g.,
         builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
         ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
         cv-qualification conversions, so make sure we add them if
         needed.  */
      {
        tree op00 = TREE_OPERAND (op0, 0);
        tree t_expr = TREE_TYPE (expr);
        tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
          op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
         this ADDR_EXPR.

         ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
         all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
         guarantees that the ADDR_EXPR and its operand will remain of the
         same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
        op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
                                  build_fold_addr_expr_loc (loc,
                                                            TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      if (integer_zerop (TREE_OPERAND (op0, 1)))
        goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
         being taken (we can unify those cases here) then we can mark
         the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
          && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
          && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
        set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
         the address of a call that returns a struct; see
         gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
         the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
                           is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
        break;

      /* Then mark it.  Beware that it may not be possible to do so directly
         if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
         may have made a new INDIRECT_REF.  */
      if (INDIRECT_REF_P (op0)
          || (TREE_CODE (op0) == MEM_REF
              && integer_zerop (TREE_OPERAND (op0, 1))))
        goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
         an incomplete type.  Re-build ADDR_EXPRs in canonical form
         here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
        *expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
        *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}

/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca (noutputs * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.
	 Similarly for VLAs.  */
      tree outtype = TREE_TYPE (TREE_VALUE (link));
      if (outtype != error_mark_node
	  && (TREE_ADDRESSABLE (outtype)
	      || !COMPLETE_TYPE_P (outtype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tree orig = TREE_VALUE (link);
      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  if (orig != error_mark_node)
	    error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow a
		 register will be replaced with the operand number, the
		 others will stay unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }
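
  /* For illustration (a sketch, operands invented): the split above turns
     'asm ("insn" : "+r" (x))' into 'asm ("insn" : "=r" (x) : "0" (x))',
     the "0" being a matching constraint naming output operand 0.  A
     multi-alternative "+r,m" becomes output "=r,m" with input "0,m",
     since only the register alternatives are replaced by the operand
     number.  */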

  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (intype != error_mark_node
	  && (TREE_ADDRESSABLE (intype)
	      || !COMPLETE_TYPE_P (intype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might already be used in the IL and
		 thus it is too late to make them addressable now.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (inputv != error_mark_node)
		error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			  "memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm is volatile if it was marked by the user as volatile, or
	 there are no outputs, or this is an asm goto.  */
      gimple_asm_set_volatile (stmt,
			       ASM_VOLATILE_P (expr)
			       || noutputs == 0
			       || labels);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}

/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */
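
/* For illustration (a sketch): a cleanup point whose body gimplifies to

	WCE <<cleanup>>; s1; s2;

   is rewritten by the loop below into

	try { s1; s2; } finally { cleanup; }

   while a WCE that ends the sequence simply has its cleanup statements
   spliced in place (or dropped on the normal path when it is EH-only).  */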

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}

/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally; this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups, which in turn cause
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimple_wce_set_cleanup_eh_only (wce, eh_only);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  suppress_warning (var, OPT_Wuninitialized);
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}

/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
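
/* For illustration (a sketch; D.1234 and f are invented placeholders):
   'TARGET_EXPR <D.1234, f (x)>' has D.1234 registered as a temporary,
   'D.1234 = f (x);' gimplified into the statement sequence, clobber and
   ASAN-poisoning cleanups pushed for D.1234 where applicable, and the
   expression itself replaced by the slot D.1234.  */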

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      gimple_seq init_pre_p = NULL;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add this
	 one to the temps list.  Also handle variable-length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
	  /* FIXME: this is correct only when the size of the type does
	     not depend on expressions evaluated in init.  */
	  gimplify_vla_decl (temp, &init_pre_p);
	}
      else
	{
	  /* Save the location where we need to place the unpoisoning.
	     It's possible that the variable will later become
	     needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			     fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
			       fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266: make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}

      if (init)
	gimplify_and_add (init, &init_pre_p);

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  But only if we did not promote the
	 temporary to static storage.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && !TREE_STATIC (temp)
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}

      gimple_seq_add_seq (pre_p, init_pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
			     CLEANUP_EH_ONLY (targ), pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}

/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}

/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels,
   starting with CTX.  If entries already exist, force them to be some
   flavor of private.  If there is no enclosing parallel, do nothing.  */
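
/* For illustration (a sketch): when a VLA bound 'n' is noticed inside
   nested parallels, each enclosing parallel context gets a
   GOVD_FIRSTPRIVATE entry for 'n'; an existing shared entry is demoted
   to firstprivate, while an existing map entry merely gains
   GOVD_MAP_TO_ONLY.  */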

void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_TASKGROUP
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}

/* Similarly for each of the type sizes of TYPE.  */
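
/* E.g. (a sketch) for a variably modified type such as 'int[n]', the
   saved expression holding 'n' is reachable through TYPE_DOMAIN and
   TYPE_SIZE and is firstprivatized via the recursion below.  */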

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}

/* Add an entry for DECL in the OMP context CTX with FLAGS.  */
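
/* For illustration (a sketch; 'a.ptr' is a placeholder name): privatizing
   a VLA 'int a[n]' below also adds, via its DECL_VALUE_EXPR '*a.ptr', the
   replacement pointer 'a.ptr' (as some flavor of private), and
   firstprivatizes the gimplified size temporaries of 'a' and of its
   type.  */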

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  The exception is a shared
     clause, where nothing is privatized.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		    && (flags & GOVD_FIRSTPRIVATE))
		   || (ctx->region_type == ORT_TARGET_DATA
		       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (INDIRECT_REF_P (t));
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (t && DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct;
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}

/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0
	|| octx->order_concurrent)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    if (octx->order_concurrent)
	      {
		error ("threadprivate variable %qE used in a region with"
		       " %<order(concurrent)%> clause", DECL_NAME (decl));
		inform (octx->location, "enclosing region");
	      }
	    else
	      {
		error ("threadprivate variable %qE used in target region",
		       DECL_NAME (decl));
		inform (octx->location, "enclosing target region");
	      }
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      inform (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}

/* Return true if global var DECL is device resident.  */

static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
	return true;
    }

  return false;
}

/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}

/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */
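
/* For example, with '#pragma omp parallel default(none)' referencing a
   variable 'x' that appears on no clause, the OMP_CLAUSE_DEFAULT_NONE
   case below reports "'x' not specified in enclosing 'parallel'".  */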

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (ctx->region_type & ORT_TASK)
    {
      tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);

      /* The event handle specified by a detach clause should always be
	 firstprivate, regardless of the current default.  */
      if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
	kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
    }
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  /* For C/C++ default({,first}private), variables with static storage
     duration declared in a namespace or global scope and referenced in
     the construct must be explicitly specified, i.e. the default acts as
     default(none).  */
  else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
	    || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	   && VAR_P (decl)
	   && is_global_var (decl)
	   && (DECL_FILE_SCOPE_P (decl)
	       || (DECL_CONTEXT (decl)
		   && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
	   && !lang_GNU_Fortran ())
    default_kind = OMP_CLAUSE_DEFAULT_NONE;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	inform (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}

/* Return string name for types of OpenACC constructs from ORT_* values.  */

static const char *
oacc_region_type_name (enum omp_region_type region_type)
{
  switch (region_type)
    {
    case ORT_ACC_DATA:
      return "data";
    case ORT_ACC_PARALLEL:
      return "parallel";
    case ORT_ACC_KERNELS:
      return "kernels";
    case ORT_ACC_SERIAL:
      return "serial";
    default:
      gcc_unreachable ();
    }
}

/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */
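
/* For example (a sketch): on '#pragma acc parallel' an unlisted scalar
   defaults to 'firstprivate' and an unlisted aggregate to a
   'present_or_copy' (GOVD_MAP) mapping, while on '#pragma acc kernels'
   even scalars default to a forced 'copy' map; see the switch below.  */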

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  struct gimplify_omp_ctx *ctx_default = ctx;
  /* If no 'default' clause appears on this compute construct...  */
  if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_SHARED)
    {
      /* ..., see if one appears on a lexically containing 'data'
	 construct.  */
      while ((ctx_default = ctx_default->outer_context))
	{
	  if (ctx_default->region_type == ORT_ACC_DATA
	      && ctx_default->default_kind != OMP_CLAUSE_DEFAULT_SHARED)
	    break;
	}
      /* If not, reset.  */
      if (!ctx_default)
	ctx_default = ctx;
    }

  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only the variables actually used in those
     blocks are transferred and remapped.  The block itself will have a
     private clause to avoid transferring the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's
     COMMON or EQUIVALENCE block it returns 'true' (matching shared=false
     here), as only the variables in such a COMMON/EQUIVALENCE block shall
     be privatized, not the whole block.  For C++ and Fortran it can also
     be true under certain other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx_default->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)),
	     oacc_region_type_name (ctx->region_type));
      if (ctx_default != ctx)
	inform (ctx->location, "enclosing OpenACC %qs construct and",
		oacc_region_type_name (ctx->region_type));
      inform (ctx_default->location,
	      "enclosing OpenACC %qs construct with %qs clause",
	      oacc_region_type_name (ctx_default->region_type),
	      "default(none)");
    }
  else if (ctx_default->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx_default->default_kind
			 == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (DECL_ARTIFICIAL (decl))
    {
      tree attr = lookup_attribute ("omp allocate var", DECL_ATTRIBUTES (decl));
      if (attr)
	decl = TREE_VALUE (TREE_VALUE (attr));
    }

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  if (ctx->region_type & ORT_ACC)
	    /* For OpenACC, defer expansion of the value to avoid
	       transferring privatized common-block data instead of the
	       implicitly or explicitly transferred variables that are in
	       common blocks.  */
	    ;
	  else
	    {
	      tree value = get_base_address (DECL_VALUE_EXPR (decl));

	      if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
		return omp_notice_threadprivate_variable (ctx, decl, value);
	    }
	}

      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (ctx->region_type & ORT_ACC)
	/* For OpenACC, as remarked above, defer expansion.  */
	shared = false;
      else
	shared = true;

      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if ((ctx->region_type & ORT_ACC) == 0)
	    {
	      bool is_declare_target = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target)
		{
		  int gdmk;
		  enum omp_clause_defaultmap_kind kind;
		  if (lang_hooks.decls.omp_allocatable_p (decl))
		    gdmk = GDMK_ALLOCATABLE;
		  else if (lang_hooks.decls.omp_scalar_target_p (decl))
		    gdmk = GDMK_SCALAR_TARGET;
		  else if (lang_hooks.decls.omp_scalar_p (decl, false))
		    gdmk = GDMK_SCALAR;
		  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				   == POINTER_TYPE)))
		    gdmk = GDMK_POINTER;
		  else
		    gdmk = GDMK_AGGREGATE;
		  kind = lang_hooks.decls.omp_predetermined_mapping (decl);
		  if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
		    {
		      if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
			nflags |= GOVD_FIRSTPRIVATE;
		      else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
			nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
		      else
			gcc_unreachable ();
		    }
		  else if (ctx->defaultmap[gdmk] == 0)
		    {
		      tree d = lang_hooks.decls.omp_report_decl (decl);
		      error ("%qE not specified in enclosing %<target%>",
			     DECL_NAME (d));
		      inform (ctx->location, "enclosing %<target%>");
		    }
		  else if (ctx->defaultmap[gdmk]
			   & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
		    nflags |= ctx->defaultmap[gdmk];
		  else if (ctx->defaultmap[gdmk] & GOVD_MAP_FORCE_PRESENT)
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] | GOVD_MAP_ALLOC_ONLY;
		    }
		  else
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
		    }
		}
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
			  | GOVD_MAP_ALLOC_ONLY)) == flags)
	    {
	      tree type = TREE_TYPE (decl);

	      if (gimplify_omp_ctxp->target_firstprivatize_array_bases
		  && omp_privatize_by_reference (decl))
		type = TREE_TYPE (type);
	      if (!omp_mappable_type (type))
		{
		  error ("%qD referenced in target region does not have "
			 "a mappable type", decl);
		  nflags |= GOVD_MAP | GOVD_EXPLICIT;
		}
	      else
		{
		  if ((ctx->region_type & ORT_ACC) != 0)
		    nflags = oacc_default_clause (ctx, decl, flags);
		  else
		    nflags |= GOVD_MAP;
		}
	    }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions; those shouldn't be turned into simd
     arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
	  == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (INDIRECT_REF_P (t));
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}

/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}

/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but they might be shared too;
	     when checking for copyprivate, assume they might be private,
	     otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	{
	  if ((ctx->region_type & ORT_TARGET_DATA) != 0
	      || n == NULL
	      || (n->value & GOVD_MAP) == 0)
	    continue;
	  return false;
	}

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC)
	continue;

      break;
    }
  while (1);
  return false;
}

/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* If this is the DECL_EXPR for the decl we are looking for, return it.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}


/* Gimplify the affinity clause but effectively ignore it.
   Generate:
     var = begin;
     if ((step > 0) ? var <= end : var > end)
       locator_var_expr;  */
8497
8498static void
8499gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8500{
8501 tree last_iter = NULL_TREE;
8502 tree last_bind = NULL_TREE;
8503 tree label = NULL_TREE;
8504 tree *last_body = NULL;
8505 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8506 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8507 {
8508 tree t = OMP_CLAUSE_DECL (c);
8509 if (TREE_CODE (t) == TREE_LIST
8510 && TREE_PURPOSE (t)
8511 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8512 {
8513 if (TREE_VALUE (t) == null_pointer_node)
8514 continue;
8515 if (TREE_PURPOSE (t) != last_iter)
8516 {
8517 if (last_bind)
8518 {
8519 append_to_statement_list (label, last_body);
8520 gimplify_and_add (t: last_bind, seq_p: pre_p);
8521 last_bind = NULL_TREE;
8522 }
8523 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8524 {
8525 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8526 is_gimple_val, fb_rvalue) == GS_ERROR
8527 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8528 is_gimple_val, fb_rvalue) == GS_ERROR
8529 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8530 is_gimple_val, fb_rvalue) == GS_ERROR
8531 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8532 is_gimple_val, fb_rvalue)
8533 == GS_ERROR))
8534 return;
8535 }
8536 last_iter = TREE_PURPOSE (t);
8537 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8538 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8539 NULL, block);
8540 last_body = &BIND_EXPR_BODY (last_bind);
8541 tree cond = NULL_TREE;
8542 location_t loc = OMP_CLAUSE_LOCATION (c);
8543 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8544 {
8545 tree var = TREE_VEC_ELT (it, 0);
8546 tree begin = TREE_VEC_ELT (it, 1);
8547 tree end = TREE_VEC_ELT (it, 2);
8548 tree step = TREE_VEC_ELT (it, 3);
8549 loc = DECL_SOURCE_LOCATION (var);
8550 tree tem = build2_loc (loc, code: MODIFY_EXPR, void_type_node,
8551 arg0: var, arg1: begin);
8552 append_to_statement_list_force (tem, last_body);
8553
8554 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8555 step, build_zero_cst (TREE_TYPE (step)));
8556 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8557 var, end);
8558 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8559 var, end);
8560 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8561 cond1, cond2, cond3);
8562 if (cond)
8563 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8564 boolean_type_node, cond, cond1);
8565 else
8566 cond = cond1;
8567 }
8568 tree cont_label = create_artificial_label (loc);
8569 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8570 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8571 void_node,
8572 build_and_jump (label_p: &cont_label));
8573 append_to_statement_list_force (tem, last_body);
8574 }
8575 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8576 {
8577 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8578 last_body);
8579 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8580 }
8581 if (error_operand_p (TREE_VALUE (t)))
8582 return;
8583 append_to_statement_list_force (TREE_VALUE (t), last_body);
8584 TREE_VALUE (t) = null_pointer_node;
8585 }
8586 else
8587 {
8588 if (last_bind)
8589 {
8590 append_to_statement_list (label, last_body);
8591 gimplify_and_add (last_bind, pre_p);
8592 last_bind = NULL_TREE;
8593 }
8594 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8595 {
8596 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8597 NULL, is_gimple_val, fb_rvalue);
8598 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8599 }
8600 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8601 return;
8602 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8603 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8604 return;
8605 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8606 }
8607 }
8608 if (last_bind)
8609 {
8610 append_to_statement_list (label, last_body);
8611 gimplify_and_add (last_bind, pre_p);
8612 }
8613 return;
8614}
8615
8616/* If *LIST_P contains any OpenMP depend clauses with iterators,
8617 lower all the depend clauses by populating corresponding depend
8618 array. Returns 0 if there are no such depend clauses, or
8619 2 if all depend clauses should be removed, 1 otherwise. */
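/* A sketch of the depend array layout built below, as can be read off the
   code (not a normative ABI description).  When only the OpenMP 4.5
   dependence kinds (in/out/inout) appear, the "old" format is used:

     array[0] = total number of depend addresses
     array[1] = number of out/inout addresses
     array[2]... = the addresses themselves

   otherwise the "new" format is used:

     array[0] = 0
     array[1] = total number of depend addresses
     array[2] = number of out/inout addresses
     array[3] = number of mutexinoutset addresses
     array[4] = number of in addresses
     array[5]... = the addresses; inoutset entries are stored indirectly,
		   as a pointer into a trailing block where each such entry
		   occupies two slots. */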
8620
8621static int
8622gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8623{
8624 tree c;
8625 gimple *g;
8626 size_t n[5] = { 0, 0, 0, 0, 0 };
8627 bool unused[5];
8628 tree counts[5] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8629 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8630 size_t i, j;
8631 location_t first_loc = UNKNOWN_LOCATION;
8632
8633 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8634 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8635 {
8636 switch (OMP_CLAUSE_DEPEND_KIND (c))
8637 {
8638 case OMP_CLAUSE_DEPEND_IN:
8639 i = 2;
8640 break;
8641 case OMP_CLAUSE_DEPEND_OUT:
8642 case OMP_CLAUSE_DEPEND_INOUT:
8643 i = 0;
8644 break;
8645 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8646 i = 1;
8647 break;
8648 case OMP_CLAUSE_DEPEND_DEPOBJ:
8649 i = 3;
8650 break;
8651 case OMP_CLAUSE_DEPEND_INOUTSET:
8652 i = 4;
8653 break;
8654 default:
8655 gcc_unreachable ();
8656 }
8657 tree t = OMP_CLAUSE_DECL (c);
8658 if (first_loc == UNKNOWN_LOCATION)
8659 first_loc = OMP_CLAUSE_LOCATION (c);
8660 if (TREE_CODE (t) == TREE_LIST
8661 && TREE_PURPOSE (t)
8662 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8663 {
8664 if (TREE_PURPOSE (t) != last_iter)
8665 {
8666 tree tcnt = size_one_node;
8667 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8668 {
8669 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8670 is_gimple_val, fb_rvalue) == GS_ERROR
8671 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8672 is_gimple_val, fb_rvalue) == GS_ERROR
8673 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8674 is_gimple_val, fb_rvalue) == GS_ERROR
8675 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8676 is_gimple_val, fb_rvalue)
8677 == GS_ERROR))
8678 return 2;
8679 tree var = TREE_VEC_ELT (it, 0);
8680 tree begin = TREE_VEC_ELT (it, 1);
8681 tree end = TREE_VEC_ELT (it, 2);
8682 tree step = TREE_VEC_ELT (it, 3);
8683 tree orig_step = TREE_VEC_ELT (it, 4);
8684 tree type = TREE_TYPE (var);
8685 tree stype = TREE_TYPE (step);
8686 location_t loc = DECL_SOURCE_LOCATION (var);
8687 tree endmbegin;
8688 /* Compute count for this iterator as
8689 orig_step > 0
8690 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8691 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8692 and compute product of those for the entire depend
8693 clause. */
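 /* E.g. with begin=0, end=10, step=3 and orig_step > 0, this
    gives (10 - 0 + (3 - 1)) / 3 = 4 iterations (0, 3, 6, 9). */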
8694 if (POINTER_TYPE_P (type))
8695 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8696 stype, end, begin);
8697 else
8698 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8699 end, begin);
8700 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8701 step,
8702 build_int_cst (stype, 1));
8703 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8704 build_int_cst (stype, 1));
8705 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8706 unshare_expr (endmbegin),
8707 stepm1);
8708 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8709 pos, step);
8710 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8711 endmbegin, stepp1);
8712 if (TYPE_UNSIGNED (stype))
8713 {
8714 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8715 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8716 }
8717 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8718 neg, step);
8719 step = NULL_TREE;
8720 tree cond = fold_build2_loc (loc, LT_EXPR,
8721 boolean_type_node,
8722 begin, end);
8723 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8724 build_int_cst (stype, 0));
8725 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8726 end, begin);
8727 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8728 build_int_cst (stype, 0));
8729 tree osteptype = TREE_TYPE (orig_step);
8730 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8731 orig_step,
8732 build_int_cst (osteptype, 0));
8733 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8734 cond, pos, neg);
8735 cnt = fold_convert_loc (loc, sizetype, cnt);
8736 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8737 fb_rvalue) == GS_ERROR)
8738 return 2;
8739 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8740 }
8741 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8742 fb_rvalue) == GS_ERROR)
8743 return 2;
8744 last_iter = TREE_PURPOSE (t);
8745 last_count = tcnt;
8746 }
8747 if (counts[i] == NULL_TREE)
8748 counts[i] = last_count;
8749 else
8750 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8751 PLUS_EXPR, counts[i], last_count);
8752 }
8753 else
8754 n[i]++;
8755 }
8756 for (i = 0; i < 5; i++)
8757 if (counts[i])
8758 break;
8759 if (i == 5)
8760 return 0;
8761
8762 tree total = size_zero_node;
8763 for (i = 0; i < 5; i++)
8764 {
8765 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8766 if (counts[i] == NULL_TREE)
8767 counts[i] = size_zero_node;
8768 if (n[i])
8769 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8770 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8771 fb_rvalue) == GS_ERROR)
8772 return 2;
8773 total = size_binop (PLUS_EXPR, total, counts[i]);
8774 }
8775
8776 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8777 == GS_ERROR)
8778 return 2;
8779 bool is_old = unused[1] && unused[3] && unused[4];
8780 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8781 size_int (is_old ? 1 : 4));
8782 if (!unused[4])
8783 totalpx = size_binop (PLUS_EXPR, totalpx,
8784 size_binop (MULT_EXPR, counts[4], size_int (2)));
8785 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8786 tree array = create_tmp_var_raw (type);
8787 TREE_ADDRESSABLE (array) = 1;
8788 if (!poly_int_tree_p (totalpx))
8789 {
8790 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8791 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8792 if (gimplify_omp_ctxp)
8793 {
8794 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8795 while (ctx
8796 && (ctx->region_type == ORT_WORKSHARE
8797 || ctx->region_type == ORT_TASKGROUP
8798 || ctx->region_type == ORT_SIMD
8799 || ctx->region_type == ORT_ACC))
8800 ctx = ctx->outer_context;
8801 if (ctx)
8802 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8803 }
8804 gimplify_vla_decl (array, pre_p);
8805 }
8806 else
8807 gimple_add_tmp_var (array);
8808 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8809 NULL_TREE);
8810 tree tem;
8811 if (!is_old)
8812 {
8813 tem = build2 (MODIFY_EXPR, void_type_node, r,
8814 build_int_cst (ptr_type_node, 0));
8815 gimplify_and_add (tem, pre_p);
8816 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8817 NULL_TREE);
8818 }
8819 tem = build2 (MODIFY_EXPR, void_type_node, r,
8820 fold_convert (ptr_type_node, total));
8821 gimplify_and_add (tem, pre_p);
8822 for (i = 1; i < (is_old ? 2 : 4); i++)
8823 {
8824 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8825 NULL_TREE, NULL_TREE);
8826 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8827 gimplify_and_add (tem, pre_p);
8828 }
8829
8830 tree cnts[6];
8831 for (j = 5; j; j--)
8832 if (!unused[j - 1])
8833 break;
8834 for (i = 0; i < 5; i++)
8835 {
8836 if (i && (i >= j || unused[i - 1]))
8837 {
8838 cnts[i] = cnts[i - 1];
8839 continue;
8840 }
8841 cnts[i] = create_tmp_var (sizetype);
8842 if (i == 0)
8843 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8844 else
8845 {
8846 tree t;
8847 if (is_old)
8848 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8849 else
8850 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8851 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8852 == GS_ERROR)
8853 return 2;
8854 g = gimple_build_assign (cnts[i], t);
8855 }
8856 gimple_seq_add_stmt (pre_p, g);
8857 }
8858 if (unused[4])
8859 cnts[5] = NULL_TREE;
8860 else
8861 {
8862 tree t = size_binop (PLUS_EXPR, total, size_int (5));
8863 cnts[5] = create_tmp_var (sizetype);
8864 g = gimple_build_assign (cnts[5], t);
8865 gimple_seq_add_stmt (pre_p, g);
8866 }
8867
8868 last_iter = NULL_TREE;
8869 tree last_bind = NULL_TREE;
8870 tree *last_body = NULL;
8871 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8872 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8873 {
8874 switch (OMP_CLAUSE_DEPEND_KIND (c))
8875 {
8876 case OMP_CLAUSE_DEPEND_IN:
8877 i = 2;
8878 break;
8879 case OMP_CLAUSE_DEPEND_OUT:
8880 case OMP_CLAUSE_DEPEND_INOUT:
8881 i = 0;
8882 break;
8883 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8884 i = 1;
8885 break;
8886 case OMP_CLAUSE_DEPEND_DEPOBJ:
8887 i = 3;
8888 break;
8889 case OMP_CLAUSE_DEPEND_INOUTSET:
8890 i = 4;
8891 break;
8892 default:
8893 gcc_unreachable ();
8894 }
8895 tree t = OMP_CLAUSE_DECL (c);
8896 if (TREE_CODE (t) == TREE_LIST
8897 && TREE_PURPOSE (t)
8898 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8899 {
8900 if (TREE_PURPOSE (t) != last_iter)
8901 {
8902 if (last_bind)
8903 gimplify_and_add (last_bind, pre_p);
8904 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8905 last_bind = build3 (BIND_EXPR, void_type_node,
8906 BLOCK_VARS (block), NULL, block);
8907 TREE_SIDE_EFFECTS (last_bind) = 1;
8908 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8909 tree *p = &BIND_EXPR_BODY (last_bind);
8910 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8911 {
8912 tree var = TREE_VEC_ELT (it, 0);
8913 tree begin = TREE_VEC_ELT (it, 1);
8914 tree end = TREE_VEC_ELT (it, 2);
8915 tree step = TREE_VEC_ELT (it, 3);
8916 tree orig_step = TREE_VEC_ELT (it, 4);
8917 tree type = TREE_TYPE (var);
8918 location_t loc = DECL_SOURCE_LOCATION (var);
8919 /* Emit:
8920 var = begin;
8921 goto cond_label;
8922 beg_label:
8923 ...
8924 var = var + step;
8925 cond_label:
8926 if (orig_step > 0) {
8927 if (var < end) goto beg_label;
8928 } else {
8929 if (var > end) goto beg_label;
8930 }
8931 for each iterator, with inner iterators added to
8932 the ... above. */
8933 tree beg_label = create_artificial_label (loc);
8934 tree cond_label = NULL_TREE;
8935 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8936 var, begin);
8937 append_to_statement_list_force (tem, p);
8938 tem = build_and_jump (&cond_label);
8939 append_to_statement_list_force (tem, p);
8940 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8941 append_to_statement_list (tem, p);
8942 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8943 NULL_TREE, NULL_TREE);
8944 TREE_SIDE_EFFECTS (bind) = 1;
8945 SET_EXPR_LOCATION (bind, loc);
8946 append_to_statement_list_force (bind, p);
8947 if (POINTER_TYPE_P (type))
8948 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8949 var, fold_convert_loc (loc, sizetype,
8950 step));
8951 else
8952 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8953 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8954 var, tem);
8955 append_to_statement_list_force (tem, p);
8956 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8957 append_to_statement_list (tem, p);
8958 tree cond = fold_build2_loc (loc, LT_EXPR,
8959 boolean_type_node,
8960 var, end);
8961 tree pos
8962 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8963 cond, build_and_jump (&beg_label),
8964 void_node);
8965 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8966 var, end);
8967 tree neg
8968 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8969 cond, build_and_jump (&beg_label),
8970 void_node);
8971 tree osteptype = TREE_TYPE (orig_step);
8972 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8973 orig_step,
8974 build_int_cst (osteptype, 0));
8975 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8976 cond, pos, neg);
8977 append_to_statement_list_force (tem, p);
8978 p = &BIND_EXPR_BODY (bind);
8979 }
8980 last_body = p;
8981 }
8982 last_iter = TREE_PURPOSE (t);
8983 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8984 {
8985 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8986 0), last_body);
8987 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8988 }
8989 if (error_operand_p (TREE_VALUE (t)))
8990 return 2;
8991 if (TREE_VALUE (t) != null_pointer_node)
8992 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8993 if (i == 4)
8994 {
8995 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8996 NULL_TREE, NULL_TREE);
8997 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
8998 NULL_TREE, NULL_TREE);
8999 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9000 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9001 void_type_node, r, r2);
9002 append_to_statement_list_force (tem, last_body);
9003 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9004 void_type_node, cnts[i],
9005 size_binop (PLUS_EXPR, cnts[i],
9006 size_int (1)));
9007 append_to_statement_list_force (tem, last_body);
9008 i = 5;
9009 }
9010 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9011 NULL_TREE, NULL_TREE);
9012 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9013 void_type_node, r, TREE_VALUE (t));
9014 append_to_statement_list_force (tem, last_body);
9015 if (i == 5)
9016 {
9017 r = build4 (ARRAY_REF, ptr_type_node, array,
9018 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9019 NULL_TREE, NULL_TREE);
9020 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9021 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9022 void_type_node, r, tem);
9023 append_to_statement_list_force (tem, last_body);
9024 }
9025 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
9026 void_type_node, cnts[i],
9027 size_binop (PLUS_EXPR, cnts[i],
9028 size_int (1 + (i == 5))));
9029 append_to_statement_list_force (tem, last_body);
9030 TREE_VALUE (t) = null_pointer_node;
9031 }
9032 else
9033 {
9034 if (last_bind)
9035 {
9036 gimplify_and_add (last_bind, pre_p);
9037 last_bind = NULL_TREE;
9038 }
9039 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9040 {
9041 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9042 NULL, is_gimple_val, fb_rvalue);
9043 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9044 }
9045 if (error_operand_p (OMP_CLAUSE_DECL (c)))
9046 return 2;
9047 if (OMP_CLAUSE_DECL (c) != null_pointer_node)
9048 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9049 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9050 is_gimple_val, fb_rvalue) == GS_ERROR)
9051 return 2;
9052 if (i == 4)
9053 {
9054 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9055 NULL_TREE, NULL_TREE);
9056 tree r2 = build4 (ARRAY_REF, ptr_type_node, array, cnts[5],
9057 NULL_TREE, NULL_TREE);
9058 r2 = build_fold_addr_expr_with_type (r2, ptr_type_node);
9059 tem = build2 (MODIFY_EXPR, void_type_node, r, r2);
9060 gimplify_and_add (tem, pre_p);
9061 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR,
9062 cnts[i],
9063 size_int (1)));
9064 gimple_seq_add_stmt (pre_p, g);
9065 i = 5;
9066 }
9067 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
9068 NULL_TREE, NULL_TREE);
9069 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
9070 gimplify_and_add (tem, pre_p);
9071 if (i == 5)
9072 {
9073 r = build4 (ARRAY_REF, ptr_type_node, array,
9074 size_binop (PLUS_EXPR, cnts[i], size_int (1)),
9075 NULL_TREE, NULL_TREE);
9076 tem = build_int_cst (ptr_type_node, GOMP_DEPEND_INOUTSET);
9077 tem = build2 (MODIFY_EXPR, void_type_node, r, tem);
9079 gimplify_and_add (tem, pre_p);
9080 }
9081 g = gimple_build_assign (cnts[i],
9082 size_binop (PLUS_EXPR, cnts[i],
9083 size_int (1 + (i == 5))));
9084 gimple_seq_add_stmt (pre_p, g);
9085 }
9086 }
9087 if (last_bind)
9088 gimplify_and_add (last_bind, pre_p);
9089 tree cond = boolean_false_node;
9090 if (is_old)
9091 {
9092 if (!unused[0])
9093 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
9094 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
9095 size_int (2)));
9096 if (!unused[2])
9097 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9098 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9099 cnts[2],
9100 size_binop_loc (first_loc, PLUS_EXPR,
9101 totalpx,
9102 size_int (1))));
9103 }
9104 else
9105 {
9106 tree prev = size_int (5);
9107 for (i = 0; i < 5; i++)
9108 {
9109 if (unused[i])
9110 continue;
9111 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
9112 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
9113 build2_loc (first_loc, NE_EXPR, boolean_type_node,
9114 cnts[i], unshare_expr (prev)));
9115 }
9116 }
9117 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
9118 build_call_expr_loc (first_loc,
9119 builtin_decl_explicit (BUILT_IN_TRAP),
9120 0), void_node);
9121 gimplify_and_add (tem, pre_p);
9122 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9123 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
9124 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9125 OMP_CLAUSE_CHAIN (c) = *list_p;
9126 *list_p = c;
9127 return 1;
9128}
9129
9130/* For a set of mappings describing an array section pointed to by a struct
9131 (or derived type, etc.) component, create an "alloc" or "release" node to
9132 insert into a list following a GOMP_MAP_STRUCT node. For some types of
9133 mapping (e.g. Fortran arrays with descriptors), an additional mapping may
9134 be created that is inserted into the list of mapping nodes attached to the
9135 directive being processed -- not part of the sorted list of nodes after
9136 GOMP_MAP_STRUCT.
9137
9138 CODE is the code of the directive being processed. GRP_START and GRP_END
9139 are the first and last of two or three nodes representing this array section
9140 mapping (e.g. a data movement node like GOMP_MAP_{TO,FROM}, optionally a
9141 GOMP_MAP_TO_PSET, and finally a GOMP_MAP_ALWAYS_POINTER). EXTRA_NODE is
9142 filled with the additional node described above, if needed.
9143
9144 This function does not add the new nodes to any lists itself. It is the
9145 responsibility of the caller to do that. */
9146
9147static tree
9148build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
9149 tree *extra_node)
9150{
9151 enum gomp_map_kind mkind
9152 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
9153 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
9154
9155 gcc_assert (grp_start != grp_end);
9156
9157 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9158 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9159 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
9160 OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
9161 tree grp_mid = NULL_TREE;
9162 if (OMP_CLAUSE_CHAIN (grp_start) != grp_end)
9163 grp_mid = OMP_CLAUSE_CHAIN (grp_start);
9164
9165 if (grp_mid
9166 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9167 && OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_TO_PSET)
9168 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (grp_mid);
9169 else
9170 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
9171
9172 if (grp_mid
9173 && OMP_CLAUSE_CODE (grp_mid) == OMP_CLAUSE_MAP
9174 && (OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ALWAYS_POINTER
9175 || OMP_CLAUSE_MAP_KIND (grp_mid) == GOMP_MAP_ATTACH_DETACH))
9176 {
9177 tree c3
9178 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
9179 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
9180 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
9181 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
9182 OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
9183
9184 *extra_node = c3;
9185 }
9186 else
9187 *extra_node = NULL_TREE;
9188
9189 return c2;
9190}
9191
9192/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
9193 and set *BITPOSP and *POFFSETP to the bit offset of the access. */
9198
9199static tree
9200extract_base_bit_offset (tree base, poly_int64 *bitposp,
9201 poly_offset_int *poffsetp)
9202{
9203 tree offset;
9204 poly_int64 bitsize, bitpos;
9205 machine_mode mode;
9206 int unsignedp, reversep, volatilep = 0;
9207 poly_offset_int poffset;
9208
9209 STRIP_NOPS (base);
9210
9211 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
9212 &unsignedp, &reversep, &volatilep);
9213
9214 STRIP_NOPS (base);
9215
9216 if (offset && poly_int_tree_p (offset))
9217 {
9218 poffset = wi::to_poly_offset (offset);
9219 offset = NULL_TREE;
9220 }
9221 else
9222 poffset = 0;
9223
9224 if (maybe_ne (bitpos, 0))
9225 poffset += bits_to_bytes_round_down (bitpos);
9226
9227 *bitposp = bitpos;
9228 *poffsetp = poffset;
9229
9230 return base;
9231}
9232
9233/* Used for topological sorting of mapping groups. UNVISITED means we haven't
9234 started processing the group yet. The TEMPORARY mark is used when we first
9235 encounter a group on a depth-first traversal, and the PERMANENT mark is used
9236 when we have processed all the group's children (i.e. all the base pointers
9237 referred to by the group's mapping nodes, recursively). */
9238
9239enum omp_tsort_mark {
9240 UNVISITED,
9241 TEMPORARY,
9242 PERMANENT
9243};
9244
9245/* Hash for trees based on operand_equal_p. Like tree_operand_hash
9246 but ignores side effects in the equality comparisons. */
9247
9248struct tree_operand_hash_no_se : tree_operand_hash
9249{
9250 static inline bool equal (const value_type &,
9251 const compare_type &);
9252};
9253
9254inline bool
9255tree_operand_hash_no_se::equal (const value_type &t1,
9256 const compare_type &t2)
9257{
9258 return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
9259}
9260
9261/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
9262 clause. */
9263
9264struct omp_mapping_group {
9265 tree *grp_start;
9266 tree grp_end;
9267 omp_tsort_mark mark;
9268 /* If we've removed the group but need to reindex, mark the group as
9269 deleted. */
9270 bool deleted;
9271 struct omp_mapping_group *sibling;
9272 struct omp_mapping_group *next;
9273};
9274
9275DEBUG_FUNCTION void
9276debug_mapping_group (omp_mapping_group *grp)
9277{
9278 tree tmp = OMP_CLAUSE_CHAIN (grp->grp_end);
9279 OMP_CLAUSE_CHAIN (grp->grp_end) = NULL;
9280 debug_generic_expr (*grp->grp_start);
9281 OMP_CLAUSE_CHAIN (grp->grp_end) = tmp;
9282}
9283
9284/* Return the OpenMP "base pointer" of an expression EXPR, or NULL if there
9285 isn't one. */
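/* E.g. for p->arr[3].x (i.e. (*p).arr[3].x), the component and array refs
   are stripped and P is returned, whereas s.arr[3] involves no
   indirection and yields NULL_TREE. */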
9286
9287static tree
9288omp_get_base_pointer (tree expr)
9289{
9290 while (TREE_CODE (expr) == ARRAY_REF
9291 || TREE_CODE (expr) == COMPONENT_REF)
9292 expr = TREE_OPERAND (expr, 0);
9293
9294 if (INDIRECT_REF_P (expr)
9295 || (TREE_CODE (expr) == MEM_REF
9296 && integer_zerop (TREE_OPERAND (expr, 1))))
9297 {
9298 expr = TREE_OPERAND (expr, 0);
9299 while (TREE_CODE (expr) == COMPOUND_EXPR)
9300 expr = TREE_OPERAND (expr, 1);
9301 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
9302 expr = TREE_OPERAND (expr, 0);
9303 if (TREE_CODE (expr) == SAVE_EXPR)
9304 expr = TREE_OPERAND (expr, 0);
9305 STRIP_NOPS (expr);
9306 return expr;
9307 }
9308
9309 return NULL_TREE;
9310}
9311
9312/* Remove COMPONENT_REFS and indirections from EXPR. */
9313
9314static tree
9315omp_strip_components_and_deref (tree expr)
9316{
9317 while (TREE_CODE (expr) == COMPONENT_REF
9318 || INDIRECT_REF_P (expr)
9319 || (TREE_CODE (expr) == MEM_REF
9320 && integer_zerop (TREE_OPERAND (expr, 1)))
9321 || TREE_CODE (expr) == POINTER_PLUS_EXPR
9322 || TREE_CODE (expr) == COMPOUND_EXPR)
9323 if (TREE_CODE (expr) == COMPOUND_EXPR)
9324 expr = TREE_OPERAND (expr, 1);
9325 else
9326 expr = TREE_OPERAND (expr, 0);
9327
9328 STRIP_NOPS (expr);
9329
9330 return expr;
9331}
9332
9333static tree
9334omp_strip_indirections (tree expr)
9335{
9336 while (INDIRECT_REF_P (expr)
9337 || (TREE_CODE (expr) == MEM_REF
9338 && integer_zerop (TREE_OPERAND (expr, 1))))
9339 expr = TREE_OPERAND (expr, 0);
9340
9341 return expr;
9342}
9343
9344/* An attach or detach operation depends directly on the address being
9345 attached/detached. Return that address, or none if there are no
9346 attachments/detachments. */
9347
9348static tree
9349omp_get_attachment (omp_mapping_group *grp)
9350{
9351 tree node = *grp->grp_start;
9352
9353 switch (OMP_CLAUSE_MAP_KIND (node))
9354 {
9355 case GOMP_MAP_TO:
9356 case GOMP_MAP_FROM:
9357 case GOMP_MAP_TOFROM:
9358 case GOMP_MAP_ALWAYS_FROM:
9359 case GOMP_MAP_ALWAYS_TO:
9360 case GOMP_MAP_ALWAYS_TOFROM:
9361 case GOMP_MAP_FORCE_FROM:
9362 case GOMP_MAP_FORCE_TO:
9363 case GOMP_MAP_FORCE_TOFROM:
9364 case GOMP_MAP_FORCE_PRESENT:
9365 case GOMP_MAP_PRESENT_ALLOC:
9366 case GOMP_MAP_PRESENT_FROM:
9367 case GOMP_MAP_PRESENT_TO:
9368 case GOMP_MAP_PRESENT_TOFROM:
9369 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9370 case GOMP_MAP_ALWAYS_PRESENT_TO:
9371 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9372 case GOMP_MAP_ALLOC:
9373 case GOMP_MAP_RELEASE:
9374 case GOMP_MAP_DELETE:
9375 case GOMP_MAP_FORCE_ALLOC:
9376 if (node == grp->grp_end)
9377 return NULL_TREE;
9378
9379 node = OMP_CLAUSE_CHAIN (node);
9380 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9381 {
9382 gcc_assert (node != grp->grp_end);
9383 node = OMP_CLAUSE_CHAIN (node);
9384 }
9385 if (node)
9386 switch (OMP_CLAUSE_MAP_KIND (node))
9387 {
9388 case GOMP_MAP_POINTER:
9389 case GOMP_MAP_ALWAYS_POINTER:
9390 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9391 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9392 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9393 return NULL_TREE;
9394
9395 case GOMP_MAP_ATTACH_DETACH:
9396 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9397 return OMP_CLAUSE_DECL (node);
9398
9399 default:
9400 internal_error ("unexpected mapping node");
9401 }
9402 return error_mark_node;
9403
9404 case GOMP_MAP_TO_PSET:
9405 gcc_assert (node != grp->grp_end);
9406 node = OMP_CLAUSE_CHAIN (node);
9407 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9408 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9409 return OMP_CLAUSE_DECL (node);
9410 else
9411 internal_error ("unexpected mapping node");
9412 return error_mark_node;
9413
9414 case GOMP_MAP_ATTACH:
9415 case GOMP_MAP_DETACH:
9416 node = OMP_CLAUSE_CHAIN (node);
9417 if (!node || *grp->grp_start == grp->grp_end)
9418 return OMP_CLAUSE_DECL (*grp->grp_start);
9419 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9420 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9421 return OMP_CLAUSE_DECL (*grp->grp_start);
9422 else
9423 internal_error ("unexpected mapping node");
9424 return error_mark_node;
9425
9426 case GOMP_MAP_STRUCT:
9427 case GOMP_MAP_FORCE_DEVICEPTR:
9428 case GOMP_MAP_DEVICE_RESIDENT:
9429 case GOMP_MAP_LINK:
9430 case GOMP_MAP_IF_PRESENT:
9431 case GOMP_MAP_FIRSTPRIVATE:
9432 case GOMP_MAP_FIRSTPRIVATE_INT:
9433 case GOMP_MAP_USE_DEVICE_PTR:
9434 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9435 return NULL_TREE;
9436
9437 default:
9438 internal_error ("unexpected mapping node");
9439 }
9440
9441 return error_mark_node;
9442}
9443
9444/* Given a pointer START_P to the start of a group of related (e.g. pointer)
9445 mappings, return the chain pointer to the end of that group in the list. */
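/* For example (a sketch; the exact nodes vary by language and clause), a C
   mapping such as map(to: s->a[0:n]) arrives as a short run of consecutive
   clauses, roughly

     GOMP_MAP_TO [the array section]
     GOMP_MAP_ATTACH_DETACH [the base-pointer attachment]

   and this function returns the chain pointer to the last node of the
   run. */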
9446
9447static tree *
9448omp_group_last (tree *start_p)
9449{
9450 tree c = *start_p, nc, *grp_last_p = start_p;
9451
9452 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
9453
9454 nc = OMP_CLAUSE_CHAIN (c);
9455
9456 if (!nc || OMP_CLAUSE_CODE (nc) != OMP_CLAUSE_MAP)
9457 return grp_last_p;
9458
9459 switch (OMP_CLAUSE_MAP_KIND (c))
9460 {
9461 default:
9462 while (nc
9463 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9464 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9465 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9466 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH
9467 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
9468 || (OMP_CLAUSE_MAP_KIND (nc)
9469 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9470 || (OMP_CLAUSE_MAP_KIND (nc)
9471 == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)
9472 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ALWAYS_POINTER
9473 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_TO_PSET))
9474 {
9475 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9476 c = nc;
9477 tree nc2 = OMP_CLAUSE_CHAIN (nc);
9478 if (nc2
9479 && OMP_CLAUSE_CODE (nc2) == OMP_CLAUSE_MAP
9480 && (OMP_CLAUSE_MAP_KIND (nc)
9481 == GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION)
9482 && OMP_CLAUSE_MAP_KIND (nc2) == GOMP_MAP_ATTACH)
9483 {
9484 grp_last_p = &OMP_CLAUSE_CHAIN (nc);
9485 c = nc2;
9486 nc2 = OMP_CLAUSE_CHAIN (nc2);
9487 }
9488 nc = nc2;
9489 }
9490 break;
9491
9492 case GOMP_MAP_ATTACH:
9493 case GOMP_MAP_DETACH:
9494 /* This is a weird artifact of how directives are parsed: bare attach or
9495 detach clauses get a subsequent (meaningless) FIRSTPRIVATE_POINTER or
9496 FIRSTPRIVATE_REFERENCE node. FIXME. */
9497 if (nc
9498 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9499 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9500 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER))
9501 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9502 break;
9503
9504 case GOMP_MAP_TO_PSET:
9505 if (OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9506 && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH
9507 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_DETACH))
9508 grp_last_p = &OMP_CLAUSE_CHAIN (c);
9509 break;
9510
9511 case GOMP_MAP_STRUCT:
9512 {
9513 unsigned HOST_WIDE_INT num_mappings
9514 = tree_to_uhwi (OMP_CLAUSE_SIZE (c));
9515 if (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_POINTER
9516 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
9517 || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_ATTACH_DETACH)
9518 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9519 for (unsigned i = 0; i < num_mappings; i++)
9520 grp_last_p = &OMP_CLAUSE_CHAIN (*grp_last_p);
9521 }
9522 break;
9523 }
9524
9525 return grp_last_p;
9526}
9527
9528/* Walk through LIST_P, and return a list of groups of mappings found (e.g.
9529 OMP_CLAUSE_MAP with GOMP_MAP_{TO/FROM/TOFROM} followed by one or two
9530 associated GOMP_MAP_POINTER mappings). Return a vector of omp_mapping_group
9531 if we have at least one such group, else return NULL. */
9532
9533static void
9534omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
9535 tree gather_sentinel)
9536{
9537 for (tree *cp = list_p;
9538 *cp && *cp != gather_sentinel;
9539 cp = &OMP_CLAUSE_CHAIN (*cp))
9540 {
9541 if (OMP_CLAUSE_CODE (*cp) != OMP_CLAUSE_MAP)
9542 continue;
9543
9544 tree *grp_last_p = omp_group_last (cp);
9545 omp_mapping_group grp;
9546
9547 grp.grp_start = cp;
9548 grp.grp_end = *grp_last_p;
9549 grp.mark = UNVISITED;
9550 grp.sibling = NULL;
9551 grp.deleted = false;
9552 grp.next = NULL;
9553 groups->safe_push (grp);
9554
9555 cp = grp_last_p;
9556 }
9557}
9558
9559static vec<omp_mapping_group> *
9560omp_gather_mapping_groups (tree *list_p)
9561{
9562 vec<omp_mapping_group> *groups = new vec<omp_mapping_group> ();
9563
9564 omp_gather_mapping_groups_1 (list_p, groups, NULL_TREE);
9565
9566 if (groups->length () > 0)
9567 return groups;
9568 else
9569 {
9570 delete groups;
9571 return NULL;
9572 }
9573}
9574
9575/* A pointer mapping group GRP may define a block of memory starting at some
9576 base address, and maybe also define a firstprivate pointer or firstprivate
9577 reference that points to that block. The return value is a node containing
9578 the former, and the *FIRSTPRIVATE pointer is set if we have the latter.
9579 If we define several base pointers, i.e. for a GOMP_MAP_STRUCT mapping,
9580 return the number of consecutive chained nodes in CHAINED. */
9581
9582static tree
9583omp_group_base (omp_mapping_group *grp, unsigned int *chained,
9584 tree *firstprivate)
9585{
9586 tree node = *grp->grp_start;
9587
9588 *firstprivate = NULL_TREE;
9589 *chained = 1;
9590
9591 switch (OMP_CLAUSE_MAP_KIND (node))
9592 {
9593 case GOMP_MAP_TO:
9594 case GOMP_MAP_FROM:
9595 case GOMP_MAP_TOFROM:
9596 case GOMP_MAP_ALWAYS_FROM:
9597 case GOMP_MAP_ALWAYS_TO:
9598 case GOMP_MAP_ALWAYS_TOFROM:
9599 case GOMP_MAP_FORCE_FROM:
9600 case GOMP_MAP_FORCE_TO:
9601 case GOMP_MAP_FORCE_TOFROM:
9602 case GOMP_MAP_FORCE_PRESENT:
9603 case GOMP_MAP_PRESENT_ALLOC:
9604 case GOMP_MAP_PRESENT_FROM:
9605 case GOMP_MAP_PRESENT_TO:
9606 case GOMP_MAP_PRESENT_TOFROM:
9607 case GOMP_MAP_ALWAYS_PRESENT_FROM:
9608 case GOMP_MAP_ALWAYS_PRESENT_TO:
9609 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
9610 case GOMP_MAP_ALLOC:
9611 case GOMP_MAP_RELEASE:
9612 case GOMP_MAP_DELETE:
9613 case GOMP_MAP_FORCE_ALLOC:
9614 case GOMP_MAP_IF_PRESENT:
9615 if (node == grp->grp_end)
9616 return node;
9617
9618 node = OMP_CLAUSE_CHAIN (node);
9619 if (node && OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_TO_PSET)
9620 {
9621 if (node == grp->grp_end)
9622 return *grp->grp_start;
9623 node = OMP_CLAUSE_CHAIN (node);
9624 }
9625 if (node)
9626 switch (OMP_CLAUSE_MAP_KIND (node))
9627 {
9628 case GOMP_MAP_POINTER:
9629 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9630 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9631 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9632 *firstprivate = OMP_CLAUSE_DECL (node);
9633 return *grp->grp_start;
9634
9635 case GOMP_MAP_ALWAYS_POINTER:
9636 case GOMP_MAP_ATTACH_DETACH:
9637 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9638 return *grp->grp_start;
9639
9640 default:
9641 internal_error ("unexpected mapping node");
9642 }
9643 else
9644 internal_error ("unexpected mapping node");
9645 return error_mark_node;
9646
9647 case GOMP_MAP_TO_PSET:
9648 gcc_assert (node != grp->grp_end);
9649 node = OMP_CLAUSE_CHAIN (node);
9650 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH
9651 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_DETACH)
9652 return NULL_TREE;
9653 else
9654 internal_error ("unexpected mapping node");
9655 return error_mark_node;
9656
9657 case GOMP_MAP_ATTACH:
9658 case GOMP_MAP_DETACH:
9659 node = OMP_CLAUSE_CHAIN (node);
9660 if (!node || *grp->grp_start == grp->grp_end)
9661 return NULL_TREE;
9662 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9663 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9664 {
9665 /* We're mapping the base pointer itself in a bare attach or detach
9666 node. This is a side effect of how parsing works, and the mapping
9667 will be removed anyway (at least for enter/exit data directives).
9668 We should ignore the mapping here. FIXME. */
9669 return NULL_TREE;
9670 }
9671 else
9672 internal_error ("unexpected mapping node");
9673 return error_mark_node;
9674
9675 case GOMP_MAP_STRUCT:
9676 {
9677 unsigned HOST_WIDE_INT num_mappings
9678 = tree_to_uhwi (OMP_CLAUSE_SIZE (node));
9679 node = OMP_CLAUSE_CHAIN (node);
9680 if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_POINTER
9681 || OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9682 {
9683 *firstprivate = OMP_CLAUSE_DECL (node);
9684 node = OMP_CLAUSE_CHAIN (node);
9685 }
9686 *chained = num_mappings;
9687 return node;
9688 }
9689
9690 case GOMP_MAP_FORCE_DEVICEPTR:
9691 case GOMP_MAP_DEVICE_RESIDENT:
9692 case GOMP_MAP_LINK:
9693 case GOMP_MAP_FIRSTPRIVATE:
9694 case GOMP_MAP_FIRSTPRIVATE_INT:
9695 case GOMP_MAP_USE_DEVICE_PTR:
9696 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9697 return NULL_TREE;
9698
9699 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9700 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9701 case GOMP_MAP_POINTER:
9702 case GOMP_MAP_ALWAYS_POINTER:
9703 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9704 /* These shouldn't appear by themselves. */
9705 if (!seen_error ())
9706 internal_error ("unexpected pointer mapping node");
9707 return error_mark_node;
9708
9709 default:
9710 gcc_unreachable ();
9711 }
9712
9713 return error_mark_node;
9714}
9715
9716/* Given a vector of omp_mapping_groups, build a hash table so we can look up
9717 nodes by tree_operand_hash_no_se. */
9718
9719static void
9720omp_index_mapping_groups_1 (hash_map<tree_operand_hash_no_se,
9721 omp_mapping_group *> *grpmap,
9722 vec<omp_mapping_group> *groups,
9723 tree reindex_sentinel)
9724{
9725 omp_mapping_group *grp;
9726 unsigned int i;
9727 bool reindexing = reindex_sentinel != NULL_TREE, above_hwm = false;
9728
9729 FOR_EACH_VEC_ELT (*groups, i, grp)
9730 {
9731 if (reindexing && *grp->grp_start == reindex_sentinel)
9732 above_hwm = true;
9733
9734 if (reindexing && !above_hwm)
9735 continue;
9736
9737 tree fpp;
9738 unsigned int chained;
9739 tree node = omp_group_base (grp, &chained, &fpp);
9740
9741 if (node == error_mark_node || (!node && !fpp))
9742 continue;
9743
9744 for (unsigned j = 0;
9745 node && j < chained;
9746 node = OMP_CLAUSE_CHAIN (node), j++)
9747 {
9748 tree decl = OMP_CLAUSE_DECL (node);
9749 /* Sometimes we see zero-offset MEM_REF instead of INDIRECT_REF,
9750 meaning node-hash lookups don't work. This is a workaround for
9751 that, but ideally we should just create the INDIRECT_REF at
9752 source instead. FIXME. */
9753 if (TREE_CODE (decl) == MEM_REF
9754 && integer_zerop (TREE_OPERAND (decl, 1)))
9755 decl = build_fold_indirect_ref (TREE_OPERAND (decl, 0));
9756
9757 omp_mapping_group **prev = grpmap->get (decl);
9758
9759 if (prev && *prev == grp)
9760 /* Empty. */;
9761 else if (prev)
9762 {
9763 /* Mapping the same thing twice is normally diagnosed as an error,
9764 but can happen under some circumstances, e.g. in pr99928-16.c,
9765 the directive:
9766
9767 #pragma omp target simd reduction(+:a[:3]) \
9768 map(always, tofrom: a[:6])
9769 ...
9770
9771 will result in two "a[0]" mappings (of different sizes). */
9772
9773 grp->sibling = (*prev)->sibling;
9774 (*prev)->sibling = grp;
9775 }
9776 else
9777 grpmap->put (decl, grp);
9778 }
9779
9780 if (!fpp)
9781 continue;
9782
9783 omp_mapping_group **prev = grpmap->get (fpp);
9784 if (prev && *prev != grp)
9785 {
9786 grp->sibling = (*prev)->sibling;
9787 (*prev)->sibling = grp;
9788 }
9789 else
9790 grpmap->put (fpp, grp);
9791 }
9792}
9793
9794static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9795omp_index_mapping_groups (vec<omp_mapping_group> *groups)
9796{
9797 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9798 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9799
9800 omp_index_mapping_groups_1 (grpmap, groups, NULL_TREE);
9801
9802 return grpmap;
9803}
9804
9805/* Rebuild group map from partially-processed clause list (during
9806 omp_build_struct_sibling_lists). We have already processed nodes up until
9807 a high-water mark (HWM). This is a bit tricky because the list is being
9808 reordered as it is scanned, but we know:
9809
9810 1. The list after HWM has not been touched yet, so we can reindex it safely.
9811
9812 2. The list before and including HWM has been altered, but remains
9813 well-formed throughout the sibling-list building operation.
9814
9815 so, we can do the reindex operation in two parts, on the processed and
9816 then the unprocessed halves of the list. */
9817
9818static hash_map<tree_operand_hash_no_se, omp_mapping_group *> *
9819omp_reindex_mapping_groups (tree *list_p,
9820 vec<omp_mapping_group> *groups,
9821 vec<omp_mapping_group> *processed_groups,
9822 tree sentinel)
9823{
9824 hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap
9825 = new hash_map<tree_operand_hash_no_se, omp_mapping_group *>;
9826
9827 processed_groups->truncate (0);
9828
9829 omp_gather_mapping_groups_1 (list_p, processed_groups, sentinel);
9830 omp_index_mapping_groups_1 (grpmap, processed_groups, NULL_TREE);
9831 if (sentinel)
9832 omp_index_mapping_groups_1 (grpmap, groups, sentinel);
9833
9834 return grpmap;
9835}
9836
9837/* Find the immediately-containing struct for a component ref (etc.)
9838 expression EXPR. */
9839
9840static tree
9841omp_containing_struct (tree expr)
9842{
9843 tree expr0 = expr;
9844
9845 STRIP_NOPS (expr);
9846
9847 /* Note: don't strip NOPs unless we're also stripping off array refs or a
9848 component ref. */
9849 if (TREE_CODE (expr) != ARRAY_REF && TREE_CODE (expr) != COMPONENT_REF)
9850 return expr0;
9851
9852 while (TREE_CODE (expr) == ARRAY_REF)
9853 expr = TREE_OPERAND (expr, 0);
9854
9855 if (TREE_CODE (expr) == COMPONENT_REF)
9856 expr = TREE_OPERAND (expr, 0);
9857
9858 return expr;
9859}
9860
9861/* Return TRUE if DECL describes a component that is part of a whole structure
9862 that is mapped elsewhere in GRPMAP. *MAPPED_BY_GROUP is set to the group
9863 that maps that structure, if present. */
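/* E.g. if DECL is the component access s.a.b and GRPMAP contains a group
   mapping the whole of s (or of s.a), that group is returned via
   *MAPPED_BY_GROUP. */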
9864
9865static bool
9866omp_mapped_by_containing_struct (hash_map<tree_operand_hash_no_se,
9867 omp_mapping_group *> *grpmap,
9868 tree decl,
9869 omp_mapping_group **mapped_by_group)
9870{
9871 tree wsdecl = NULL_TREE;
9872
9873 *mapped_by_group = NULL;
9874
9875 while (true)
9876 {
9877 wsdecl = omp_containing_struct (decl);
9878 if (wsdecl == decl)
9879 break;
9880 omp_mapping_group **wholestruct = grpmap->get (wsdecl);
9881 if (!wholestruct
9882 && TREE_CODE (wsdecl) == MEM_REF
9883 && integer_zerop (TREE_OPERAND (wsdecl, 1)))
9884 {
9885 tree deref = TREE_OPERAND (wsdecl, 0);
9886 deref = build_fold_indirect_ref (deref);
9887 wholestruct = grpmap->get (deref);
9888 }
9889 if (wholestruct)
9890 {
9891 *mapped_by_group = *wholestruct;
9892 return true;
9893 }
9894 decl = wsdecl;
9895 }
9896
9897 return false;
9898}
9899
9900/* Helper function for omp_tsort_mapping_groups. Returns TRUE on success, or
9901 FALSE on error. */
9902
9903static bool
9904omp_tsort_mapping_groups_1 (omp_mapping_group ***outlist,
9905 vec<omp_mapping_group> *groups,
9906 hash_map<tree_operand_hash_no_se,
9907 omp_mapping_group *> *grpmap,
9908 omp_mapping_group *grp)
9909{
9910 if (grp->mark == PERMANENT)
9911 return true;
9912 if (grp->mark == TEMPORARY)
9913 {
9914 fprintf (stderr, "when processing group:\n");
9915 debug_mapping_group (grp);
9916 internal_error ("base pointer cycle detected");
9917 return false;
9918 }
9919 grp->mark = TEMPORARY;
9920
9921 tree attaches_to = omp_get_attachment (grp);
9922
9923 if (attaches_to)
9924 {
9925 omp_mapping_group **basep = grpmap->get (attaches_to);
9926
9927 if (basep && *basep != grp)
9928 {
9929 for (omp_mapping_group *w = *basep; w; w = w->sibling)
9930 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9931 return false;
9932 }
9933 }
9934
9935 tree decl = OMP_CLAUSE_DECL (*grp->grp_start);
9936
9937 while (decl)
9938 {
9939 tree base = omp_get_base_pointer (decl);
9940
9941 if (!base)
9942 break;
9943
9944 omp_mapping_group **innerp = grpmap->get (base);
9945 omp_mapping_group *wholestruct;
9946
9947 /* We should treat whole-structure mappings as if all (pointer, in this
9948 case) members are mapped as individual list items. Check if we have
9949 such a whole-structure mapping, if we don't have an explicit reference
9950 to the pointer member itself. */
9951 if (!innerp
9952 && TREE_CODE (base) == COMPONENT_REF
9953 && omp_mapped_by_containing_struct (grpmap, base, &wholestruct))
9954 innerp = &wholestruct;
9955
9956 if (innerp && *innerp != grp)
9957 {
9958 for (omp_mapping_group *w = *innerp; w; w = w->sibling)
9959 if (!omp_tsort_mapping_groups_1 (outlist, groups, grpmap, w))
9960 return false;
9961 break;
9962 }
9963
9964 decl = base;
9965 }
9966
9967 grp->mark = PERMANENT;
9968
9969 /* Emit grp to output list. */
9970
9971 **outlist = grp;
9972 *outlist = &grp->next;
9973
9974 return true;
9975}
9976
9977/* Topologically sort GROUPS, so that OMP 5.0-defined base pointers come
9978 before mappings that use those pointers. This is an implementation of the
9979 depth-first search algorithm, described e.g. at:
9980
9981 https://en.wikipedia.org/wiki/Topological_sorting
9982*/
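/* For instance (a hypothetical example), given

     #pragma omp target enter data map(to: p[0:n]) map(to: p)

   the group mapping the base pointer P is emitted before the group
   mapping the section p[0:n] that depends on it. */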
9983
9984static omp_mapping_group *
9985omp_tsort_mapping_groups (vec<omp_mapping_group> *groups,
9986 hash_map<tree_operand_hash_no_se, omp_mapping_group *>
9987 *grpmap)
9988{
9989 omp_mapping_group *grp, *outlist = NULL, **cursor;
9990 unsigned int i;
9991
9992 cursor = &outlist;
9993
9994 FOR_EACH_VEC_ELT (*groups, i, grp)
9995 {
9996 if (grp->mark != PERMANENT)
9997 if (!omp_tsort_mapping_groups_1 (&cursor, groups, grpmap, grp))
9998 return NULL;
9999 }
10000
10001 return outlist;
10002}
10003
10004/* Split INLIST into two parts, moving groups corresponding to
10005 ALLOC/RELEASE/DELETE mappings to one list, and other mappings to another.
10006 The former list is then appended to the latter. Each sub-list retains the
10007 order of the original list.
10008 Note that ATTACH nodes are later moved to the end of the list in
10009 gimplify_adjust_omp_clauses, for target regions. */
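/* E.g. an input list of groups

     {from: a} {alloc: b} {to: c} {release: d}

   becomes {from: a} {to: c} {alloc: b} {release: d}: the alloc/release/
   delete groups move to the tail while each sub-list keeps its original
   relative order. */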
10010
10011static omp_mapping_group *
10012omp_segregate_mapping_groups (omp_mapping_group *inlist)
10013{
10014 omp_mapping_group *ard_groups = NULL, *tf_groups = NULL;
10015 omp_mapping_group **ard_tail = &ard_groups, **tf_tail = &tf_groups;
10016
10017 for (omp_mapping_group *w = inlist; w;)
10018 {
10019 tree c = *w->grp_start;
10020 omp_mapping_group *next = w->next;
10021
10022 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP);
10023
10024 switch (OMP_CLAUSE_MAP_KIND (c))
10025 {
10026 case GOMP_MAP_ALLOC:
10027 case GOMP_MAP_RELEASE:
10028 case GOMP_MAP_DELETE:
10029 *ard_tail = w;
10030 w->next = NULL;
10031 ard_tail = &w->next;
10032 break;
10033
10034 default:
10035 *tf_tail = w;
10036 w->next = NULL;
10037 tf_tail = &w->next;
10038 }
10039
10040 w = next;
10041 }
10042
10043 /* Now splice the lists together... */
10044 *tf_tail = ard_groups;
10045
10046 return tf_groups;
10047}
10048
10049/* Given a list LIST_P containing groups of mappings given by GROUPS, reorder
10050 those groups based on the output list of omp_tsort_mapping_groups --
10051 singly-linked, threaded through each element's NEXT pointer starting at
10052 HEAD. Each list element appears exactly once in that linked list.
10053
10054 Each element of GROUPS may correspond to one or several mapping nodes.
10055 Node groups are kept together, and in the reordered list, the positions of
10056 the original groups are reused for the positions of the reordered list.
10057 Hence if we have e.g.
10058
10059 {to ptr ptr} firstprivate {tofrom ptr} ...
10060 ^ ^ ^
10061 first group non-"map" second group
10062
10063 and say the second group contains a base pointer for the first so must be
10064 moved before it, the resulting list will contain:
10065
10066 {tofrom ptr} firstprivate {to ptr ptr} ...
10067 ^ prev. second group ^ prev. first group
10068*/
10069
10070static tree *
10071omp_reorder_mapping_groups (vec<omp_mapping_group> *groups,
10072 omp_mapping_group *head,
10073 tree *list_p)
10074{
10075 omp_mapping_group *grp;
10076 unsigned int i;
10077 unsigned numgroups = groups->length ();
10078 auto_vec<tree> old_heads (numgroups);
10079 auto_vec<tree *> old_headps (numgroups);
10080 auto_vec<tree> new_heads (numgroups);
10081 auto_vec<tree> old_succs (numgroups);
10082 bool map_at_start = (list_p == (*groups)[0].grp_start);
10083
10084 tree *new_grp_tail = NULL;
10085
10086 /* Stash the start & end nodes of each mapping group before we start
10087 modifying the list. */
10088 FOR_EACH_VEC_ELT (*groups, i, grp)
10089 {
10090 old_headps.quick_push (grp->grp_start);
10091 old_heads.quick_push (*grp->grp_start);
10092 old_succs.quick_push (OMP_CLAUSE_CHAIN (grp->grp_end));
10093 }
10094
10095 /* And similarly, the heads of the groups in the order we want to rearrange
10096 the list to. */
10097 for (omp_mapping_group *w = head; w; w = w->next)
10098 new_heads.quick_push (*w->grp_start);
10099
10100 FOR_EACH_VEC_ELT (*groups, i, grp)
10101 {
10102 gcc_assert (head);
10103
10104 if (new_grp_tail && old_succs[i - 1] == old_heads[i])
10105 {
10106 /* a {b c d} {e f g} h i j (original)
10107 -->
10108 a {k l m} {e f g} h i j (inserted new group on last iter)
10109 -->
10110 a {k l m} {n o p} h i j (this time, chain last group to new one)
10111 ^new_grp_tail
10112 */
10113 *new_grp_tail = new_heads[i];
10114 }
10115 else if (new_grp_tail)
10116 {
10117 /* a {b c d} e {f g h} i j k (original)
10118 -->
10119 a {l m n} e {f g h} i j k (gap after last iter's group)
10120 -->
10121 a {l m n} e {o p q} h i j (chain last group to old successor)
10122 ^new_grp_tail
10123 */
10124 *new_grp_tail = old_succs[i - 1];
10125 *old_headps[i] = new_heads[i];
10126 }
10127 else
10128 {
10129 /* The first inserted group -- point to new group, and leave end
10130 open.
10131 a {b c d} e f
10132 -->
10133 a {g h i...
10134 */
10135 *grp->grp_start = new_heads[i];
10136 }
10137
10138 new_grp_tail = &OMP_CLAUSE_CHAIN (head->grp_end);
10139
10140 head = head->next;
10141 }
10142
10143 if (new_grp_tail)
10144 *new_grp_tail = old_succs[numgroups - 1];
10145
10146 gcc_assert (!head);
10147
10148 return map_at_start ? (*groups)[0].grp_start : list_p;
10149}
10150
10151/* DECL is supposed to have lastprivate semantics in the outer contexts
10152 of combined/composite constructs, starting with OCTX.
10153 Add needed lastprivate, shared or map clause if no data sharing or
10154 mapping clause is present. IMPLICIT_P is true if it is an implicit
10155 clause (IV on simd), in which case the lastprivate will not be
10156 copied to some constructs. */
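/* E.g. for a combined construct such as (a hypothetical example)

     #pragma omp target teams distribute parallel for

   a variable with lastprivate semantics on the innermost construct may
   need a map clause added on the target, shared on the teams/parallel
   constructs and lastprivate on the intervening loop constructs, which is
   what the loop below arranges. */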
10157
10158static void
10159omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
10160 tree decl, bool implicit_p)
10161{
10162 struct gimplify_omp_ctx *orig_octx = octx;
10163 for (; octx; octx = octx->outer_context)
10164 {
10165 if ((octx->region_type == ORT_COMBINED_PARALLEL
10166 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
10167 && splay_tree_lookup (octx->variables,
10168 (splay_tree_key) decl) == NULL)
10169 {
10170 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
10171 continue;
10172 }
10173 if ((octx->region_type & ORT_TASK) != 0
10174 && octx->combined_loop
10175 && splay_tree_lookup (octx->variables,
10176 (splay_tree_key) decl) == NULL)
10177 {
10178 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10179 continue;
10180 }
10181 if (implicit_p
10182 && octx->region_type == ORT_WORKSHARE
10183 && octx->combined_loop
10184 && splay_tree_lookup (octx->variables,
10185 (splay_tree_key) decl) == NULL
10186 && octx->outer_context
10187 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
10188 && splay_tree_lookup (octx->outer_context->variables,
10189 (splay_tree_key) decl) == NULL)
10190 {
10191 octx = octx->outer_context;
10192 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10193 continue;
10194 }
10195 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
10196 && octx->combined_loop
10197 && splay_tree_lookup (octx->variables,
10198 (splay_tree_key) decl) == NULL
10199 && !omp_check_private (octx, decl, false))
10200 {
10201 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
10202 continue;
10203 }
10204 if (octx->region_type == ORT_COMBINED_TARGET)
10205 {
10206 splay_tree_node n = splay_tree_lookup (octx->variables,
10207 (splay_tree_key) decl);
10208 if (n == NULL)
10209 {
10210 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10211 octx = octx->outer_context;
10212 }
10213 else if (!implicit_p
10214 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
10215 {
10216 n->value &= ~(GOVD_FIRSTPRIVATE
10217 | GOVD_FIRSTPRIVATE_IMPLICIT
10218 | GOVD_EXPLICIT);
10219 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
10220 octx = octx->outer_context;
10221 }
10222 }
10223 break;
10224 }
10225 if (octx && (implicit_p || octx != orig_octx))
10226 omp_notice_variable (octx, decl, true);
10227}
10228
10229/* If we have mappings INNER and OUTER, where INNER is a component access and
10230 OUTER is a mapping of the whole containing struct, check that the mappings
10231 are compatible. We'll be deleting the inner mapping, so we need to make
10232 sure the outer mapping does (at least) the same transfers to/from the device
10233 as the inner mapping. */
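/* E.g. an inner map(to: s.a) is covered by an outer map(tofrom: s) and the
   inner node can go, whereas an outer map(to: s) does not perform the
   device-to-host copy that an inner map(from: s.a) requests, so that
   combination is diagnosed below. */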
10234
10235bool
10236omp_check_mapping_compatibility (location_t loc,
10237 omp_mapping_group *outer,
10238 omp_mapping_group *inner)
10239{
10240 tree first_outer = *outer->grp_start, first_inner = *inner->grp_start;
10241
10242 gcc_assert (OMP_CLAUSE_CODE (first_outer) == OMP_CLAUSE_MAP);
10243 gcc_assert (OMP_CLAUSE_CODE (first_inner) == OMP_CLAUSE_MAP);
10244
10245 enum gomp_map_kind outer_kind = OMP_CLAUSE_MAP_KIND (first_outer);
10246 enum gomp_map_kind inner_kind = OMP_CLAUSE_MAP_KIND (first_inner);
10247
10248 if (outer_kind == inner_kind)
10249 return true;
10250
10251 switch (outer_kind)
10252 {
10253 case GOMP_MAP_ALWAYS_TO:
10254 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10255 || inner_kind == GOMP_MAP_ALLOC
10256 || inner_kind == GOMP_MAP_TO)
10257 return true;
10258 break;
10259
10260 case GOMP_MAP_ALWAYS_FROM:
10261 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10262 || inner_kind == GOMP_MAP_ALLOC
10263 || inner_kind == GOMP_MAP_FROM)
10264 return true;
10265 break;
10266
10267 case GOMP_MAP_TO:
10268 case GOMP_MAP_FROM:
10269 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10270 || inner_kind == GOMP_MAP_ALLOC)
10271 return true;
10272 break;
10273
10274 case GOMP_MAP_ALWAYS_TOFROM:
10275 case GOMP_MAP_TOFROM:
10276 if (inner_kind == GOMP_MAP_FORCE_PRESENT
10277 || inner_kind == GOMP_MAP_ALLOC
10278 || inner_kind == GOMP_MAP_TO
10279 || inner_kind == GOMP_MAP_FROM
10280 || inner_kind == GOMP_MAP_TOFROM)
10281 return true;
10282 break;
10283
10284 default:
10285 ;
10286 }
10287
10288 error_at (loc, "data movement for component %qE is not compatible with "
10289 "movement for struct %qE", OMP_CLAUSE_DECL (first_inner),
10290 OMP_CLAUSE_DECL (first_outer));
10291
10292 return false;
10293}
10294
10295/* Similar to omp_resolve_clause_dependencies, but for OpenACC. The only
10296 clause dependencies we handle for now are struct element mappings and
10297 whole-struct mappings on the same directive, and duplicate clause
10298 detection. */
10299
10300void
10301oacc_resolve_clause_dependencies (vec<omp_mapping_group> *groups,
10302 hash_map<tree_operand_hash_no_se,
10303 omp_mapping_group *> *grpmap)
10304{
10305 int i;
10306 omp_mapping_group *grp;
10307 hash_set<tree_operand_hash> *seen_components = NULL;
10308 hash_set<tree_operand_hash> *shown_error = NULL;
10309
10310 FOR_EACH_VEC_ELT (*groups, i, grp)
10311 {
10312 tree grp_end = grp->grp_end;
10313 tree decl = OMP_CLAUSE_DECL (grp_end);
10314
10315 gcc_assert (OMP_CLAUSE_CODE (grp_end) == OMP_CLAUSE_MAP);
10316
10317 if (DECL_P (grp_end))
10318 continue;
10319
10320 tree c = OMP_CLAUSE_DECL (*grp->grp_start);
10321 while (TREE_CODE (c) == ARRAY_REF)
10322 c = TREE_OPERAND (c, 0);
10323 if (TREE_CODE (c) != COMPONENT_REF)
10324 continue;
10325 if (!seen_components)
10326 seen_components = new hash_set<tree_operand_hash> ();
10327 if (!shown_error)
10328 shown_error = new hash_set<tree_operand_hash> ();
10329 if (seen_components->contains (c)
10330 && !shown_error->contains (c))
10331 {
10332 error_at (OMP_CLAUSE_LOCATION (grp_end),
10333 "%qE appears more than once in map clauses",
10334 OMP_CLAUSE_DECL (grp_end));
10335 shown_error->add (c);
10336 }
10337 else
10338 seen_components->add (c);
10339
10340 omp_mapping_group *struct_group;
10341 if (omp_mapped_by_containing_struct (grpmap, decl, &struct_group)
10342 && *grp->grp_start == grp_end)
10343 {
10344 omp_check_mapping_compatibility (OMP_CLAUSE_LOCATION (grp_end),
10345 struct_group, grp);
10346 /* Remove the whole of this mapping -- redundant. */
10347 grp->deleted = true;
10348 }
10349 }
10350
10351 if (seen_components)
10352 delete seen_components;
10353 if (shown_error)
10354 delete shown_error;
10355}
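
/* The seen_components/shown_error pair above implements a "report each
   duplicate once" discipline: the first set records every component ever
   seen, the second records components already diagnosed, so a third or
   fourth occurrence stays silent.  A standalone sketch of the same
   discipline over plain strings (quadratic scan instead of hash sets to
   stay dependency-free; assumes <stdbool.h>, <stdio.h> and <string.h>;
   illustrative only, not GCC code):

     static void
     toy_report_duplicates (const char **items, int n)
     {
       for (int i = 0; i < n; i++)
	 {
	   bool seen = false, shown = false;
	   for (int j = 0; j < i; j++)
	     if (strcmp (items[j], items[i]) == 0)
	       {
		 shown |= seen;	  // Two earlier copies: already reported.
		 seen = true;
	       }
	   if (seen && !shown)
	     printf ("%s appears more than once\n", items[i]);
	 }
     }

   For { "a", "b", "a", "a" } this reports "a" exactly once, at its second
   occurrence, just as the error above fires once per component.  */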

/* Link node NEWNODE so it is pointed to by chain INSERT_AT.  NEWNODE's chain
   is linked to the previous node pointed to by INSERT_AT.  */

static tree *
omp_siblist_insert_node_after (tree newnode, tree *insert_at)
{
  OMP_CLAUSE_CHAIN (newnode) = *insert_at;
  *insert_at = newnode;
  return &OMP_CLAUSE_CHAIN (newnode);
}

/* Move NODE (which is currently pointed to by the chain OLD_POS) so it is
   pointed to by chain MOVE_AFTER instead.  */

static void
omp_siblist_move_node_after (tree node, tree *old_pos, tree *move_after)
{
  gcc_assert (node == *old_pos);
  *old_pos = OMP_CLAUSE_CHAIN (node);
  OMP_CLAUSE_CHAIN (node) = *move_after;
  *move_after = node;
}

/* Move the nodes from FIRST_PTR (pointed to by the previous node's chain)
   up to and including LAST_NODE to after the chain MOVE_AFTER.  Similar to
   omp_siblist_move_concat_nodes_after below, but no new nodes are prepended
   to the list before splicing into the new position.  Return the position
   we should continue scanning the list at, or NULL to stay where we were.  */

static tree *
omp_siblist_move_nodes_after (tree *first_ptr, tree last_node,
			      tree *move_after)
{
  if (first_ptr == move_after)
    return NULL;

  tree tmp = *first_ptr;
  *first_ptr = OMP_CLAUSE_CHAIN (last_node);
  OMP_CLAUSE_CHAIN (last_node) = *move_after;
  *move_after = tmp;

  return first_ptr;
}
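
/* All three helpers above manipulate the clause list purely through "chain
   pointers": a tree * that addresses either the head pointer or some node's
   OMP_CLAUSE_CHAIN field, so insertion and removal need no back-links and
   no special head-of-list case.  A minimal standalone sketch of the idiom
   on a toy node type (hypothetical names, not part of GCC):

     struct toy_node { int id; struct toy_node *chain; };

     // Mirror of omp_siblist_insert_node_after.
     static struct toy_node **
     toy_insert_after (struct toy_node *newnode, struct toy_node **insert_at)
     {
       newnode->chain = *insert_at;
       *insert_at = newnode;
       return &newnode->chain;
     }

     // Mirror of omp_siblist_move_node_after (assertion omitted).
     static void
     toy_move_after (struct toy_node *node, struct toy_node **old_pos,
		     struct toy_node **move_after)
     {
       *old_pos = node->chain;	   // Unlink NODE from its old position.
       node->chain = *move_after;  // Relink it at *MOVE_AFTER.
       *move_after = node;
     }

   Passing &head for the first element works exactly like passing
   &some_node->chain for a later one, which is why the code below can splice
   mapping nodes around without tracking predecessors explicitly.  */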

/* Concatenate two lists described by [FIRST_NEW, LAST_NEW_TAIL] and
   [FIRST_PTR, LAST_NODE], and insert them in the OMP clause list after chain
   pointer MOVE_AFTER.

   The latter list was previously part of the OMP clause list, and the former
   (prepended) part is comprised of new nodes.

   We start with a list of nodes starting with a struct mapping node.  We
   rearrange the list so that the new nodes, starting from FIRST_NEW and with
   last node's chain LAST_NEW_TAIL, come directly after MOVE_AFTER, followed
   by the group of mapping nodes we are currently processing (from the chain
   FIRST_PTR to LAST_NODE).  The return value is the pointer to the next
   chain we should continue processing from, or NULL to stay where we were.

   The transformation (in the case where MOVE_AFTER and FIRST_PTR are
   different) is worked through below.  Here we are processing LAST_NODE, and
   FIRST_PTR points at the preceding mapping clause:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->F (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

     *last_new_tail = *first_ptr;

  I. new_node (first_new)	[->F (last_new_tail)]

     *first_ptr = OMP_CLAUSE_CHAIN (last_node)

  #. mapping node		chain
  ----------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (first_ptr)]
  F. map_to_4			[->G (continue_at)]
  G. attach_4 (last_node)	[->H]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     OMP_CLAUSE_CHAIN (last_node) = *move_after;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->D (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...

  I. new_node (first_new)	[->F (last_new_tail)]

     *move_after = first_new;

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  H. ...
  I. new_node (first_new)	[->F (last_new_tail)]

  or, in order:

  #. mapping node		chain
  ---------------------------------------------------
  A. struct_node		[->B]
  B. comp_1			[->C]
  C. comp_2			[->I (move_after)]
  I. new_node (first_new)	[->F (last_new_tail)]
  F. map_to_4			[->G]
  G. attach_4 (last_node)	[->D]
  D. map_to_3			[->E]
  E. attach_3			[->H (continue_at)]
  H. ...
*/

static tree *
omp_siblist_move_concat_nodes_after (tree first_new, tree *last_new_tail,
				     tree *first_ptr, tree last_node,
				     tree *move_after)
{
  tree *continue_at = NULL;
  *last_new_tail = *first_ptr;
  if (first_ptr == move_after)
    *move_after = first_new;
  else
    {
      *first_ptr = OMP_CLAUSE_CHAIN (last_node);
      continue_at = first_ptr;
      OMP_CLAUSE_CHAIN (last_node) = *move_after;
      *move_after = first_new;
    }
  return continue_at;
}
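
/* Concretely, running the function on the worked example in the comment
   above splices the new node I and the group [F, G] in after C, leaving
   A -> B -> C -> I -> F -> G -> D -> E -> H, with the returned continue_at
   addressing E's chain.  A standalone re-enactment using the toy_node type
   sketched after omp_siblist_move_nodes_after (hypothetical, for
   illustration only):

     static struct toy_node **
     toy_move_concat_after (struct toy_node *first_new,
			    struct toy_node **last_new_tail,
			    struct toy_node **first_ptr,
			    struct toy_node *last_node,
			    struct toy_node **move_after)
     {
       struct toy_node **continue_at = 0;
       *last_new_tail = *first_ptr;	  // I now points at F.
       if (first_ptr == move_after)
	 *move_after = first_new;
       else
	 {
	   *first_ptr = last_node->chain;  // E now points at H.
	   continue_at = first_ptr;
	   last_node->chain = *move_after; // G now points at D.
	   *move_after = first_new;	   // C now points at I.
	 }
       return continue_at;
     }

   Given A->B->C->D->E->F->G->H and a fresh node I, the call

     toy_move_concat_after (&I, &I.chain, &E.chain, &G, &C.chain);

   yields A->B->C->I->F->G->D->E->H and returns &E.chain (pointing at H),
   matching the final "in order" diagram above.  */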

/* Mapping struct members causes an additional set of nodes to be created,
   starting with GOMP_MAP_STRUCT followed by a number of mappings equal to
   the number of members being mapped, in order of ascending position
   (address or bitwise).

   We scan through the list of mapping clauses, calling this function for
   each struct member mapping we find, and build up the list of mappings
   after the initial GOMP_MAP_STRUCT node.  For pointer members, these will
   be newly-created ALLOC nodes.  For non-pointer members, the existing
   mapping is moved into place in the sorted list.

     struct {
       int *a;
       int *b;
       int c;
       int *d;
     };

     #pragma (acc|omp directive) copy(struct.a[0:n], struct.b[0:n], struct.c,
				      struct.d[0:n])

     GOMP_MAP_STRUCT (4)
     [GOMP_MAP_FIRSTPRIVATE_REFERENCE -- for refs to structs]
     GOMP_MAP_ALLOC  (struct.a)
     GOMP_MAP_ALLOC  (struct.b)
     GOMP_MAP_TO     (struct.c)
     GOMP_MAP_ALLOC  (struct.d)
     ...

   In the case where we are mapping references to pointers, or in Fortran if
   we are mapping an array with a descriptor, additional nodes may also be
   created after the struct node list.

   The return value is either a pointer to the next node to process (if the
   list has been rearranged), or NULL to continue with the next node in the
   original list.  */
static tree *
omp_accumulate_sibling_list (enum omp_region_type region_type,
			     enum tree_code code,
			     hash_map<tree_operand_hash, tree>
			       *&struct_map_to_clause, tree *grp_start_p,
			     tree grp_end, tree *inner)
{
  poly_offset_int coffset;
  poly_int64 cbitpos;
  tree ocd = OMP_CLAUSE_DECL (grp_end);
  bool openmp = !(region_type & ORT_ACC);
  tree *continue_at = NULL;

  while (TREE_CODE (ocd) == ARRAY_REF)
    ocd = TREE_OPERAND (ocd, 0);

  if (INDIRECT_REF_P (ocd))
    ocd = TREE_OPERAND (ocd, 0);

  tree base = extract_base_bit_offset (ocd, &cbitpos, &coffset);

  bool ptr = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ALWAYS_POINTER);
  bool attach_detach = ((OMP_CLAUSE_MAP_KIND (grp_end)
			 == GOMP_MAP_ATTACH_DETACH)
			|| (OMP_CLAUSE_MAP_KIND (grp_end)
			    == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION));
  bool attach = (OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_ATTACH
		 || OMP_CLAUSE_MAP_KIND (grp_end) == GOMP_MAP_DETACH);

  /* FIXME: If we're not mapping the base pointer in some other clause on
     this directive, I think we want to create ALLOC/RELEASE here -- i.e. not
     early-exit.  */
  if (openmp && attach_detach)
    return NULL;

  if (!struct_map_to_clause || struct_map_to_clause->get (base) == NULL)
    {
      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				 OMP_CLAUSE_MAP);
      gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT : GOMP_MAP_STRUCT;

      OMP_CLAUSE_SET_MAP_KIND (l, k);

      OMP_CLAUSE_DECL (l) = unshare_expr (base);

      OMP_CLAUSE_SIZE (l)
	= (!attach ? size_int (1)
	   : (DECL_P (OMP_CLAUSE_DECL (l))
	      ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
	      : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l)))));
      if (struct_map_to_clause == NULL)
	struct_map_to_clause = new hash_map<tree_operand_hash, tree>;
      struct_map_to_clause->put (base, l);

      if (ptr || attach_detach)
	{
	  tree extra_node;
	  tree alloc_node
	    = build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
					   &extra_node);
	  OMP_CLAUSE_CHAIN (l) = alloc_node;

	  tree *insert_node_pos = grp_start_p;

	  if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (extra_node) = *insert_node_pos;
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	    }
	  else
	    OMP_CLAUSE_CHAIN (alloc_node) = *insert_node_pos;

	  *insert_node_pos = l;
	}
      else
	{
	  gcc_assert (*grp_start_p == grp_end);
	  grp_start_p = omp_siblist_insert_node_after (l, grp_start_p);
	}

      tree noind = omp_strip_indirections (base);

      if (!openmp
	  && (region_type & ORT_TARGET)
	  && TREE_CODE (noind) == COMPONENT_REF)
	{
	  /* The base for this component access is a struct component access
	     itself.  Insert a node to be processed on the next iteration of
	     our caller's loop, which will subsequently be turned into a new,
	     inner GOMP_MAP_STRUCT mapping.

	     We need to do this else the non-DECL_P base won't be
	     rewritten correctly in the offloaded region.  */
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FORCE_PRESENT);
	  OMP_CLAUSE_DECL (c2) = unshare_expr (noind);
	  OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (noind));
	  *inner = c2;
	  return NULL;
	}

      tree sdecl = omp_strip_components_and_deref (base);

      if (POINTER_TYPE_P (TREE_TYPE (sdecl)) && (region_type & ORT_TARGET))
	{
	  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end),
				      OMP_CLAUSE_MAP);
	  bool base_ref
	    = (INDIRECT_REF_P (base)
	       && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
		    == REFERENCE_TYPE)
		   || (INDIRECT_REF_P (TREE_OPERAND (base, 0))
		       && (TREE_CODE (TREE_TYPE (TREE_OPERAND
						  (TREE_OPERAND (base, 0), 0)))
			   == REFERENCE_TYPE))));
	  enum gomp_map_kind mkind = base_ref ? GOMP_MAP_FIRSTPRIVATE_REFERENCE
					      : GOMP_MAP_FIRSTPRIVATE_POINTER;
	  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
	  OMP_CLAUSE_DECL (c2) = sdecl;
	  tree baddr = build_fold_addr_expr (base);
	  baddr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
				    ptrdiff_type_node, baddr);
	  /* This isn't going to be good enough when we add support for more
	     complicated lvalue expressions.  FIXME.  */
	  if (TREE_CODE (TREE_TYPE (sdecl)) == REFERENCE_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (sdecl))) == POINTER_TYPE)
	    sdecl = build_simple_mem_ref (sdecl);
	  tree decladdr = fold_convert_loc (OMP_CLAUSE_LOCATION (grp_end),
					    ptrdiff_type_node, sdecl);
	  OMP_CLAUSE_SIZE (c2)
	    = fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
			       ptrdiff_type_node, baddr, decladdr);
	  /* Insert after struct node.  */
	  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
	  OMP_CLAUSE_CHAIN (l) = c2;
	}

      return NULL;
    }
  else if (struct_map_to_clause)
    {
      tree *osc = struct_map_to_clause->get (base);
      tree *sc = NULL, *scp = NULL;
      sc = &OMP_CLAUSE_CHAIN (*osc);
      /* The struct mapping might be immediately followed by a
	 FIRSTPRIVATE_POINTER and/or FIRSTPRIVATE_REFERENCE -- if it's an
	 indirect access or a reference, or both.  (This added node is
	 removed in omp-low.c after it has been processed there.)  */
      if (*sc != grp_end
	  && (OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_POINTER
	      || OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	sc = &OMP_CLAUSE_CHAIN (*sc);
      for (; *sc != grp_end; sc = &OMP_CLAUSE_CHAIN (*sc))
	if ((ptr || attach_detach) && sc == grp_start_p)
	  break;
	else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) != COMPONENT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != INDIRECT_REF
		 && TREE_CODE (OMP_CLAUSE_DECL (*sc)) != ARRAY_REF)
	  break;
	else
	  {
	    tree sc_decl = OMP_CLAUSE_DECL (*sc);
	    poly_offset_int offset;
	    poly_int64 bitpos;

	    if (TREE_CODE (sc_decl) == ARRAY_REF)
	      {
		while (TREE_CODE (sc_decl) == ARRAY_REF)
		  sc_decl = TREE_OPERAND (sc_decl, 0);
		if (TREE_CODE (sc_decl) != COMPONENT_REF
		    || TREE_CODE (TREE_TYPE (sc_decl)) != ARRAY_TYPE)
		  break;
	      }
	    else if (INDIRECT_REF_P (sc_decl)
		     && TREE_CODE (TREE_OPERAND (sc_decl, 0)) == COMPONENT_REF
		     && (TREE_CODE (TREE_TYPE (TREE_OPERAND (sc_decl, 0)))
			 == REFERENCE_TYPE))
	      sc_decl = TREE_OPERAND (sc_decl, 0);

	    tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset);
	    if (!base2 || !operand_equal_p (base2, base, 0))
	      break;
	    if (scp)
	      continue;
	    if (maybe_lt (coffset, offset)
		|| (known_eq (coffset, offset)
		    && maybe_lt (cbitpos, bitpos)))
	      {
		if (ptr || attach_detach)
		  scp = sc;
		else
		  break;
	      }
	  }

      if (!attach)
	OMP_CLAUSE_SIZE (*osc)
	  = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), size_one_node);
      if (ptr || attach_detach)
	{
	  tree cl = NULL_TREE, extra_node;
	  tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
							 grp_end, &extra_node);
	  tree *tail_chain = NULL;

	  /* Here, we have:

	     grp_end : the last (or only) node in this group.
	     grp_start_p : pointer to the first node in a pointer mapping
	       group up to and including GRP_END.
	     sc : pointer to the chain for the end of the struct component
	       list.
	     scp : pointer to the chain for the sorted position at which we
	       should insert in the middle of the struct component list
	       (else NULL to insert at end).
	     alloc_node : the "alloc" node for the structure (pointer-type)
	       component.  We insert at SCP (if present), else SC (the end of
	       the struct component list).
	     extra_node : a newly-synthesized node for an additional indirect
	       pointer mapping or a Fortran pointer set, if needed.
	     cl : first node to prepend before grp_start_p.
	     tail_chain : pointer to chain of last prepended node.

	     The general idea is we move the nodes for this struct mapping
	     together: the alloc node goes into the sorted list directly
	     after the struct mapping, and any extra nodes (together with the
	     nodes mapping arrays pointed to by struct components) get moved
	     after that list.  When SCP is NULL, we insert the nodes at SC,
	     i.e. at the end of the struct component mapping list.  It's
	     important that the alloc_node comes first in that case because
	     it's part of the sorted component mapping list (but subsequent
	     nodes are not!).  */

	  if (scp)
	    omp_siblist_insert_node_after (alloc_node, scp);

	  /* Make [cl,tail_chain] a list of the alloc node (if we haven't
	     already inserted it) and the extra_node (if it is present).  The
	     list can be empty if we added alloc_node above and there is no
	     extra node.  */
	  if (scp && extra_node)
	    {
	      cl = extra_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (extra_node)
	    {
	      OMP_CLAUSE_CHAIN (alloc_node) = extra_node;
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (extra_node);
	    }
	  else if (!scp)
	    {
	      cl = alloc_node;
	      tail_chain = &OMP_CLAUSE_CHAIN (alloc_node);
	    }

	  continue_at
	    = cl ? omp_siblist_move_concat_nodes_after (cl, tail_chain,
							grp_start_p, grp_end,
							sc)
		 : omp_siblist_move_nodes_after (grp_start_p, grp_end, sc);
	}
      else if (*sc != grp_end)
	{
	  gcc_assert (*grp_start_p == grp_end);

	  /* We are moving the current node back to a previous struct node:
	     the node that used to point to the current node will now point
	     to the next node.  */
	  continue_at = grp_start_p;
	  /* In the non-pointer case, the mapping clause itself is moved into
	     the correct position in the struct component list, which in this
	     case is just SC.  */
	  omp_siblist_move_node_after (*grp_start_p, grp_start_p, sc);
	}
    }
  return continue_at;
}
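
/* Much of the work above goes into keeping the GOMP_MAP_STRUCT sibling list
   sorted by each member's position within the struct: the scan over SC
   compares (byte offset, bit position) pairs and remembers SCP, the chain
   pointer where an out-of-order component would have to be spliced in.
   Stripped of the poly_int arithmetic and the attach/detach special cases,
   the search reduces to a sketch like the following (toy types, not GCC
   code, illustrative only):

     struct toy_comp { long offset; int bitpos; struct toy_comp *chain; };

     // Return the chain pointer at which a member with key
     // (OFFSET, BITPOS) keeps the list sorted in ascending order.
     static struct toy_comp **
     toy_find_sorted_pos (struct toy_comp **head, long offset, int bitpos)
     {
       struct toy_comp **sc = head;
       for (; *sc; sc = &(*sc)->chain)
	 if (offset < (*sc)->offset
	     || (offset == (*sc)->offset && bitpos < (*sc)->bitpos))
	   break;
       return sc;
     }

   The real loop must additionally stop at GRP_END, verify via
   extract_base_bit_offset that each entry still belongs to the same base
   struct, and bump the member count kept in the struct node's
   OMP_CLAUSE_SIZE.  */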

/* Scan through GROUPS, and create sorted structure sibling lists without
   gimplifying.  */

static bool
omp_build_struct_sibling_lists (enum tree_code code,
				enum omp_region_type region_type,
				vec<omp_mapping_group> *groups,
				hash_map<tree_operand_hash_no_se,
					 omp_mapping_group *> **grpmap,
				tree *list_p)
{
  unsigned i;
  omp_mapping_group *grp;
  hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
  bool success = true;
  tree *new_next = NULL;
  tree *tail = &OMP_CLAUSE_CHAIN ((*groups)[groups->length () - 1].grp_end);
  auto_vec<omp_mapping_group> pre_hwm_groups;

  FOR_EACH_VEC_ELT (*groups, i, grp)
    {
      tree c = grp->grp_end;
      tree decl = OMP_CLAUSE_DECL (c);
      tree grp_end = grp->grp_end;
      tree sentinel = OMP_CLAUSE_CHAIN (grp_end);

      if (new_next)
	grp->grp_start = new_next;

      new_next = NULL;

      tree *grp_start_p = grp->grp_start;

      if (DECL_P (decl))
	continue;

      /* Skip groups we marked for deletion in
	 oacc_resolve_clause_dependencies.  */
      if (grp->deleted)
	continue;

      if (OMP_CLAUSE_CHAIN (*grp_start_p)
	  && OMP_CLAUSE_CHAIN (*grp_start_p) != grp_end)
	{
	  /* Don't process an array descriptor that isn't inside a derived
	     type as a struct (the GOMP_MAP_POINTER following will have the
	     form "var.data", but such mappings are handled specially).  */
	  tree grpmid = OMP_CLAUSE_CHAIN (*grp_start_p);
	  if (OMP_CLAUSE_CODE (grpmid) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (grpmid) == GOMP_MAP_TO_PSET
	      && DECL_P (OMP_CLAUSE_DECL (grpmid)))
	    continue;
	}

      tree d = decl;
      if (TREE_CODE (d) == ARRAY_REF)
	{
	  while (TREE_CODE (d) == ARRAY_REF)
	    d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
	    decl = d;
	}
      if (d == decl
	  && INDIRECT_REF_P (decl)
	  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
	  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
	      == REFERENCE_TYPE)
	  && (OMP_CLAUSE_MAP_KIND (c)
	      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
	decl = TREE_OPERAND (decl, 0);

      STRIP_NOPS (decl);

      if (TREE_CODE (decl) != COMPONENT_REF)
	continue;

      /* If we're mapping the whole struct in another node, skip adding this
	 node to a sibling list.  */
      omp_mapping_group *wholestruct;
      if (omp_mapped_by_containing_struct (*grpmap, OMP_CLAUSE_DECL (c),
					   &wholestruct))
	{
	  if (!(region_type & ORT_ACC)
	      && *grp_start_p == grp_end)
	    /* Remove the whole of this mapping -- redundant.  */
	    grp->deleted = true;

	  continue;
	}

      if (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
	  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
	  && code != OACC_UPDATE
	  && code != OMP_TARGET_UPDATE)
	{
	  if (error_operand_p (decl))
	    {
	      success = false;
	      goto error_out;
	    }

	  tree stype = TREE_TYPE (decl);
	  if (TREE_CODE (stype) == REFERENCE_TYPE)
	    stype = TREE_TYPE (stype);
	  if (TYPE_SIZE_UNIT (stype) == NULL
	      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"mapping field %qE of variable length "
			"structure", OMP_CLAUSE_DECL (c));
	      success = false;
	      goto error_out;
	    }

	  tree inner = NULL_TREE;

	  new_next
	    = omp_accumulate_sibling_list (region_type, code,
					   struct_map_to_clause, grp_start_p,
					   grp_end, &inner);

	  if (inner)
	    {
	      if (new_next && *new_next == NULL_TREE)
		*new_next = inner;
	      else
		*tail = inner;

	      OMP_CLAUSE_CHAIN (inner) = NULL_TREE;
	      omp_mapping_group newgrp;
	      newgrp.grp_start = new_next ? new_next : tail;
	      newgrp.grp_end = inner;
	      newgrp.mark = UNVISITED;
	      newgrp.sibling = NULL;
	      newgrp.deleted = false;
	      newgrp.next = NULL;
	      groups->safe_push (newgrp);

	      /* !!! Growing GROUPS might invalidate the pointers in the
		 group map.  Rebuild it here.  This is a bit inefficient, but
		 shouldn't happen very often.  */
	      delete (*grpmap);
	      *grpmap
		= omp_reindex_mapping_groups (list_p, groups, &pre_hwm_groups,
					      sentinel);

	      tail = &OMP_CLAUSE_CHAIN (inner);
	    }
	}
    }

  /* Delete groups marked for deletion above.  At this point the order of
     the groups may no longer correspond to the order of the underlying
     list, which complicates this a little.  First clear out OMP_CLAUSE_DECL
     for deleted nodes...  */

  FOR_EACH_VEC_ELT (*groups, i, grp)
    if (grp->deleted)
      for (tree d = *grp->grp_start;
	   d != OMP_CLAUSE_CHAIN (grp->grp_end);
	   d = OMP_CLAUSE_CHAIN (d))
	OMP_CLAUSE_DECL (d) = NULL_TREE;

  /* ...then sweep through the list removing the now-empty nodes.  */

  tail = list_p;
  while (*tail)
    {
      if (OMP_CLAUSE_CODE (*tail) == OMP_CLAUSE_MAP
	  && OMP_CLAUSE_DECL (*tail) == NULL_TREE)
	*tail = OMP_CLAUSE_CHAIN (*tail);
      else
	tail = &OMP_CLAUSE_CHAIN (*tail);
    }

 error_out:
  if (struct_map_to_clause)
    delete struct_map_to_clause;

  return success;
}
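
/* The deletion at the end of the function above is two-phase because, after
   rearrangement, group order no longer matches list order: unlinking a
   group directly would require knowing its predecessor, which may itself
   have moved.  Marking first (OMP_CLAUSE_DECL = NULL_TREE) and then doing
   one sweep with a chain pointer avoids that.  The sweep in standalone
   form, over a toy clause type (illustrative only, not GCC code):

     struct toy_clause { void *decl; struct toy_clause *chain; };

     static void
     toy_sweep_marked (struct toy_clause **list_p)
     {
       struct toy_clause **tail = list_p;
       while (*tail)
	 {
	   if ((*tail)->decl == 0)
	     *tail = (*tail)->chain;	// Unlink; don't advance TAIL.
	   else
	     tail = &(*tail)->chain;	// Keep; advance to the next chain.
	 }
     }

   Note that after unlinking, TAIL is deliberately left in place so that the
   node that slid into *TAIL is examined on the next iteration.  */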

/* Scan the OMP clauses in *LIST_P, installing mappings into a new omp
   context and noticing the referenced variables in enclosing contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type,
			   enum tree_code code)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  tree *orig_list_p = list_p;
  int handled_depend_iterators = -1;
  int nowait = -1;

  ctx = new_omp_context (region_type);
  ctx->code = code;
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
	ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
      ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
      ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
					     ? GOVD_MAP : GOVD_FIRSTPRIVATE);
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
      case OACC_PARALLEL:
      case OACC_KERNELS:
	ctx->target_firstprivatize_array_bases = true;
      default:
	break;
      }

  if (code == OMP_TARGET
      || code == OMP_TARGET_DATA
      || code == OMP_TARGET_ENTER_DATA
      || code == OMP_TARGET_EXIT_DATA)
    {
      vec<omp_mapping_group> *groups;
      groups = omp_gather_mapping_groups (list_p);
      if (groups)
	{
	  hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
	  grpmap = omp_index_mapping_groups (groups);

	  omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
					  list_p);

	  omp_mapping_group *outlist = NULL;

	  /* Topological sorting may fail if we have duplicate nodes, which
	     we should have detected and shown an error for already.  Skip
	     sorting in that case.  */
	  if (seen_error ())
	    goto failure;

	  delete grpmap;
	  delete groups;

	  /* Rebuild now we have struct sibling lists.  */
	  groups = omp_gather_mapping_groups (list_p);
	  grpmap = omp_index_mapping_groups (groups);

	  outlist = omp_tsort_mapping_groups (groups, grpmap);
	  outlist = omp_segregate_mapping_groups (outlist);
	  list_p = omp_reorder_mapping_groups (groups, outlist, list_p);

	failure:
	  delete grpmap;
	  delete groups;
	}

      /* OpenMP map clauses with 'present' need to go in front of those
	 without.  */
      tree present_map_head = NULL;
      tree *present_map_tail_p = &present_map_head;
      tree *first_map_clause_p = NULL;

      for (tree *c_p = list_p; *c_p; )
	{
	  tree c = *c_p;
	  tree *next_c_p = &OMP_CLAUSE_CHAIN (c);

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	    {
	      if (!first_map_clause_p)
		first_map_clause_p = c_p;
	      switch (OMP_CLAUSE_MAP_KIND (c))
		{
		case GOMP_MAP_PRESENT_ALLOC:
		case GOMP_MAP_PRESENT_FROM:
		case GOMP_MAP_PRESENT_TO:
		case GOMP_MAP_PRESENT_TOFROM:
		  next_c_p = c_p;
		  *c_p = OMP_CLAUSE_CHAIN (c);

		  OMP_CLAUSE_CHAIN (c) = NULL;
		  *present_map_tail_p = c;
		  present_map_tail_p = &OMP_CLAUSE_CHAIN (c);

		  break;

		default:
		  break;
		}
	    }

	  c_p = next_c_p;
	}
      if (first_map_clause_p && present_map_head)
	{
	  tree next = *first_map_clause_p;
	  *first_map_clause_p = present_map_head;
	  *present_map_tail_p = next;
	}
    }
  else if (region_type & ORT_ACC)
    {
      vec<omp_mapping_group> *groups;
      groups = omp_gather_mapping_groups (list_p);
      if (groups)
	{
	  hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap;
	  grpmap = omp_index_mapping_groups (groups);

	  oacc_resolve_clause_dependencies (groups, grpmap);
	  omp_build_struct_sibling_lists (code, region_type, groups, &grpmap,
					  list_p);

	  delete groups;
	  delete grpmap;
	}
    }

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
	    {
	      gcc_assert (code == OMP_TARGET);
	      flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
	    }
	  goto do_add;
	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    switch (code)
	      {
	      case OMP_DISTRIBUTE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "distribute");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "conditional %<lastprivate%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
		break;
	      default:
		break;
	      }
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  if (code != OMP_LOOP)
	    check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    goto do_add;
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	      && !lang_hooks.decls.omp_scalar_p (decl, true))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"non-scalar variable %qD in conditional "
			"%<lastprivate%> clause", decl);
	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
	    }
	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
	    flags |= GOVD_LASTPRIVATE_CONDITIONAL;
	  omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
							 false);
	  goto do_add;
	case OMP_CLAUSE_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
		{
		  if (nowait == -1)
		    nowait = omp_find_clause (*list_p,
					      OMP_CLAUSE_NOWAIT) != NULL_TREE;
		  if (nowait
		      && (outer_ctx == NULL
			  || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<task%> reduction modifier on a construct "
				"with a %<nowait%> clause");
		      OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		    }
		}
	      else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "invalid %<task%> reduction modifier on construct "
			    "other than %<parallel%>, %qs, %<sections%> or "
			    "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
		  OMP_CLAUSE_REDUCTION_TASK (c) = 0;
		}
	    }
	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
	    switch (code)
	      {
	      case OMP_SECTIONS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "sections");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_PARALLEL:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "parallel");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TEAMS:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "teams");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_TASKLOOP:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "taskloop");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      case OMP_SCOPE:
		error_at (OMP_CLAUSE_LOCATION (c),
			  "%<inscan%> %<reduction%> clause on "
			  "%qs construct", "scope");
		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
		break;
	      default:
		break;
	      }
	  /* FALLTHRU */
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  /* OpenACC permits reductions on private variables.  */
	  if (!(region_type & ORT_ACC)
	      /* taskgroup is actually not a worksharing region.  */
	      && code != OMP_TASKGROUP)
	    check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (decl);
	      bool saved_into_ssa = gimplify_ctxp->into_ssa;
	      gimplify_ctxp->into_ssa = false;
	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
				 NULL, is_gimple_val, fb_rvalue, false)
		  == GS_ERROR)
		{
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  remove = true;
		  break;
		}
	      gimplify_ctxp->into_ssa = saved_into_ssa;
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      if (DECL_P (v))
		{
		  omp_firstprivatize_variable (ctx, v);
		  omp_notice_variable (ctx, v, true);
		}
	      decl = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
		{
		  gimplify_ctxp->into_ssa = false;
		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
				     NULL, is_gimple_val, fb_rvalue, false)
		      == GS_ERROR)
		    {
		      gimplify_ctxp->into_ssa = saved_into_ssa;
		      remove = true;
		      break;
		    }
		  gimplify_ctxp->into_ssa = saved_into_ssa;
		  v = TREE_OPERAND (decl, 1);
		  if (DECL_P (v))
		    {
		      omp_firstprivatize_variable (ctx, v);
		      omp_notice_variable (ctx, v, true);
		    }
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == ADDR_EXPR
		  || TREE_CODE (decl) == INDIRECT_REF)
		decl = TREE_OPERAND (decl, 0);
	    }
	  goto do_add_decl;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      if (code == OMP_SIMD
		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		{
		  struct gimplify_omp_ctx *octx = outer_ctx;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && !octx->distribute)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context->outer_context;
		      else
			octx = octx->outer_context;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && octx->distribute)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<linear%> clause for variable other than "
				"loop iterator specified on construct "
				"combined with %<distribute%>");
		      remove = true;
		      break;
		    }
		}
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      bool taskloop_seen = false;
	      decl = NULL_TREE;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else if (octx
			   && (octx->region_type & ORT_TASK) != 0
			   && octx->combined_loop)
		    taskloop_seen = true;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_PARALLEL
			   && ((ctx->region_type == ORT_WORKSHARE
				&& octx == outer_ctx)
			       || taskloop_seen))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && ((octx->region_type & ORT_COMBINED_TEAMS)
			       == ORT_COMBINED_TEAMS))
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TARGET)
		    {
		      if (flags & GOVD_LASTPRIVATE)
			flags = GOVD_SEEN | GOVD_MAP;
		    }
		  else
		    break;
		  splay_tree_node on
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
		    {
		      octx = NULL;
		      break;
		    }
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    remove = true;
	  switch (code)
	    {
	    case OMP_TARGET:
	      break;
	    case OACC_DATA:
	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
		break;
	      /* FALLTHRU */
	    case OMP_TARGET_DATA:
	    case OMP_TARGET_ENTER_DATA:
	    case OMP_TARGET_EXIT_DATA:
	    case OACC_ENTER_DATA:
	    case OACC_EXIT_DATA:
	    case OACC_HOST_DATA:
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		/* For target {,enter ,exit }data only the array slice is
		   mapped, but not the pointer to it.  */
		remove = true;
	      break;
	    default:
	      break;
	    }
	  if (remove)
	    break;
	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
	    {
	      struct gimplify_omp_ctx *octx;
	      for (octx = outer_ctx; octx; octx = octx->outer_context)
		{
		  if (octx->region_type != ORT_ACC_HOST_DATA)
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
			      "declared in enclosing %<host_data%> region",
			      DECL_NAME (decl));
		}
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
	    {
	      OMP_CLAUSE_SIZE (c)
		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
					   false);
	      if ((region_type & ORT_TARGET) != 0)
		omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
				  GOVD_FIRSTPRIVATE | GOVD_SEEN);
	    }

	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
	    {
	      tree base = omp_strip_components_and_deref (decl);
	      if (DECL_P (base))
		{
		  decl = base;
		  splay_tree_node n
		    = splay_tree_lookup (ctx->variables,
					 (splay_tree_key) decl);
		  if (seen_error ()
		      && n
		      && (n->value & (GOVD_MAP | GOVD_FIRSTPRIVATE)) != 0)
		    {
		      remove = true;
		      break;
		    }
		  flags = GOVD_MAP | GOVD_EXPLICIT;

		  goto do_add_decl;
		}
	    }

	  if (TREE_CODE (decl) == TARGET_EXPR)
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
				 is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		remove = true;
	    }
	  else if (!DECL_P (decl))
	    {
	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE)
		  && (OMP_CLAUSE_MAP_KIND (c)
		      != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}
	      /* An "attach/detach" operation on an update directive should
		 behave as a GOMP_MAP_ALWAYS_POINTER.  Beware that
		 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
		 depends on the previous mapping.  */
	      if (code == OACC_UPDATE
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);

	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
		{
		  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
		      == ARRAY_TYPE)
		    remove = true;
		  else
		    {
		      gomp_map_kind k = ((code == OACC_EXIT_DATA
					  || code == OMP_TARGET_EXIT_DATA)
					 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
		      OMP_CLAUSE_SET_MAP_KIND (c, k);
		    }
		}

	      tree cref = decl;

	      while (TREE_CODE (cref) == ARRAY_REF)
		cref = TREE_OPERAND (cref, 0);

	      if (TREE_CODE (cref) == INDIRECT_REF)
		cref = TREE_OPERAND (cref, 0);

	      if (TREE_CODE (cref) == COMPONENT_REF)
		{
		  tree base = cref;
		  while (base && !DECL_P (base))
		    {
		      tree innerbase = omp_get_base_pointer (base);
		      if (!innerbase)
			break;
		      base = innerbase;
		    }
		  if (base
		      && DECL_P (base)
		      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
		      && POINTER_TYPE_P (TREE_TYPE (base)))
		    {
		      splay_tree_node n
			= splay_tree_lookup (ctx->variables,
					     (splay_tree_key) base);
		      n->value |= GOVD_SEEN;
		    }
		}

	      if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
		{
		  /* Don't gimplify *pd fully at this point, as the base
		     will need to be adjusted during omp lowering.  */
		  auto_vec<tree, 10> expr_stack;
		  tree *p = pd;
		  while (handled_component_p (*p)
			 || TREE_CODE (*p) == INDIRECT_REF
			 || TREE_CODE (*p) == ADDR_EXPR
			 || TREE_CODE (*p) == MEM_REF
			 || TREE_CODE (*p) == NON_LVALUE_EXPR)
		    {
		      expr_stack.safe_push (*p);
		      p = &TREE_OPERAND (*p, 0);
		    }
		  for (int i = expr_stack.length () - 1; i >= 0; i--)
		    {
		      tree t = expr_stack[i];
		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree low = unshare_expr (array_ref_low_bound (t));
			      if (!is_gimple_min_invariant (low))
				{
				  TREE_OPERAND (t, 2) = low;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			  if (TREE_OPERAND (t, 3) == NULL_TREE)
			    {
			      tree elmt_size = array_ref_element_size (t);
			      if (!is_gimple_min_invariant (elmt_size))
				{
				  elmt_size = unshare_expr (elmt_size);
				  tree elmt_type
				    = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
									  0)));
				  tree factor
				    = size_int (TYPE_ALIGN_UNIT (elmt_type));
				  elmt_size
				    = size_binop (EXACT_DIV_EXPR, elmt_size,
						  factor);
				  TREE_OPERAND (t, 3) = elmt_size;
				  if (gimplify_expr (&TREE_OPERAND (t, 3),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		      else if (TREE_CODE (t) == COMPONENT_REF)
			{
			  if (TREE_OPERAND (t, 2) == NULL_TREE)
			    {
			      tree offset = component_ref_field_offset (t);
			      if (!is_gimple_min_invariant (offset))
				{
				  offset = unshare_expr (offset);
				  tree field = TREE_OPERAND (t, 1);
				  tree factor
				    = size_int (DECL_OFFSET_ALIGN (field)
						/ BITS_PER_UNIT);
				  offset = size_binop (EXACT_DIV_EXPR, offset,
						       factor);
				  TREE_OPERAND (t, 2) = offset;
				  if (gimplify_expr (&TREE_OPERAND (t, 2),
						     pre_p, NULL,
						     is_gimple_reg,
						     fb_rvalue) == GS_ERROR)
				    remove = true;
				}
			    }
			  else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
						  NULL, is_gimple_reg,
						  fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		  for (; expr_stack.length () > 0; )
		    {
		      tree t = expr_stack.pop ();

		      if (TREE_CODE (t) == ARRAY_REF
			  || TREE_CODE (t) == ARRAY_RANGE_REF)
			{
			  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
			      && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
						NULL, is_gimple_val,
						fb_rvalue) == GS_ERROR)
			    remove = true;
			}
		    }
		}
	      else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
				      fb_lvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	    flags |= GOVD_MAP_ALWAYS_TO;

	  if ((code == OMP_TARGET
	       || code == OMP_TARGET_DATA
	       || code == OMP_TARGET_ENTER_DATA
	       || code == OMP_TARGET_EXIT_DATA)
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
	    {
	      for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
		   octx = octx->outer_context)
		{
		  splay_tree_node n
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) OMP_CLAUSE_DECL (c));
		  /* If this is contained in an outer OpenMP region as a
		     firstprivate value, remove the attach/detach.  */
		  if (n && (n->value & GOVD_FIRSTPRIVATE))
		    {
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
		      goto do_add;
		    }
		}

	      enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
					     ? GOMP_MAP_DETACH
					     : GOMP_MAP_ATTACH);
	      OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
	    }

	  goto do_add;

	case OMP_CLAUSE_AFFINITY:
	  gimplify_omp_affinity (list_p, pre_p);
	  remove = true;
	  break;
	case OMP_CLAUSE_DOACROSS:
	  if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	    {
	      tree deps = OMP_CLAUSE_DECL (c);
	      while (deps && TREE_CODE (deps) == TREE_LIST)
		{
		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
				   pre_p, NULL, is_gimple_val, fb_rvalue);
		  deps = TREE_CHAIN (deps);
		}
	    }
	  else
	    gcc_assert (OMP_CLAUSE_DOACROSS_KIND (c)
			== OMP_CLAUSE_DOACROSS_SOURCE);
	  break;
	case OMP_CLAUSE_DEPEND:
	  if (handled_depend_iterators == -1)
	    handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
	  if (handled_depend_iterators)
	    {
	      if (handled_depend_iterators == 2)
		remove = true;
	      break;
	    }
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_DECL (c) != null_pointer_node)
	    {
	      OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
				 is_gimple_val, fb_rvalue) == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	    }
	  if (code == OMP_TASK)
	    ctx->has_depend = true;
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  flags = GOVD_EXPLICIT;
	  goto do_add;

	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);
	  while (TREE_CODE (decl) == INDIRECT_REF
		 || TREE_CODE (decl) == ARRAY_REF)
	    decl = TREE_OPERAND (decl, 0);
	  flags = GOVD_EXPLICIT;
	  goto do_add_decl;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	do_add_decl:
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
	    {
	      tree t = omp_member_access_dummy_var (decl);
	      if (t)
		{
		  tree v = DECL_VALUE_EXPR (decl);
		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
		  if (outer_ctx)
		    omp_notice_variable (outer_ctx, t, true);
		}
	    }
	  if (code == OACC_DATA
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	    flags |= GOVD_MAP_0LEN_ARRAY;
	  omp_add_variable (ctx, decl, flags);
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      struct gimplify_omp_ctx *pctx
		= code == OMP_TARGET ? outer_ctx : ctx;
	      if (pctx)
		omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      if (pctx
		  && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_decl_expr,
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				NULL) == NULL_TREE)
		omp_add_variable (pctx,
				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = pctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	      && !remove
	      && !omp_check_private (ctx, decl, true))
	    {
	      remove = true;
	      if (is_global_var (decl))
		{
		  if (DECL_THREAD_LOCAL_P (decl))
		    remove = false;
		  else if (DECL_HAS_VALUE_EXPR_P (decl))
		    {
		      tree value = get_base_address (DECL_VALUE_EXPR (decl));

		      if (value
			  && DECL_P (value)
			  && DECL_THREAD_LOCAL_P (value))
			remove = false;
		    }
		}
	      if (remove)
		error_at (OMP_CLAUSE_LOCATION (c),
			  "copyprivate variable %qE is not threadprivate"
			  " or private in outer context", DECL_NAME (decl));
	    }
	do_notice:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
	      && outer_ctx
	      && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
		  || (region_type == ORT_WORKSHARE
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && (OMP_CLAUSE_REDUCTION_INSCAN (c)
			  || code == OMP_LOOP)))
	      && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
		  || (code == OMP_LOOP
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
			  == ORT_COMBINED_TEAMS))))
	    {
	      splay_tree_node on
		= splay_tree_lookup (outer_ctx->variables,
				     (splay_tree_key)decl);
	      if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		      && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			  || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			      && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE))))
		    omp_firstprivatize_variable (outer_ctx, decl);
		  else
		    {
		      omp_add_variable (outer_ctx, decl,
					GOVD_SEEN | GOVD_SHARED);
		      if (outer_ctx->outer_context)
			omp_notice_variable (outer_ctx->outer_context, decl,
					     true);
		    }
		}
	    }
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  || decl == OMP_CLAUSE_DECL (c)
		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			  == ADDR_EXPR
			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			      == POINTER_PLUS_EXPR
			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
						(OMP_CLAUSE_DECL (c), 0), 0))
				  == ADDR_EXPR)))))
	      && omp_check_private (ctx, decl, false))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

	case OMP_CLAUSE_DETACH:
	  flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
	  goto do_add;

	case OMP_CLAUSE_IF:
	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
	    {
	      const char *p[2];
	      for (int i = 0; i < 2; i++)
		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
		  {
		  case VOID_CST: p[i] = "cancel"; break;
		  case OMP_PARALLEL: p[i] = "parallel"; break;
		  case OMP_SIMD: p[i] = "simd"; break;
		  case OMP_TASK: p[i] = "task"; break;
		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
		  case OMP_TARGET_DATA: p[i] = "target data"; break;
		  case OMP_TARGET: p[i] = "target"; break;
		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
		  case OMP_TARGET_ENTER_DATA:
		    p[i] = "target enter data"; break;
		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
		  default: gcc_unreachable ();
		  }
	      error_at (OMP_CLAUSE_LOCATION (c),
			"expected %qs %<if%> clause modifier rather than %qs",
			p[0], p[1]);
	      remove = true;
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_FINAL:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_NUM_TEAMS:
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
	      && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
	      && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
	    {
	      if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
		{
		  remove = true;
		  break;
		}
	      OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
		= get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
					   pre_p, NULL, true);
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
	      && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      if (code != OMP_TARGET)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<device%> clause with %<ancestor%> is only "
			    "allowed on %<target%> construct");
		  remove = true;
		  break;
		}

	      tree clauses = *orig_list_p;
	      for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
		if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
		    && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP)
		  {
		    error_at (OMP_CLAUSE_LOCATION (c),
			      "with %<ancestor%>, only the %<device%>, "
			      "%<firstprivate%>, %<private%>, %<defaultmap%>, "
			      "and %<map%> clauses may appear on the "
			      "construct");
		    remove = true;
		    break;
		  }
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	  if (OMP_CLAUSE_OPERAND (c, 0)
	      && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
	    {
	      if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
		{
		  remove = true;
		  break;
		}
	      /* All these clauses care about value, not a particular decl,
		 so try to force it into a SSA_NAME or fresh temporary.  */
	      OMP_CLAUSE_OPERAND (c, 0)
		= get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
					   pre_p, NULL, true);
	    }
	  break;

	case OMP_CLAUSE_GANG:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	  nowait = 1;
	  break;

	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
12226 case OMP_CLAUSE_COLLAPSE:
12227 case OMP_CLAUSE_TILE:
12228 case OMP_CLAUSE_AUTO:
12229 case OMP_CLAUSE_SEQ:
12230 case OMP_CLAUSE_INDEPENDENT:
12231 case OMP_CLAUSE_MERGEABLE:
12232 case OMP_CLAUSE_PROC_BIND:
12233 case OMP_CLAUSE_SAFELEN:
12234 case OMP_CLAUSE_SIMDLEN:
12235 case OMP_CLAUSE_NOGROUP:
12236 case OMP_CLAUSE_THREADS:
12237 case OMP_CLAUSE_SIMD:
12238 case OMP_CLAUSE_BIND:
12239 case OMP_CLAUSE_IF_PRESENT:
12240 case OMP_CLAUSE_FINALIZE:
12241 break;
12242
12243 case OMP_CLAUSE_ORDER:
12244 ctx->order_concurrent = true;
12245 break;
12246
12247 case OMP_CLAUSE_DEFAULTMAP:
12248 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
12249 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
12250 {
12251 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
12252 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALL:
12253 gdmkmin = GDMK_SCALAR;
12254 gdmkmax = GDMK_POINTER;
12255 break;
12256 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
12257 gdmkmin = GDMK_SCALAR;
12258 gdmkmax = GDMK_SCALAR_TARGET;
12259 break;
12260 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
12261 gdmkmin = gdmkmax = GDMK_AGGREGATE;
12262 break;
12263 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
12264 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
12265 break;
12266 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
12267 gdmkmin = gdmkmax = GDMK_POINTER;
12268 break;
12269 default:
12270 gcc_unreachable ();
12271 }
12272 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
12273 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
12274 {
12275 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
12276 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
12277 break;
12278 case OMP_CLAUSE_DEFAULTMAP_TO:
12279 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
12280 break;
12281 case OMP_CLAUSE_DEFAULTMAP_FROM:
12282 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
12283 break;
12284 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
12285 ctx->defaultmap[gdmk] = GOVD_MAP;
12286 break;
12287 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
12288 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12289 break;
12290 case OMP_CLAUSE_DEFAULTMAP_NONE:
12291 ctx->defaultmap[gdmk] = 0;
12292 break;
12293 case OMP_CLAUSE_DEFAULTMAP_PRESENT:
12294 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
12295 break;
12296 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
12297 switch (gdmk)
12298 {
12299 case GDMK_SCALAR:
12300 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
12301 break;
12302 case GDMK_SCALAR_TARGET:
12303 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
12304 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
12305 break;
12306 case GDMK_AGGREGATE:
12307 case GDMK_ALLOCATABLE:
12308 ctx->defaultmap[gdmk] = GOVD_MAP;
12309 break;
12310 case GDMK_POINTER:
12311 ctx->defaultmap[gdmk] = GOVD_MAP;
12312 if (!lang_GNU_Fortran ())
12313 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
12314 break;
12315 default:
12316 gcc_unreachable ();
12317 }
12318 break;
12319 default:
12320 gcc_unreachable ();
12321 }
12322 break;
12323
12324 case OMP_CLAUSE_ALIGNED:
12325 decl = OMP_CLAUSE_DECL (c);
12326 if (error_operand_p (t: decl))
12327 {
12328 remove = true;
12329 break;
12330 }
12331 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
12332 is_gimple_val, fb_rvalue) == GS_ERROR)
12333 {
12334 remove = true;
12335 break;
12336 }
12337 if (!is_global_var (t: decl)
12338 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
12339 omp_add_variable (ctx, decl, flags: GOVD_ALIGNED);
12340 break;
12341
12342 case OMP_CLAUSE_NONTEMPORAL:
12343 decl = OMP_CLAUSE_DECL (c);
12344 if (error_operand_p (t: decl))
12345 {
12346 remove = true;
12347 break;
12348 }
12349 omp_add_variable (ctx, decl, flags: GOVD_NONTEMPORAL);
12350 break;
12351
12352 case OMP_CLAUSE_ALLOCATE:
12353 decl = OMP_CLAUSE_DECL (c);
12354 if (error_operand_p (t: decl))
12355 {
12356 remove = true;
12357 break;
12358 }
12359 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
12360 is_gimple_val, fb_rvalue) == GS_ERROR)
12361 {
12362 remove = true;
12363 break;
12364 }
12365 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
12366 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
12367 == INTEGER_CST))
12368 ;
12369 else if (code == OMP_TASKLOOP
12370 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
12371 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
12372 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
12373 pre_p, NULL, allow_ssa: false);
12374 break;
12375
12376 case OMP_CLAUSE_DEFAULT:
12377 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
12378 break;
12379
12380 case OMP_CLAUSE_INCLUSIVE:
12381 case OMP_CLAUSE_EXCLUSIVE:
12382 decl = OMP_CLAUSE_DECL (c);
12383 {
12384 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
12385 (splay_tree_key) decl);
12386 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
12387 {
12388 error_at (OMP_CLAUSE_LOCATION (c),
12389 "%qD specified in %qs clause but not in %<inscan%> "
12390 "%<reduction%> clause on the containing construct",
12391 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
12392 remove = true;
12393 }
12394 else
12395 {
12396 n->value |= GOVD_REDUCTION_INSCAN;
12397 if (outer_ctx->region_type == ORT_SIMD
12398 && outer_ctx->outer_context
12399 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
12400 {
12401 n = splay_tree_lookup (outer_ctx->outer_context->variables,
12402 (splay_tree_key) decl);
12403 if (n && (n->value & GOVD_REDUCTION) != 0)
12404 n->value |= GOVD_REDUCTION_INSCAN;
12405 }
12406 }
12407 }
12408 break;
12409
12410 case OMP_CLAUSE_NOHOST:
12411 default:
12412 gcc_unreachable ();
12413 }
12414
12415 if (code == OACC_DATA
12416 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12417 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12418 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12419 remove = true;
12420 if (remove)
12421 *list_p = OMP_CLAUSE_CHAIN (c);
12422 else
12423 list_p = &OMP_CLAUSE_CHAIN (c);
12424 }
12425
12426 ctx->clauses = *orig_list_p;
12427 gimplify_omp_ctxp = ctx;
12428}
12429
12430/* Return true if DECL is a candidate for shared to firstprivate
12431 optimization. We only consider non-addressable scalars, not
12432 too big, and not references. */
12433
12434static bool
12435omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
12436{
12437 if (TREE_ADDRESSABLE (decl))
12438 return false;
12439 tree type = TREE_TYPE (decl);
12440 if (!is_gimple_reg_type (type)
12441 || TREE_CODE (type) == REFERENCE_TYPE
12442 || TREE_ADDRESSABLE (type))
12443 return false;
12444 /* Don't optimize too large decls, as each thread/task will have
12445 its own. */
12446 HOST_WIDE_INT len = int_size_in_bytes (type);
12447 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
12448 return false;
12449 if (omp_privatize_by_reference (decl))
12450 return false;
12451 return true;
12452}
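
/* As an illustration of the predicate above (a sketch with hypothetical
   variables, assuming a 64-bit target so the size limit
   4 * POINTER_SIZE / BITS_PER_UNIT is 32 bytes):

     int i;                       // yes: small non-addressable scalar
     double d;                    // yes: still a gimple register type
     int *p = &i;                 // i becomes TREE_ADDRESSABLE: no
     struct S { int a[16]; } s;   // no: not a register type, too large
     int &r = i;                  // no: REFERENCE_TYPE (C++)

   Qualifying shared decls that are never written in the region can
   later be privatized as firstprivate copies without changing
   observable behavior.  */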

/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For a DECL that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
   mark it as GOVD_WRITTEN in the outer contexts.  */

static void
omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
{
  for (; ctx; ctx = ctx->outer_context)
    {
      splay_tree_node n = splay_tree_lookup (ctx->variables,
                                             (splay_tree_key) decl);
      if (n == NULL)
        continue;
      else if (n->value & GOVD_SHARED)
        {
          n->value |= GOVD_WRITTEN;
          return;
        }
      else if (n->value & GOVD_DATA_SHARE_CLASS)
        return;
    }
}

/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context
   for those.  */

static tree
omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

  *walk_subtrees = 0;
  if (!wi->is_lhs)
    return NULL_TREE;

  tree op = *tp;
  do
    {
      if (handled_component_p (op))
        op = TREE_OPERAND (op, 0);
      else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
        op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
      else
        break;
    }
  while (1);
  if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
    return NULL_TREE;

  omp_mark_stores (gimplify_omp_ctxp, op);
  return NULL_TREE;
}

/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context
   for those.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
                      bool *handled_ops_p,
                      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      if (gimple_omp_for_pre_body (stmt))
        walk_gimple_seq (gimple_omp_for_pre_body (stmt),
                         omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
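
/* A sketch of the LHS peeling done in omp_find_stores_op: for stores
   like

     s.a[i] = 1;    // ARRAY_REF of COMPONENT_REF: handled_component_p
     *&x = 2;       // MEM_REF whose address operand is ADDR_EXPR of x

   the loop strips component references and MEM_REF-of-ADDR_EXPR
   wrappers down to the base decl (s resp. x), which is then passed to
   omp_mark_stores if it is an optimizable shared decl.  */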

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;
  gimple_seq *pre_p;
};

/* For all variables that were not actually used within the context,
   remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
                                                   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
          && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
        {
          error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
          return 0;
        }
      if (VAR_P (decl)
          && DECL_IN_CONSTANT_POOL (decl)
          && !lookup_attribute ("omp declare target",
                                DECL_ATTRIBUTES (decl)))
        {
          tree id = get_identifier ("omp declare target");
          DECL_ATTRIBUTES (decl)
            = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
          varpool_node *node = varpool_node::get (decl);
          if (node)
            {
              node->offloadable = 1;
              if (ENABLE_OFFLOADING)
                g->have_offload = true;
            }
        }
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
          while (ctx != NULL)
            {
              splay_tree_node on
                = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
                                      | GOVD_PRIVATE | GOVD_REDUCTION
                                      | GOVD_LINEAR | GOVD_MAP)) != 0)
                break;
              ctx = ctx->outer_context;
            }
          if (ctx == NULL)
            return 0;
        }
      code = OMP_CLAUSE_SHARED;
      /* Don't optimize shared into firstprivate for read-only vars
         on tasks with a depend clause; we shouldn't try to copy them
         until the dependencies are satisfied.  */
      if (gimplify_omp_ctxp->has_depend)
        flags |= GOVD_WRITTEN;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
          && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
          && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
        {
          error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
                 "%<target%> construct", decl);
          return 0;
        }
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();

  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
           && (flags & GOVD_WRITTEN) == 0
           && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
          && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
        OMP_CLAUSE_DECL (clause)
          = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
        = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
                  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
                     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
                       | GOVD_MAP_FORCE
                       | GOVD_MAP_FORCE_PRESENT
                       | GOVD_MAP_ALLOC_ONLY
                       | GOVD_MAP_FROM_ONLY))
        {
        case 0:
          kind = GOMP_MAP_TOFROM;
          break;
        case GOVD_MAP_FORCE:
          kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
          break;
        case GOVD_MAP_TO_ONLY:
          kind = GOMP_MAP_TO;
          break;
        case GOVD_MAP_FROM_ONLY:
          kind = GOMP_MAP_FROM;
          break;
        case GOVD_MAP_ALLOC_ONLY:
          kind = GOMP_MAP_ALLOC;
          break;
        case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
          kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
          break;
        case GOVD_MAP_FORCE_PRESENT:
          kind = GOMP_MAP_FORCE_PRESENT;
          break;
        case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
          kind = GOMP_MAP_FORCE_PRESENT;
          break;
        default:
          gcc_unreachable ();
        }
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      /* Setting of the implicit flag for the runtime is currently disabled
         for OpenACC.  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
        OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
      if (DECL_SIZE (decl)
          && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
        {
          tree decl2 = DECL_VALUE_EXPR (decl);
          gcc_assert (INDIRECT_REF_P (decl2));
          decl2 = TREE_OPERAND (decl2, 0);
          gcc_assert (DECL_P (decl2));
          tree mem = build_simple_mem_ref (decl2);
          OMP_CLAUSE_DECL (clause) = mem;
          OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
          if (gimplify_omp_ctxp->outer_context)
            {
              struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
              omp_notice_variable (ctx, decl2, true);
              omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
            }
          tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
                                      OMP_CLAUSE_MAP);
          OMP_CLAUSE_DECL (nc) = decl;
          OMP_CLAUSE_SIZE (nc) = size_zero_node;
          if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
            OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
          else
            OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
          OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
          OMP_CLAUSE_CHAIN (clause) = nc;
        }
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
               && omp_privatize_by_reference (decl))
        {
          OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
          OMP_CLAUSE_SIZE (clause)
            = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          gimplify_omp_ctxp = ctx->outer_context;
          gimplify_expr (&OMP_CLAUSE_SIZE (clause),
                         pre_p, NULL, is_gimple_val, fb_rvalue);
          gimplify_omp_ctxp = ctx;
          tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
                                      OMP_CLAUSE_MAP);
          OMP_CLAUSE_DECL (nc) = decl;
          OMP_CLAUSE_SIZE (nc) = size_zero_node;
          OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
          OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
          OMP_CLAUSE_CHAIN (clause) = nc;
        }
      else
        OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p,
                                          (ctx->region_type & ORT_ACC) != 0);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
     in simd.  Those are only added for the local vars inside of the simd
     body and they don't need to be e.g. default constructible.  */
  if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
    lang_hooks.decls.omp_finish_clause (clause, pre_p,
                                        (ctx->region_type & ORT_ACC) != 0);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
          && DECL_P (OMP_CLAUSE_SIZE (clause)))
        omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
                             true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
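
/* A rough example of what the callback above materializes: given

     int x = 42;
     #pragma omp task
     use (x);

   x is recorded as GOVD_FIRSTPRIVATE | GOVD_SEEN while the body is
   scanned, and the callback then builds the missing clause, as if the
   user had written

     #pragma omp task firstprivate(x)

   except that OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT is set on it.  */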

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
                             enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree *orig_list_p = list_p;
  tree c, decl;
  bool has_inscan_reductions = false;

  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
        if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
          break;
      if (octx)
        {
          struct walk_stmt_info wi;
          memset (&wi, 0, sizeof (wi));
          walk_gimple_seq (body, omp_find_stores_stmt,
                           omp_find_stores_op, &wi);
        }
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of the simd loop, prevent
         vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (ctx->region_type == ORT_WORKSHARE
      && ctx->outer_context
      && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
    {
      for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
            && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
          {
            decl = OMP_CLAUSE_DECL (c);
            splay_tree_node n
              = splay_tree_lookup (ctx->outer_context->variables,
                                   (splay_tree_key) decl);
            gcc_checking_assert (!splay_tree_lookup (ctx->variables,
                                                     (splay_tree_key) decl));
            omp_add_variable (ctx, decl, n->value);
            tree c2 = copy_node (c);
            OMP_CLAUSE_CHAIN (c2) = *list_p;
            *list_p = c2;
            if ((n->value & GOVD_FIRSTPRIVATE) == 0)
              continue;
            c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                   OMP_CLAUSE_FIRSTPRIVATE);
            OMP_CLAUSE_DECL (c2) = decl;
            OMP_CLAUSE_CHAIN (c2) = *list_p;
            *list_p = c2;
          }
    }

  tree attach_list = NULL_TREE;
  tree *attach_tail = &attach_list;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;
      bool move_attach = false;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_FIRSTPRIVATE:
          if ((ctx->region_type & ORT_TARGET)
              && (ctx->region_type & ORT_ACC) == 0
              && TYPE_ATOMIC (strip_array_types
                                        (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
            {
              error_at (OMP_CLAUSE_LOCATION (c),
                        "%<_Atomic%> %qD in %<firstprivate%> clause on "
                        "%<target%> construct", OMP_CLAUSE_DECL (c));
              remove = true;
              break;
            }
          if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
            {
              decl = OMP_CLAUSE_DECL (c);
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              if ((n->value & GOVD_MAP) != 0)
                {
                  remove = true;
                  break;
                }
              OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
              OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
            }
          /* FALLTHRU */
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          remove = !(n->value & GOVD_SEEN);
          if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
              && code == OMP_PARALLEL
              && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
            remove = true;
          if (! remove)
            {
              bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
              if ((n->value & GOVD_DEBUG_PRIVATE)
                  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
                {
                  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
                              || ((n->value & GOVD_DATA_SHARE_CLASS)
                                  == GOVD_SHARED));
                  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
                  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
                }
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                  && ctx->has_depend
                  && DECL_P (decl))
                n->value |= GOVD_WRITTEN;
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                  && (n->value & GOVD_WRITTEN) == 0
                  && DECL_P (decl)
                  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
                OMP_CLAUSE_SHARED_READONLY (c) = 1;
              else if (DECL_P (decl)
                       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                            && (n->value & GOVD_WRITTEN) != 0)
                           || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                               && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
                       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
                omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
            }
          else
            n->value &= ~GOVD_EXPLICIT;
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
             accurately reflect the presence of a FIRSTPRIVATE clause.  */
          decl = OMP_CLAUSE_DECL (c);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
            = (n->value & GOVD_FIRSTPRIVATE) != 0;
          if (code == OMP_DISTRIBUTE
              && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            {
              remove = true;
              error_at (OMP_CLAUSE_LOCATION (c),
                        "same variable used in %<firstprivate%> and "
                        "%<lastprivate%> clauses on %<distribute%> "
                        "construct");
            }
          if (!remove
              && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
              && DECL_P (decl)
              && omp_shared_to_firstprivate_optimizable_decl_p (decl))
            omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
          if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
            remove = true;
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (!is_global_var (decl))
            {
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              remove = n == NULL || !(n->value & GOVD_SEEN);
              if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
                {
                  struct gimplify_omp_ctx *octx;
                  if (n != NULL
                      && (n->value & (GOVD_DATA_SHARE_CLASS
                                      & ~GOVD_FIRSTPRIVATE)))
                    remove = true;
                  else
                    for (octx = ctx->outer_context; octx;
                         octx = octx->outer_context)
                      {
                        n = splay_tree_lookup (octx->variables,
                                               (splay_tree_key) decl);
                        if (n == NULL)
                          continue;
                        if (n->value & GOVD_LOCAL)
                          break;
                        /* We have to avoid assigning a shared variable
                           to itself when trying to add
                           __builtin_assume_aligned.  */
                        if (n->value & GOVD_SHARED)
                          {
                            remove = true;
                            break;
                          }
                      }
                }
            }
          else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            {
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
                remove = true;
            }
          break;

        case OMP_CLAUSE_HAS_DEVICE_ADDR:
          decl = OMP_CLAUSE_DECL (c);
          while (INDIRECT_REF_P (decl)
                 || TREE_CODE (decl) == ARRAY_REF)
            decl = TREE_OPERAND (decl, 0);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          remove = n == NULL || !(n->value & GOVD_SEEN);
          break;

        case OMP_CLAUSE_IS_DEVICE_PTR:
        case OMP_CLAUSE_NONTEMPORAL:
          decl = OMP_CLAUSE_DECL (c);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          remove = n == NULL || !(n->value & GOVD_SEEN);
          break;

        case OMP_CLAUSE_MAP:
          switch (OMP_CLAUSE_MAP_KIND (c))
            {
            case GOMP_MAP_PRESENT_ALLOC:
            case GOMP_MAP_PRESENT_TO:
            case GOMP_MAP_PRESENT_FROM:
            case GOMP_MAP_PRESENT_TOFROM:
              OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_PRESENT);
              break;
            default:
              break;
            }
          if (code == OMP_TARGET_EXIT_DATA
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
            {
              remove = true;
              break;
            }
          /* If we have a target region, we can push all the attaches to the
             end of the list (we may have standalone "attach" operations
             synthesized for GOMP_MAP_STRUCT nodes that must be processed
             after the attachment point AND the pointed-to block have been
             mapped).  If we have something else, e.g. "enter data", we need
             to keep "attach" nodes together with the previous node they
             attach to so that separate "exit data" operations work properly
             (see libgomp/target.c).  */
          if ((ctx->region_type & ORT_TARGET) != 0
              && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
                  || (OMP_CLAUSE_MAP_KIND (c)
                      == GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
            move_attach = true;
          decl = OMP_CLAUSE_DECL (c);
          /* Data clauses associated with reductions must be
             compatible with present_or_copy.  Warn and adjust the clause
             if that is not the case.  */
          if (ctx->region_type == ORT_ACC_PARALLEL
              || ctx->region_type == ORT_ACC_SERIAL)
            {
              tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
              n = NULL;

              if (DECL_P (t))
                n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

              if (n && (n->value & GOVD_REDUCTION))
                {
                  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

                  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
                  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
                      && kind != GOMP_MAP_FORCE_PRESENT
                      && kind != GOMP_MAP_POINTER)
                    {
                      warning_at (OMP_CLAUSE_LOCATION (c), 0,
                                  "incompatible data clause with reduction "
                                  "on %qE; promoting to %<present_or_copy%>",
                                  DECL_NAME (t));
                      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
                    }
                }
            }
          if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
              && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
            {
              remove = true;
              break;
            }
          if (!DECL_P (decl))
            {
              if ((ctx->region_type & ORT_TARGET) != 0
                  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
                {
                  if (INDIRECT_REF_P (decl)
                      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
                      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
                          == REFERENCE_TYPE))
                    decl = TREE_OPERAND (decl, 0);
                  if (TREE_CODE (decl) == COMPONENT_REF)
                    {
                      while (TREE_CODE (decl) == COMPONENT_REF)
                        decl = TREE_OPERAND (decl, 0);
                      if (DECL_P (decl))
                        {
                          n = splay_tree_lookup (ctx->variables,
                                                 (splay_tree_key) decl);
                          if (!(n->value & GOVD_SEEN))
                            remove = true;
                        }
                    }
                }
              break;
            }
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          if ((ctx->region_type & ORT_TARGET) != 0
              && !(n->value & GOVD_SEEN)
              && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
              && (!is_global_var (decl)
                  || !lookup_attribute ("omp declare target link",
                                        DECL_ATTRIBUTES (decl))))
            {
              remove = true;
              /* For struct element mapping, if the struct is never referenced
                 in the target block and none of the mappings has an always
                 modifier, remove all the struct element mappings, which
                 immediately follow the GOMP_MAP_STRUCT map clause.  */
              if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
                {
                  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
                  while (cnt--)
                    OMP_CLAUSE_CHAIN (c)
                      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
                }
            }
          else if (DECL_SIZE (decl)
                   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
                   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
                   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
                   && (OMP_CLAUSE_MAP_KIND (c)
                       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
            {
              /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here,
                 because for these, TREE_CODE (DECL_SIZE (decl)) will always
                 be INTEGER_CST.  */
              gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (INDIRECT_REF_P (decl2));
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              tree mem = build_simple_mem_ref (decl2);
              OMP_CLAUSE_DECL (c) = mem;
              OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
              if (ctx->outer_context)
                {
                  omp_notice_variable (ctx->outer_context, decl2, true);
                  omp_notice_variable (ctx->outer_context,
                                       OMP_CLAUSE_SIZE (c), true);
                }
              if (((ctx->region_type & ORT_TARGET) != 0
                   || !ctx->target_firstprivatize_array_bases)
                  && ((n->value & GOVD_SEEN) == 0
                      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
                {
                  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                              OMP_CLAUSE_MAP);
                  OMP_CLAUSE_DECL (nc) = decl;
                  OMP_CLAUSE_SIZE (nc) = size_zero_node;
                  if (ctx->target_firstprivatize_array_bases)
                    OMP_CLAUSE_SET_MAP_KIND (nc,
                                             GOMP_MAP_FIRSTPRIVATE_POINTER);
                  else
                    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
                  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
                  OMP_CLAUSE_CHAIN (c) = nc;
                  c = nc;
                }
            }
          else
            {
              if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
                OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
              gcc_assert ((n->value & GOVD_SEEN) == 0
                          || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
                              == 0));
            }
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE__CACHE_:
          decl = OMP_CLAUSE_DECL (c);
          if (!DECL_P (decl))
            break;
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (INDIRECT_REF_P (decl2));
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              tree mem = build_simple_mem_ref (decl2);
              OMP_CLAUSE_DECL (c) = mem;
              OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
              if (ctx->outer_context)
                {
                  omp_notice_variable (ctx->outer_context, decl2, true);
                  omp_notice_variable (ctx->outer_context,
                                       OMP_CLAUSE_SIZE (c), true);
                }
            }
          else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
            OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
          break;

        case OMP_CLAUSE_REDUCTION:
          if (OMP_CLAUSE_REDUCTION_INSCAN (c))
            {
              decl = OMP_CLAUSE_DECL (c);
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
                {
                  remove = true;
                  error_at (OMP_CLAUSE_LOCATION (c),
                            "%qD specified in %<inscan%> %<reduction%> clause "
                            "but not in %<scan%> directive clause", decl);
                  break;
                }
              has_inscan_reductions = true;
            }
          /* FALLTHRU */
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          /* OpenACC reductions need a present_or_copy data clause.
             Add one if necessary.  Emit an error when the reduction is
             private.  */
          if (ctx->region_type == ORT_ACC_PARALLEL
              || ctx->region_type == ORT_ACC_SERIAL)
            {
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
                {
                  remove = true;
                  error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
                            "reduction on %qE", DECL_NAME (decl));
                }
              else if ((n->value & GOVD_MAP) == 0)
                {
                  tree next = OMP_CLAUSE_CHAIN (c);
                  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
                  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
                  OMP_CLAUSE_DECL (nc) = decl;
                  OMP_CLAUSE_CHAIN (c) = nc;
                  lang_hooks.decls.omp_finish_clause (nc, pre_p,
                                                      (ctx->region_type
                                                       & ORT_ACC) != 0);
                  while (1)
                    {
                      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
                      if (OMP_CLAUSE_CHAIN (nc) == NULL)
                        break;
                      nc = OMP_CLAUSE_CHAIN (nc);
                    }
                  OMP_CLAUSE_CHAIN (nc) = next;
                  n->value |= GOVD_MAP;
                }
            }
          if (DECL_P (decl)
              && omp_shared_to_firstprivate_optimizable_decl_p (decl))
            omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
          break;

        case OMP_CLAUSE_ALLOCATE:
          decl = OMP_CLAUSE_DECL (c);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          if (n != NULL && !(n->value & GOVD_SEEN))
            {
              if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
                  != 0
                  && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
                remove = true;
            }
          if (!remove
              && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
              && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
              && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
                  || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
                  || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
            {
              tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
              n = splay_tree_lookup (ctx->variables,
                                     (splay_tree_key) allocator);
              if (n == NULL)
                {
                  enum omp_clause_default_kind default_kind
                    = ctx->default_kind;
                  ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
                                       true);
                  ctx->default_kind = default_kind;
                }
              else
                omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
                                     true);
            }
          break;

        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_SELF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_NUM_TEAMS:
        case OMP_CLAUSE_THREAD_LIMIT:
        case OMP_CLAUSE_DIST_SCHEDULE:
        case OMP_CLAUSE_DEVICE:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_UNTIED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_FINAL:
        case OMP_CLAUSE_MERGEABLE:
        case OMP_CLAUSE_PROC_BIND:
        case OMP_CLAUSE_SAFELEN:
        case OMP_CLAUSE_SIMDLEN:
        case OMP_CLAUSE_DEPEND:
        case OMP_CLAUSE_DOACROSS:
        case OMP_CLAUSE_PRIORITY:
        case OMP_CLAUSE_GRAINSIZE:
        case OMP_CLAUSE_NUM_TASKS:
        case OMP_CLAUSE_NOGROUP:
        case OMP_CLAUSE_THREADS:
        case OMP_CLAUSE_SIMD:
        case OMP_CLAUSE_FILTER:
        case OMP_CLAUSE_HINT:
        case OMP_CLAUSE_DEFAULTMAP:
        case OMP_CLAUSE_ORDER:
        case OMP_CLAUSE_BIND:
        case OMP_CLAUSE_DETACH:
        case OMP_CLAUSE_USE_DEVICE_PTR:
        case OMP_CLAUSE_USE_DEVICE_ADDR:
        case OMP_CLAUSE_ASYNC:
        case OMP_CLAUSE_WAIT:
        case OMP_CLAUSE_INDEPENDENT:
        case OMP_CLAUSE_NUM_GANGS:
        case OMP_CLAUSE_NUM_WORKERS:
        case OMP_CLAUSE_VECTOR_LENGTH:
        case OMP_CLAUSE_GANG:
        case OMP_CLAUSE_WORKER:
        case OMP_CLAUSE_VECTOR:
        case OMP_CLAUSE_AUTO:
        case OMP_CLAUSE_SEQ:
        case OMP_CLAUSE_TILE:
        case OMP_CLAUSE_IF_PRESENT:
        case OMP_CLAUSE_FINALIZE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          break;

        case OMP_CLAUSE_NOHOST:
        default:
          gcc_unreachable ();
        }

      if (remove)
        *list_p = OMP_CLAUSE_CHAIN (c);
      else if (move_attach)
        {
          /* Remove attach node from here, separate out into its own list.  */
          *attach_tail = c;
          *list_p = OMP_CLAUSE_CHAIN (c);
          OMP_CLAUSE_CHAIN (c) = NULL_TREE;
          attach_tail = &OMP_CLAUSE_CHAIN (c);
        }
      else
        list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Splice attach nodes at the end of the list.  */
  if (attach_list)
    {
      *list_p = attach_list;
      list_p = attach_tail;
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
    {
      /* OpenMP.  Implicit clauses are added at the start of the clause list,
         but after any non-map clauses.  */
      tree *implicit_add_list_p = orig_list_p;
      while (*implicit_add_list_p
             && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
        implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
      data.list_p = implicit_add_list_p;
    }
  else
    /* OpenACC.  */
    data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  if (has_inscan_reductions)
    for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
          && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
        {
          error_at (OMP_CLAUSE_LOCATION (c),
                    "%<inscan%> %<reduction%> clause used together with "
                    "%<linear%> clause for a variable other than loop "
                    "iterator");
          break;
        }

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
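
/* A small worked example for the adjustment above (a sketch): for

     int a = 1, b = 2;
     #pragma omp parallel private(a) shared(b)
     use (b);

   a is never seen in the body, so its private clause is removed, while
   the shared clause for b survives and, because the body only reads b,
   may additionally get OMP_CLAUSE_SHARED_READONLY set, enabling the
   shared-to-firstprivate optimization later on.  */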

/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
   -1 if unknown yet (simd is involved, won't be known until vectorization)
   and 1 if they do.  If SCORES is non-NULL, it should point to an array
   of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
   of the CONSTRUCTS (position -1 if it will never match) followed by
   number of constructs in the OpenMP context construct trait.  If the
   score depends on whether it will be in a declare simd clone or not,
   the function returns 2 and there will be two sets of the scores, the
   first one for the case that it is not in a declare simd clone, the
   other that it is in a declare simd clone.  */

int
omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
                                int *scores)
{
  int matched = 0, cnt = 0;
  bool simd_seen = false;
  bool target_seen = false;
  int declare_simd_cnt = -1;
  auto_vec<enum tree_code, 16> codes;
  for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
    {
      if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
          || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
              == ORT_TARGET && ctx->code == OMP_TARGET)
          || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
          || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
          || (ctx->region_type == ORT_SIMD
              && ctx->code == OMP_SIMD
              && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
        {
          ++cnt;
          if (scores)
            codes.safe_push (ctx->code);
          else if (matched < nconstructs && ctx->code == constructs[matched])
            {
              if (ctx->code == OMP_SIMD)
                {
                  if (matched)
                    return 0;
                  simd_seen = true;
                }
              ++matched;
            }
          if (ctx->code == OMP_TARGET)
            {
              if (scores == NULL)
                return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
              target_seen = true;
              break;
            }
        }
      else if (ctx->region_type == ORT_WORKSHARE
               && ctx->code == OMP_LOOP
               && ctx->outer_context
               && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
               && ctx->outer_context->outer_context
               && ctx->outer_context->outer_context->code == OMP_LOOP
               && ctx->outer_context->outer_context->distribute)
        ctx = ctx->outer_context->outer_context;
      ctx = ctx->outer_context;
    }
  if (!target_seen
      && lookup_attribute ("omp declare simd",
                           DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Declare simd is a maybe case; it is supposed to be added only to
         the clones created by omp-simd-clone.cc and not to the base
         function.  */
      declare_simd_cnt = cnt++;
      if (scores)
        codes.safe_push (OMP_SIMD);
      else if (cnt == 0
               && constructs[0] == OMP_SIMD)
        {
          gcc_assert (matched == 0);
          simd_seen = true;
          if (++matched == nconstructs)
            return -1;
        }
    }
  if (tree attr = lookup_attribute ("omp declare variant variant",
                                    DECL_ATTRIBUTES (current_function_decl)))
    {
      enum tree_code variant_constructs[5];
      int variant_nconstructs = 0;
      if (!target_seen)
        variant_nconstructs
          = omp_constructor_traits_to_codes (TREE_VALUE (attr),
                                             variant_constructs);
      for (int i = 0; i < variant_nconstructs; i++)
        {
          ++cnt;
          if (scores)
            codes.safe_push (variant_constructs[i]);
          else if (matched < nconstructs
                   && variant_constructs[i] == constructs[matched])
            {
              if (variant_constructs[i] == OMP_SIMD)
                {
                  if (matched)
                    return 0;
                  simd_seen = true;
                }
              ++matched;
            }
        }
    }
  if (!target_seen
      && lookup_attribute ("omp declare target block",
                           DECL_ATTRIBUTES (current_function_decl)))
    {
      if (scores)
        codes.safe_push (OMP_TARGET);
      else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
        ++matched;
    }
  if (scores)
    {
      for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
        {
          int j = codes.length () - 1;
          for (int i = nconstructs - 1; i >= 0; i--)
            {
              while (j >= 0
                     && (pass != 0 || declare_simd_cnt != j)
                     && constructs[i] != codes[j])
                --j;
              if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
                *scores++ = j - 1;
              else
                *scores++ = j;
            }
          *scores++ = ((pass == 0 && declare_simd_cnt != -1)
                       ? codes.length () - 1 : codes.length ());
        }
      return declare_simd_cnt == -1 ? 1 : 2;
    }
  if (matched == nconstructs)
    return simd_seen ? -1 : 1;
  return 0;
}
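
/* A sketch of the matching done above: inside

     #pragma omp target
     #pragma omp teams
     #pragma omp parallel
     { ... }

   the context construct trait is {target, teams, parallel}; a selector
   such as construct={target, parallel} matches as an ordered
   subsequence and yields 1, whereas one naming the constructs in the
   wrong nesting order does not match and yields 0.  If simd
   participates in the match, the result is -1, since whether the simd
   clone is used is only known at vectorization time.  */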

/* Gimplify OACC_CACHE.  */

static void
gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;

  gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
                             OACC_CACHE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
                               OACC_CACHE);

  /* TODO: Do something sensible with this information.  */

  *expr_p = NULL_TREE;
}

/* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
   if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
   kind.  The entry kind will replace the one in CLAUSE, while the exit
   kind will be used in a new omp_clause and returned to the caller.  */

static tree
gimplify_oacc_declare_1 (tree clause)
{
  HOST_WIDE_INT kind, new_op;
  bool ret = false;
  tree c = NULL;

  kind = OMP_CLAUSE_MAP_KIND (clause);

  switch (kind)
    {
    case GOMP_MAP_ALLOC:
      new_op = GOMP_MAP_RELEASE;
      ret = true;
      break;

    case GOMP_MAP_FROM:
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_TOFROM:
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
      new_op = GOMP_MAP_FROM;
      ret = true;
      break;

    case GOMP_MAP_DEVICE_RESIDENT:
    case GOMP_MAP_FORCE_DEVICEPTR:
    case GOMP_MAP_FORCE_PRESENT:
    case GOMP_MAP_LINK:
    case GOMP_MAP_POINTER:
    case GOMP_MAP_TO:
      break;

    default:
      gcc_unreachable ();
      break;
    }

  if (ret)
    {
      c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
      OMP_CLAUSE_SET_MAP_KIND (c, new_op);
      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
    }

  return c;
}
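
/* Summary of the entry/exit translation above:

     original kind      entry kind             exit kind (returned)
     GOMP_MAP_ALLOC     GOMP_MAP_ALLOC         GOMP_MAP_RELEASE
     GOMP_MAP_FROM      GOMP_MAP_FORCE_ALLOC   GOMP_MAP_FROM
     GOMP_MAP_TOFROM    GOMP_MAP_TO            GOMP_MAP_FROM

   so e.g. '#pragma acc declare copyout (x)' (GOMP_MAP_FROM) allocates
   x on entry to the scope and copies it back to the host on exit.  */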

/* Gimplify OACC_DECLARE.  */

static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      if (TREE_CODE (decl) == MEM_REF)
        decl = TREE_OPERAND (decl, 0);

      if (VAR_P (decl) && !is_oacc_declared (decl))
        {
          tree attr = get_identifier ("oacc declare target");
          DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
                                              DECL_ATTRIBUTES (decl));
        }

      if (VAR_P (decl)
          && !is_global_var (decl)
          && DECL_CONTEXT (decl) == current_function_decl)
        {
          tree c = gimplify_oacc_declare_1 (t);
          if (c)
            {
              if (oacc_declare_returns == NULL)
                oacc_declare_returns = new hash_map<tree, tree>;

              oacc_declare_returns->put (decl, c);
            }
        }

      if (gimplify_omp_ctxp)
        omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
                                  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
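
/* Usage note (a sketch): the exit clauses stashed in
   oacc_declare_returns above are looked up again during BIND_EXPR
   gimplification elsewhere in this file, so that the 'exit' half of
   each declare mapping is emitted when the enclosing scope ends.  */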

/* Gimplify the contents of an OMP_PARALLEL statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;

  gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
                             OMP_PARALLEL_COMBINED (expr)
                             ? ORT_COMBINED_PARALLEL
                             : ORT_PARALLEL, OMP_PARALLEL);

  push_gimplify_context ();

  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
  if (gimple_code (g) == GIMPLE_BIND)
    pop_gimplify_context (g);
  else
    pop_gimplify_context (NULL);

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
                               OMP_PARALLEL);

  g = gimple_build_omp_parallel (body,
                                 OMP_PARALLEL_CLAUSES (expr),
                                 NULL_TREE, NULL_TREE);
  if (OMP_PARALLEL_COMBINED (expr))
    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
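
/* A rough before/after for the function above:

     #pragma omp parallel num_threads (4)
     x = f ();

   becomes approximately

     #pragma omp parallel num_threads(4) shared(x)   <GIMPLE_OMP_PARALLEL>
       {
         x = f ();
       }

   i.e. the body is gimplified into a GIMPLE sequence and the clause
   list is completed with the implicit data sharing (here shared(x),
   assuming x is a pre-existing variable) computed while scanning the
   body.  */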

/* Gimplify the contents of an OMP_TASK statement.  This involves
   gimplification of the body, as well as scanning the body for used
   variables.  We need to do this scan now, because variable-sized
   decls will be decomposed during gimplification.  */

static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *g;
  gimple_seq body = NULL;
  bool nowait = false;
  bool has_depend = false;

  if (OMP_TASK_BODY (expr) == NULL_TREE)
    {
      for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
          {
            has_depend = true;
            if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
              {
                error_at (OMP_CLAUSE_LOCATION (c),
                          "%<mutexinoutset%> kind in %<depend%> clause on a "
                          "%<taskwait%> construct");
                break;
              }
          }
        else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NOWAIT)
          nowait = true;
      if (nowait && !has_depend)
        {
          error_at (EXPR_LOCATION (expr),
                    "%<taskwait%> construct with %<nowait%> clause but no "
                    "%<depend%> clauses");
          *expr_p = NULL_TREE;
          return;
        }
    }

  gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
                             omp_find_clause (OMP_TASK_CLAUSES (expr),
                                              OMP_CLAUSE_UNTIED)
                             ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);

  if (OMP_TASK_BODY (expr))
    {
      push_gimplify_context ();

      g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
        pop_gimplify_context (g);
      else
        pop_gimplify_context (NULL);
    }

  gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
                               OMP_TASK);

  g = gimple_build_omp_task (body,
                             OMP_TASK_CLAUSES (expr),
                             NULL_TREE, NULL_TREE,
                             NULL_TREE, NULL_TREE, NULL_TREE);
  if (OMP_TASK_BODY (expr) == NULL_TREE)
    gimple_omp_task_set_taskwait_p (g, true);
  gimplify_seq_add_stmt (pre_p, g);
  *expr_p = NULL_TREE;
}
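
/* Example of the body-less case handled above:

     #pragma omp taskwait depend(in: x) nowait

   has no body and carries a depend clause, so it is lowered to a
   GIMPLE_OMP_TASK with taskwait_p set, whereas a plain
   '#pragma omp taskwait nowait' without any depend clause is rejected
   with the error above.  */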

/* Helper function for gimplify_omp_for.  If *TP is not a gimple constant,
   force it into a temporary initialized in PRE_P and add firstprivate clause
   to ORIG_FOR_STMT.  */

static void
gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
                            tree orig_for_stmt)
{
  if (*tp == NULL || is_gimple_constant (*tp))
    return;

  *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
  /* Reference to pointer conversion is considered useless,
     but is significant for firstprivate clause.  Force it
     here.  */
  if (type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
    {
      tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
      tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
      gimplify_and_add (m, pre_p);
      *tp = v;
    }

  tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
  OMP_CLAUSE_DECL (c) = *tp;
  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
  OMP_FOR_CLAUSES (orig_for_stmt) = c;
}
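
/* For instance (a sketch), given

     #pragma omp taskloop
     for (i = 0; i < f (); i++)

   the call f () is evaluated into a temporary before the taskloop and
   that temporary is made firstprivate on it, so the created tasks see
   a stable copy of the bound.  */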

/* Helper function of gimplify_omp_for, find OMP_ORDERED with
   null OMP_ORDERED_BODY inside of OMP_FOR's body.  */

static tree
find_standalone_omp_ordered (tree *tp, int *walk_subtrees, void *)
{
  switch (TREE_CODE (*tp))
    {
    case OMP_ORDERED:
      if (OMP_ORDERED_BODY (*tp) == NULL_TREE)
        return *tp;
      break;
    case OMP_SIMD:
    case OMP_PARALLEL:
    case OMP_TARGET:
      *walk_subtrees = 0;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
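
/* A stand-alone ordered construct is one without a body, e.g. (sketch)

     #pragma omp ordered depend(sink: i - 1)

   inside a loop with an ordered(n) clause.  Subtrees of nested simd,
   parallel and target constructs are skipped since a stand-alone
   ordered found there does not bind to this loop.  */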

/* Gimplify the gross structure of an OMP_FOR statement.  */

static enum gimplify_status
gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
  enum gimplify_status ret = GS_ALL_DONE;
  enum gimplify_status tret;
  gomp_for *gfor;
  gimple_seq for_body, for_pre_body;
  int i;
  bitmap has_decl_expr = NULL;
  enum omp_region_type ort = ORT_WORKSHARE;
  bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;

  orig_for_stmt = for_stmt = *expr_p;

  bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
                 != NULL_TREE);
  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    {
      tree *data[4] = { NULL, NULL, NULL, NULL };
      gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
      inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
                                  find_combined_omp_for, data, NULL);
      if (inner_for_stmt == NULL_TREE)
        {
          gcc_assert (seen_error ());
          *expr_p = NULL_TREE;
          return GS_ERROR;
        }
      if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
        {
          append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
                                          &OMP_FOR_PRE_BODY (for_stmt));
          OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
        }
      if (OMP_FOR_PRE_BODY (inner_for_stmt))
        {
          append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
                                          &OMP_FOR_PRE_BODY (for_stmt));
          OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
        }

      if (data[0])
        {
          /* We have some statements or variable declarations in between
             the composite construct directives.  Move them around the
             inner_for_stmt.  */
          data[0] = expr_p;
          for (i = 0; i < 3; i++)
            if (data[i])
              {
                tree t = *data[i];
                if (i < 2 && data[i + 1] == &OMP_BODY (t))
                  data[i + 1] = data[i];
                *data[i] = OMP_BODY (t);
                tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
                                    NULL_TREE, make_node (BLOCK));
                OMP_BODY (t) = body;
                append_to_statement_list_force (inner_for_stmt,
                                                &BIND_EXPR_BODY (body));
                *data[3] = t;
                data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
                gcc_assert (*data[3] == inner_for_stmt);
              }
          return GS_OK;
        }

      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
        if (!loop_p
            && OMP_FOR_ORIG_DECLS (inner_for_stmt)
            && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
                                        i)) == TREE_LIST
            && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
                                           i)))
          {
            tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
            /* Class iterators aren't allowed on OMP_SIMD, so the only
               case we need to solve is distribute parallel for.  They are
               allowed on the loop construct, but that is already handled
               in gimplify_omp_loop.  */
            gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
                        && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
                        && data[1]);
            tree orig_decl = TREE_PURPOSE (orig);
            tree last = TREE_VALUE (orig);
            tree *pc;
            for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
                 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
              if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
                   || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
                  && OMP_CLAUSE_DECL (*pc) == orig_decl)
                break;
            if (*pc == NULL_TREE)
              {
                tree *spc;
                for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
                     *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
                  if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
                      && OMP_CLAUSE_DECL (*spc) == orig_decl)
                    break;
                if (*spc)
                  {
                    tree c = *spc;
                    *spc = OMP_CLAUSE_CHAIN (c);
                    OMP_CLAUSE_CHAIN (c) = NULL_TREE;
                    *pc = c;
                  }
              }
            if (*pc == NULL_TREE)
              ;
            else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
              {
                /* private clause will appear only on inner_for_stmt.
                   Change it into firstprivate, and add private clause
                   on for_stmt.  */
                tree c = copy_node (*pc);
                OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
                OMP_FOR_CLAUSES (for_stmt) = c;
                OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
                lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
              }
            else
              {
                /* lastprivate clause will appear on both inner_for_stmt
                   and for_stmt.  Add firstprivate clause to
                   inner_for_stmt.  */
                tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
                                           OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
                OMP_CLAUSE_CHAIN (c) = *pc;
                *pc = c;
                lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
              }
            tree c = build_omp_clause (UNKNOWN_LOCATION,
                                       OMP_CLAUSE_FIRSTPRIVATE);
            OMP_CLAUSE_DECL (c) = last;
            OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
            OMP_PARALLEL_CLAUSES (*data[1]) = c;
            c = build_omp_clause (UNKNOWN_LOCATION,
                                  *pc ? OMP_CLAUSE_SHARED
                                      : OMP_CLAUSE_FIRSTPRIVATE);
            OMP_CLAUSE_DECL (c) = orig_decl;
            OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
            OMP_PARALLEL_CLAUSES (*data[1]) = c;
          }
      /* Similarly, take care of C++ range for temporaries, those should
         be firstprivate on OMP_PARALLEL if any.  */
      if (data[1])
        for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
          if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
              && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
                                          i)) == TREE_LIST
              && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
                                           i)))
            {
              tree orig
                = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
              tree v = TREE_CHAIN (orig);
              tree c = build_omp_clause (UNKNOWN_LOCATION,
                                         OMP_CLAUSE_FIRSTPRIVATE);
              /* First add firstprivate clause for the __for_end artificial
                 decl.  */
              OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
              if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
                  == REFERENCE_TYPE)
                OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
              OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
              OMP_PARALLEL_CLAUSES (*data[1]) = c;
              if (TREE_VEC_ELT (v, 0))
                {
                  /* And now the same for __for_range artificial decl if it
                     exists.  */
                  c = build_omp_clause (UNKNOWN_LOCATION,
                                        OMP_CLAUSE_FIRSTPRIVATE);
                  OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
                  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
                      == REFERENCE_TYPE)
                    OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
                  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
                  OMP_PARALLEL_CLAUSES (*data[1]) = c;
                }
            }
    }

  switch (TREE_CODE (for_stmt))
    {
    case OMP_FOR:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
        {
          if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                               OMP_CLAUSE_SCHEDULE))
            error_at (EXPR_LOCATION (for_stmt),
                      "%qs clause may not appear on non-rectangular %qs",
                      "schedule", lang_GNU_Fortran () ? "do" : "for");
          if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
            error_at (EXPR_LOCATION (for_stmt),
                      "%qs clause may not appear on non-rectangular %qs",
                      "ordered", lang_GNU_Fortran () ? "do" : "for");
        }
      break;
    case OMP_DISTRIBUTE:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
          && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                              OMP_CLAUSE_DIST_SCHEDULE))
        error_at (EXPR_LOCATION (for_stmt),
                  "%qs clause may not appear on non-rectangular %qs",
                  "dist_schedule", "distribute");
      break;
    case OACC_LOOP:
      ort = ORT_ACC;
      break;
    case OMP_TASKLOOP:
      if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
        {
          if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                               OMP_CLAUSE_GRAINSIZE))
            error_at (EXPR_LOCATION (for_stmt),
                      "%qs clause may not appear on non-rectangular %qs",
                      "grainsize", "taskloop");
          if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                               OMP_CLAUSE_NUM_TASKS))
            error_at (EXPR_LOCATION (for_stmt),
                      "%qs clause may not appear on non-rectangular %qs",
                      "num_tasks", "taskloop");
        }
      if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
        ort = ORT_UNTIED_TASKLOOP;
      else
        ort = ORT_TASKLOOP;
      break;
    case OMP_SIMD:
      ort = ORT_SIMD;
      break;
    default:
      gcc_unreachable ();
    }
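
  /* A non-rectangular loop nest is one where an inner bound depends on
     an outer iterator, e.g. (sketch)

        #pragma omp for collapse(2)
        for (i = 0; i < n; i++)
          for (j = i; j < n; j++)

     which is why schedule, ordered, dist_schedule, grainsize and
     num_tasks were rejected above for such nests.  */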

  /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
     clause for the IV.  */
  if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
            && OMP_CLAUSE_DECL (c) == decl)
          {
            OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
            break;
          }
    }
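
  /* E.g. for `#pragma omp simd linear(i)' over `for (i = k; ...)', the
     IV gets its initial value from the loop init anyway, so copying the
     outer value in on entry would be redundant; hence NO_COPYIN.  */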

  if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
    gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
                               loop_p && TREE_CODE (for_stmt) != OMP_SIMD
                               ? OMP_LOOP : TREE_CODE (for_stmt));

  if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
    gimplify_omp_ctxp->distribute = true;

  /* Handle OMP_FOR_INIT.  */
  for_pre_body = NULL;
  if ((ort == ORT_SIMD
       || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
      && OMP_FOR_PRE_BODY (for_stmt))
    {
      has_decl_expr = BITMAP_ALLOC (NULL);
      if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
          && VAR_P (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))))
        {
          t = OMP_FOR_PRE_BODY (for_stmt);
          bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
        }
      else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
        {
          tree_stmt_iterator si;
          for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
               tsi_next (&si))
            {
              t = tsi_stmt (si);
              if (TREE_CODE (t) == DECL_EXPR
                  && VAR_P (DECL_EXPR_DECL (t)))
                bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
            }
        }
    }
  if (OMP_FOR_PRE_BODY (for_stmt))
    {
      if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
        gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
      else
        {
          struct gimplify_omp_ctx ctx;
          memset (&ctx, 0, sizeof (ctx));
          ctx.region_type = ORT_NONE;
          gimplify_omp_ctxp = &ctx;
          gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
          gimplify_omp_ctxp = NULL;
        }
    }
  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;

  if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
    for_stmt = inner_for_stmt;

  /* For taskloop, need to gimplify the start, end and step before the
     taskloop, outside of the taskloop omp context.  */
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
        {
          t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
          gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
                                   ? pre_p : &for_pre_body);
          tree type = TREE_TYPE (TREE_OPERAND (t, 0));
          if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
            {
              tree v = TREE_OPERAND (t, 1);
              gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
                                          for_pre_p, orig_for_stmt);
              gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
                                          for_pre_p, orig_for_stmt);
            }
          else
            gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
                                        orig_for_stmt);

          /* Handle OMP_FOR_COND.  */
          t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
          if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
            {
              tree v = TREE_OPERAND (t, 1);
              gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
                                          for_pre_p, orig_for_stmt);
              gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
                                          for_pre_p, orig_for_stmt);
            }
          else
            gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
                                        orig_for_stmt);

          /* Handle OMP_FOR_INCR.  */
          t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
          if (TREE_CODE (t) == MODIFY_EXPR)
            {
              decl = TREE_OPERAND (t, 0);
              t = TREE_OPERAND (t, 1);
              tree *tp = &TREE_OPERAND (t, 1);
              if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
                tp = &TREE_OPERAND (t, 0);

              gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
                                          orig_for_stmt);
            }
        }

      gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
                                 OMP_TASKLOOP);
    }

  if (orig_for_stmt != for_stmt)
    gimplify_omp_ctxp->combined_loop = true;

  for_body = NULL;
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
              == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
  gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
              == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));

  tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
  bool is_doacross = false;
  if (c && walk_tree_without_duplicates (&OMP_FOR_BODY (for_stmt),
                                         find_standalone_omp_ordered, NULL))
    {
      OMP_CLAUSE_ORDERED_DOACROSS (c) = 1;
      is_doacross = true;
      int len = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
      gimplify_omp_ctxp->loop_iter_var.create (len * 2);
      for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
        if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LINEAR)
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "%<linear%> clause may not be specified together "
                      "with %<ordered%> clause if stand-alone %<ordered%> "
                      "construct is nested in it");
            *pc = OMP_CLAUSE_CHAIN (*pc);
          }
        else
          pc = &OMP_CLAUSE_CHAIN (*pc);
    }
  int collapse = 1, tile = 0;
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
  if (c)
    collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
  if (c)
    tile = list_length (OMP_CLAUSE_TILE_LIST (c));
  c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
  hash_set<tree> *allocate_uids = NULL;
  if (c)
    {
      allocate_uids = new hash_set<tree>;
      for (; c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
          allocate_uids->add (OMP_CLAUSE_DECL (c));
    }
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
      decl = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (decl));
      gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
                  || POINTER_TYPE_P (TREE_TYPE (decl)));
      if (is_doacross)
        {
          if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
            {
              tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
              if (TREE_CODE (orig_decl) == TREE_LIST)
                {
                  orig_decl = TREE_PURPOSE (orig_decl);
                  if (!orig_decl)
                    orig_decl = decl;
                }
              gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
            }
          else
            gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
          gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
        }

      if (for_stmt == orig_for_stmt)
        {
          tree orig_decl = decl;
          if (OMP_FOR_ORIG_DECLS (for_stmt))
            {
              orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
              if (TREE_CODE (orig_decl) == TREE_LIST)
                {
                  orig_decl = TREE_PURPOSE (orig_decl);
                  if (!orig_decl)
                    orig_decl = decl;
                }
            }
          if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
            error_at (EXPR_LOCATION (for_stmt),
                      "threadprivate iteration variable %qD", orig_decl);
        }

      /* Make sure the iteration variable is private.  */
      tree c = NULL_TREE;
      tree c2 = NULL_TREE;
      if (orig_for_stmt != for_stmt)
        {
          /* Preserve this information until we gimplify the inner simd.  */
          if (has_decl_expr
              && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
            TREE_PRIVATE (t) = 1;
        }
      else if (ort == ORT_SIMD)
        {
          splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
                                                 (splay_tree_key) decl);
          omp_is_private (gimplify_omp_ctxp, decl,
                          1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
                               != 1));
          if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
            {
              omp_notice_variable (gimplify_omp_ctxp, decl, true);
              if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
                for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                                                OMP_CLAUSE_LASTPRIVATE);
                     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
                                               OMP_CLAUSE_LASTPRIVATE))
                  if (OMP_CLAUSE_DECL (c3) == decl)
                    {
                      warning_at (OMP_CLAUSE_LOCATION (c3), 0,
                                  "conditional %<lastprivate%> on loop "
                                  "iterator %qD ignored", decl);
                      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
                      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
                    }
            }
          else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
            {
              c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
              OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
              unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
              if ((has_decl_expr
                   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
                  || TREE_PRIVATE (t))
                {
                  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
                  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
                }
              struct gimplify_omp_ctx *outer
                = gimplify_omp_ctxp->outer_context;
              if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
                {
                  if (outer->region_type == ORT_WORKSHARE
                      && outer->combined_loop)
                    {
                      n = splay_tree_lookup (outer->variables,
                                             (splay_tree_key) decl);
                      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
                        {
                          OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
                          flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
                        }
                      else
                        {
                          struct gimplify_omp_ctx *octx = outer->outer_context;
                          if (octx
                              && octx->region_type == ORT_COMBINED_PARALLEL
                              && octx->outer_context
                              && (octx->outer_context->region_type
                                  == ORT_WORKSHARE)
                              && octx->outer_context->combined_loop)
                            {
                              octx = octx->outer_context;
                              n = splay_tree_lookup (octx->variables,
                                                     (splay_tree_key) decl);
                              if (n != NULL && (n->value & GOVD_LOCAL) != 0)
                                {
                                  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
                                  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
                                }
                            }
                        }
                    }
                }

              OMP_CLAUSE_DECL (c) = decl;
              OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
              OMP_FOR_CLAUSES (for_stmt) = c;
              omp_add_variable (gimplify_omp_ctxp, decl, flags);
              if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
                omp_lastprivate_for_combined_outer_constructs (outer, decl,
                                                               true);
            }
          else
            {
              bool lastprivate
                = (!has_decl_expr
                   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
              if (TREE_PRIVATE (t))
                lastprivate = false;
              if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
                {
                  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
                  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
                    lastprivate = false;
                }

              struct gimplify_omp_ctx *outer
                = gimplify_omp_ctxp->outer_context;
              if (outer && lastprivate)
                omp_lastprivate_for_combined_outer_constructs (outer, decl,
                                                               true);

              c = build_omp_clause (input_location,
                                    lastprivate ? OMP_CLAUSE_LASTPRIVATE
                                                : OMP_CLAUSE_PRIVATE);
              OMP_CLAUSE_DECL (c) = decl;
              OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
              OMP_FOR_CLAUSES (for_stmt) = c;
              omp_add_variable (gimplify_omp_ctxp, decl,
                                (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
                                | GOVD_EXPLICIT | GOVD_SEEN);
              c = NULL_TREE;
            }
        }
      else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
        {
          omp_notice_variable (gimplify_omp_ctxp, decl, true);
          splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
                                                 (splay_tree_key) decl);
          if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
            for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
                                            OMP_CLAUSE_LASTPRIVATE);
                 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
                                           OMP_CLAUSE_LASTPRIVATE))
              if (OMP_CLAUSE_DECL (c3) == decl)
                {
                  warning_at (OMP_CLAUSE_LOCATION (c3), 0,
                              "conditional %<lastprivate%> on loop "
                              "iterator %qD ignored", decl);
                  OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
                  n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
                }
        }
      else
        omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);

      /* If DECL is not a gimple register, create a temporary variable to act
         as an iteration counter.  This is valid, since DECL cannot be
         modified in the body of the loop.  Similarly for any iteration vars
         in simd with collapse > 1 where the iterator vars must be
         lastprivate.  And similarly for vars mentioned in allocate clauses.  */
      if (orig_for_stmt != for_stmt)
        var = decl;
      else if (!is_gimple_reg (decl)
               || (ort == ORT_SIMD
                   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
               || (allocate_uids && allocate_uids->contains (decl)))
        {
          struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
          /* Make sure omp_add_variable is not called on it prematurely.
             We call it ourselves a few lines later.  */
          gimplify_omp_ctxp = NULL;
          var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
          gimplify_omp_ctxp = ctx;
          TREE_OPERAND (t, 0) = var;

          gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));

          if (ort == ORT_SIMD
              && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
            {
              c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
              OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
              OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
              OMP_CLAUSE_DECL (c2) = var;
              OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
              OMP_FOR_CLAUSES (for_stmt) = c2;
              omp_add_variable (gimplify_omp_ctxp, var,
                                GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
              if (c == NULL_TREE)
                {
                  c = c2;
                  c2 = NULL_TREE;
                }
            }
          else
            omp_add_variable (gimplify_omp_ctxp, var,
                              GOVD_PRIVATE | GOVD_SEEN);
        }
      else
        var = decl;

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
        {
          tree lb = TREE_OPERAND (t, 1);
          tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
          ret = MIN (ret, tret);
          tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
        }
      else
        tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                              is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);
      if (ret == GS_ERROR)
        return ret;

      /* Handle OMP_FOR_COND.  */
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gcc_assert (COMPARISON_CLASS_P (t));
      gcc_assert (TREE_OPERAND (t, 0) == decl);

      gimplify_omp_ctxp->in_for_exprs = true;
      if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
        {
          tree ub = TREE_OPERAND (t, 1);
          tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
          ret = MIN (ret, tret);
          tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
        }
      else
        tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                              is_gimple_val, fb_rvalue, false);
      gimplify_omp_ctxp->in_for_exprs = false;
      ret = MIN (ret, tret);

      /* Handle OMP_FOR_INCR.  */
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      switch (TREE_CODE (t))
        {
        case PREINCREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          {
            tree decl = TREE_OPERAND (t, 0);
            /* c_omp_for_incr_canonicalize_ptr () should have been
               called to massage things appropriately.  */
            gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));

            if (orig_for_stmt != for_stmt)
              break;
            t = build_int_cst (TREE_TYPE (decl), 1);
            if (c)
              OMP_CLAUSE_LINEAR_STEP (c) = t;
            t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
            t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
            TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
            break;
          }

        case PREDECREMENT_EXPR:
        case POSTDECREMENT_EXPR:
          /* c_omp_for_incr_canonicalize_ptr () should have been
             called to massage things appropriately.  */
          gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
          if (orig_for_stmt != for_stmt)
            break;
          t = build_int_cst (TREE_TYPE (decl), -1);
          if (c)
            OMP_CLAUSE_LINEAR_STEP (c) = t;
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
          break;

        case MODIFY_EXPR:
          gcc_assert (TREE_OPERAND (t, 0) == decl);
          TREE_OPERAND (t, 0) = var;

          t = TREE_OPERAND (t, 1);
          switch (TREE_CODE (t))
            {
            case PLUS_EXPR:
              if (TREE_OPERAND (t, 1) == decl)
                {
                  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
                  TREE_OPERAND (t, 0) = var;
                  break;
                }

              /* Fallthru.  */
            case MINUS_EXPR:
            case POINTER_PLUS_EXPR:
              gcc_assert (TREE_OPERAND (t, 0) == decl);
              TREE_OPERAND (t, 0) = var;
              break;
            default:
              gcc_unreachable ();
            }

          gimplify_omp_ctxp->in_for_exprs = true;
          tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
                                is_gimple_val, fb_rvalue, false);
          ret = MIN (ret, tret);
          if (c)
            {
              tree step = TREE_OPERAND (t, 1);
              tree stept = TREE_TYPE (decl);
              if (POINTER_TYPE_P (stept))
                stept = sizetype;
              step = fold_convert (stept, step);
              if (TREE_CODE (t) == MINUS_EXPR)
                step = fold_build1 (NEGATE_EXPR, stept, step);
              OMP_CLAUSE_LINEAR_STEP (c) = step;
              if (step != TREE_OPERAND (t, 1))
                {
                  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
                                        &for_pre_body, NULL,
                                        is_gimple_val, fb_rvalue, false);
                  ret = MIN (ret, tret);
                }
            }
          gimplify_omp_ctxp->in_for_exprs = false;
          break;

        default:
          gcc_unreachable ();
        }

      if (c2)
        {
          gcc_assert (c);
          OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
        }

      if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
        {
          for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
            if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
                 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
                     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
                && OMP_CLAUSE_DECL (c) == decl)
              {
                if (is_doacross && (collapse == 1 || i >= collapse))
                  t = var;
                else
                  {
                    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
                    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
                    gcc_assert (TREE_OPERAND (t, 0) == var);
                    t = TREE_OPERAND (t, 1);
                    gcc_assert (TREE_CODE (t) == PLUS_EXPR
                                || TREE_CODE (t) == MINUS_EXPR
                                || TREE_CODE (t) == POINTER_PLUS_EXPR);
                    gcc_assert (TREE_OPERAND (t, 0) == var);
                    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
                                is_doacross ? var : decl,
                                TREE_OPERAND (t, 1));
                  }
                gimple_seq *seq;
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
                  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
                else
                  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
                push_gimplify_context ();
                gimplify_assign (decl, t, seq);
                gimple *bind = NULL;
                if (gimplify_ctxp->temps)
                  {
                    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
                    *seq = NULL;
                    gimplify_seq_add_stmt (seq, bind);
                  }
                pop_gimplify_context (bind);
              }
        }
      if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
        for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
          {
            t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
            gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
            if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
              TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
            t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
            gcc_assert (COMPARISON_CLASS_P (t));
            if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
              TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
          }
    }

  BITMAP_FREE (has_decl_expr);
  delete allocate_uids;

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      push_gimplify_context ();
      if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
        {
          OMP_FOR_BODY (orig_for_stmt)
            = build3 (BIND_EXPR, void_type_node, NULL,
                      OMP_FOR_BODY (orig_for_stmt), NULL);
          TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
        }
    }

  gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
                                         &for_body);

  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
      || (loop_p && orig_for_stmt == for_stmt))
    {
      if (gimple_code (g) == GIMPLE_BIND)
        pop_gimplify_context (g);
      else
        pop_gimplify_context (NULL);
    }

  if (orig_for_stmt != for_stmt)
    for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
      {
        t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
        decl = TREE_OPERAND (t, 0);
        struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
        if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
          gimplify_omp_ctxp = ctx->outer_context;
        var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
        gimplify_omp_ctxp = ctx;
        omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
        TREE_OPERAND (t, 0) = var;
        t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
        TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
        TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
        if (OMP_FOR_NON_RECTANGULAR (for_stmt))
          for (int j = i + 1;
               j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
            {
              t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
              gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
              if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
                {
                  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
                  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
                }
              t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
              gcc_assert (COMPARISON_CLASS_P (t));
              if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
                  && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
                {
                  TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
                  TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
                }
            }
      }

  gimplify_adjust_omp_clauses (pre_p, for_body,
                               &OMP_FOR_CLAUSES (orig_for_stmt),
                               TREE_CODE (orig_for_stmt));

  int kind;
  switch (TREE_CODE (orig_for_stmt))
    {
    case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
    case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
    case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
    case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
    case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
    default:
      gcc_unreachable ();
    }
  if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
    {
      gimplify_seq_add_seq (pre_p, for_pre_body);
      for_pre_body = NULL;
    }
  gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
                               TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
                               for_pre_body);
  if (orig_for_stmt != for_stmt)
    gimple_omp_for_set_combined_p (gfor, true);
  if (gimplify_omp_ctxp
      && (gimplify_omp_ctxp->combined_loop
          || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
              && gimplify_omp_ctxp->outer_context
              && gimplify_omp_ctxp->outer_context->combined_loop)))
    {
      gimple_omp_for_set_combined_into_p (gfor, true);
      if (gimplify_omp_ctxp->combined_loop)
        gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
      else
        gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
    }

  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
    {
      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
    }

  /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
     constructs with GIMPLE_OMP_TASK sandwiched in between them.
     The outer taskloop stands for computing the number of iterations,
     counts for collapsed loops and holding taskloop specific clauses.
     The task construct stands for the effect of data sharing on the
     explicit task it creates and the inner taskloop stands for expansion
     of the static loop inside of the explicit task construct.  */
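  /* Schematically (a sketch of the resulting IL):

        GIMPLE_OMP_FOR (taskloop, outer clauses)
          GIMPLE_BIND
            GIMPLE_OMP_TASK (data-sharing clauses)
              GIMPLE_BIND
                GIMPLE_OMP_FOR (taskloop, inner clauses)
                  body  */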
  if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
    {
      tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
      tree task_clauses = NULL_TREE;
      tree c = *gfor_clauses_ptr;
      tree *gtask_clauses_ptr = &task_clauses;
      tree outer_for_clauses = NULL_TREE;
      tree *gforo_clauses_ptr = &outer_for_clauses;
      bitmap lastprivate_uids = NULL;
      if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
        {
          c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
          if (c)
            {
              lastprivate_uids = BITMAP_ALLOC (NULL);
              for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
                                             OMP_CLAUSE_LASTPRIVATE))
                bitmap_set_bit (lastprivate_uids,
                                DECL_UID (OMP_CLAUSE_DECL (c)));
            }
          c = *gfor_clauses_ptr;
        }
      for (; c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          /* These clauses are allowed on task, move them there.  */
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_DEFAULT:
          case OMP_CLAUSE_IF:
          case OMP_CLAUSE_UNTIED:
          case OMP_CLAUSE_FINAL:
          case OMP_CLAUSE_MERGEABLE:
          case OMP_CLAUSE_PRIORITY:
          case OMP_CLAUSE_REDUCTION:
          case OMP_CLAUSE_IN_REDUCTION:
            *gtask_clauses_ptr = c;
            gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            break;
          case OMP_CLAUSE_PRIVATE:
            if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
              {
                /* We want private on outer for and firstprivate
                   on task.  */
                *gtask_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
                lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
                                                    openacc);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
                *gforo_clauses_ptr = c;
                gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
              }
            else
              {
                *gtask_clauses_ptr = c;
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
              }
            break;
          /* These clauses go into outer taskloop clauses.  */
          case OMP_CLAUSE_GRAINSIZE:
          case OMP_CLAUSE_NUM_TASKS:
          case OMP_CLAUSE_NOGROUP:
            *gforo_clauses_ptr = c;
            gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            break;
          /* Collapse clause we duplicate on both taskloops.  */
          case OMP_CLAUSE_COLLAPSE:
            *gfor_clauses_ptr = c;
            gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            *gforo_clauses_ptr = copy_node (c);
            gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
            break;
          /* For lastprivate, keep the clause on inner taskloop, and add
             a shared clause on task.  If the same decl is also firstprivate,
             add also firstprivate clause on the inner taskloop.  */
          case OMP_CLAUSE_LASTPRIVATE:
            if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
              {
                /* For taskloop C++ lastprivate IVs, we want:
                   1) private on outer taskloop
                   2) firstprivate and shared on task
                   3) lastprivate on inner taskloop  */
                *gtask_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
                lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
                                                    openacc);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
                OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
                *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                                       OMP_CLAUSE_PRIVATE);
                OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
                OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
                TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
                gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
              }
            *gfor_clauses_ptr = c;
            gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
            *gtask_clauses_ptr
              = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
            OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
            if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
              OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
            gtask_clauses_ptr
              = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
            break;
          /* Allocate clause we duplicate on task and inner taskloop
             if the decl is lastprivate, otherwise just put on task.  */
          case OMP_CLAUSE_ALLOCATE:
            if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
                && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
              {
                /* Additionally, put firstprivate clause on task
                   for the allocator if it is not constant.  */
                *gtask_clauses_ptr
                  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                      OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*gtask_clauses_ptr)
                  = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
              }
            if (lastprivate_uids
                && bitmap_bit_p (lastprivate_uids,
                                 DECL_UID (OMP_CLAUSE_DECL (c))))
              {
                *gfor_clauses_ptr = c;
                gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
                *gtask_clauses_ptr = copy_node (c);
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
              }
            else
              {
                *gtask_clauses_ptr = c;
                gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
              }
            break;
          default:
            gcc_unreachable ();
          }
      *gfor_clauses_ptr = NULL_TREE;
      *gtask_clauses_ptr = NULL_TREE;
      *gforo_clauses_ptr = NULL_TREE;
      BITMAP_FREE (lastprivate_uids);
      gimple_set_location (gfor, input_location);
      g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
      g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
                                 NULL_TREE, NULL_TREE, NULL_TREE);
      gimple_set_location (g, input_location);
      gimple_omp_task_set_taskloop_p (g, true);
      g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
      gomp_for *gforo
        = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
                                gimple_omp_for_collapse (gfor),
                                gimple_omp_for_pre_body (gfor));
      gimple_omp_for_set_pre_body (gfor, NULL);
      gimple_omp_for_set_combined_p (gforo, true);
      gimple_omp_for_set_combined_into_p (gfor, true);
      for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
        {
          tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
          tree v = create_tmp_var (type);
          gimple_omp_for_set_index (gforo, i, v);
          t = unshare_expr (gimple_omp_for_initial (gfor, i));
          gimple_omp_for_set_initial (gforo, i, t);
          gimple_omp_for_set_cond (gforo, i,
                                   gimple_omp_for_cond (gfor, i));
          t = unshare_expr (gimple_omp_for_final (gfor, i));
          gimple_omp_for_set_final (gforo, i, t);
          t = unshare_expr (gimple_omp_for_incr (gfor, i));
          gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
          TREE_OPERAND (t, 0) = v;
          gimple_omp_for_set_incr (gforo, i, t);
          t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
          OMP_CLAUSE_DECL (t) = v;
          OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
          gimple_omp_for_set_clauses (gforo, t);
          if (OMP_FOR_NON_RECTANGULAR (for_stmt))
            {
              tree *p1 = NULL, *p2 = NULL;
              t = gimple_omp_for_initial (gforo, i);
              if (TREE_CODE (t) == TREE_VEC)
                p1 = &TREE_VEC_ELT (t, 0);
              t = gimple_omp_for_final (gforo, i);
              if (TREE_CODE (t) == TREE_VEC)
                {
                  if (p1)
                    p2 = &TREE_VEC_ELT (t, 0);
                  else
                    p1 = &TREE_VEC_ELT (t, 0);
                }
              if (p1)
                {
                  int j;
                  for (j = 0; j < i; j++)
                    if (*p1 == gimple_omp_for_index (gfor, j))
                      {
                        *p1 = gimple_omp_for_index (gforo, j);
                        if (p2)
                          *p2 = *p1;
                        break;
                      }
                  gcc_assert (j < i);
                }
            }
        }
      gimplify_seq_add_stmt (pre_p, gforo);
    }
  else
    gimplify_seq_add_stmt (pre_p, gfor);

  if (TREE_CODE (orig_for_stmt) == OMP_FOR)
    {
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      unsigned lastprivate_conditional = 0;
      while (ctx
             && (ctx->region_type == ORT_TARGET_DATA
                 || ctx->region_type == ORT_TASKGROUP))
        ctx = ctx->outer_context;
      if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
        for (tree c = gimple_omp_for_clauses (gfor);
             c; c = OMP_CLAUSE_CHAIN (c))
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
              && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
            ++lastprivate_conditional;
      if (lastprivate_conditional)
        {
          struct omp_for_data fd;
          omp_extract_for_data (gfor, &fd, NULL);
          tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
                                              lastprivate_conditional);
          tree var = create_tmp_var_raw (type);
          tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
          OMP_CLAUSE_DECL (c) = var;
          OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
          gimple_omp_for_set_clauses (gfor, c);
          omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
        }
    }
  else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
    {
      unsigned lastprivate_conditional = 0;
      for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
        if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
            && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
          ++lastprivate_conditional;
      if (lastprivate_conditional)
        {
          struct omp_for_data fd;
          omp_extract_for_data (gfor, &fd, NULL);
          tree type = unsigned_type_for (fd.iter_type);
          while (lastprivate_conditional--)
            {
              tree c = build_omp_clause (UNKNOWN_LOCATION,
                                         OMP_CLAUSE__CONDTEMP_);
              OMP_CLAUSE_DECL (c) = create_tmp_var (type);
              OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
              gimple_omp_for_set_clauses (gfor, c);
            }
        }
    }

  if (ret != GS_ALL_DONE)
    return GS_ERROR;
  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Helper for gimplify_omp_loop, called through walk_tree.  */

static tree
note_no_context_vars (tree *tp, int *, void *data)
{
  if (VAR_P (*tp)
      && DECL_CONTEXT (*tp) == NULL_TREE
      && !is_global_var (*tp))
    {
      vec<tree> *d = (vec<tree> *) data;
      d->safe_push (*tp);
      DECL_CONTEXT (*tp) = current_function_decl;
    }
  return NULL_TREE;
}

/* Gimplify the gross structure of an OMP_LOOP statement.  */

static enum gimplify_status
gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree clauses = OMP_FOR_CLAUSES (for_stmt);
  struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
  enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
  int i;

  /* If order is not present, the behavior is as if order(concurrent)
     appeared.  */
  tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
  if (order == NULL_TREE)
    {
      order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
      OMP_CLAUSE_CHAIN (order) = clauses;
      OMP_FOR_CLAUSES (for_stmt) = clauses = order;
    }

  tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
  if (bind == NULL_TREE)
    {
      if (!flag_openmp) /* flag_openmp_simd */
        ;
      else if (octx && (octx->region_type & ORT_TEAMS) != 0)
        kind = OMP_CLAUSE_BIND_TEAMS;
      else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
        kind = OMP_CLAUSE_BIND_PARALLEL;
      else
        {
          for (; octx; octx = octx->outer_context)
            {
              if ((octx->region_type & ORT_ACC) != 0
                  || octx->region_type == ORT_NONE
                  || octx->region_type == ORT_IMPLICIT_TARGET)
                continue;
              break;
            }
          if (octx == NULL && !in_omp_construct)
            error_at (EXPR_LOCATION (for_stmt),
                      "%<bind%> clause not specified on a %<loop%> "
                      "construct not nested inside another OpenMP construct");
        }
      bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
      OMP_CLAUSE_CHAIN (bind) = clauses;
      OMP_CLAUSE_BIND_KIND (bind) = kind;
      OMP_FOR_CLAUSES (for_stmt) = bind;
    }
  else
    switch (OMP_CLAUSE_BIND_KIND (bind))
      {
      case OMP_CLAUSE_BIND_THREAD:
        break;
      case OMP_CLAUSE_BIND_PARALLEL:
        if (!flag_openmp) /* flag_openmp_simd */
          {
            OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
            break;
          }
        for (; octx; octx = octx->outer_context)
          if (octx->region_type == ORT_SIMD
              && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
            {
              error_at (EXPR_LOCATION (for_stmt),
                        "%<bind(parallel)%> on a %<loop%> construct nested "
                        "inside %<simd%> construct");
              OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
              break;
            }
        kind = OMP_CLAUSE_BIND_PARALLEL;
        break;
      case OMP_CLAUSE_BIND_TEAMS:
        if (!flag_openmp) /* flag_openmp_simd */
          {
            OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
            break;
          }
        if ((octx
             && octx->region_type != ORT_IMPLICIT_TARGET
             && octx->region_type != ORT_NONE
             && (octx->region_type & ORT_TEAMS) == 0)
            || in_omp_construct)
          {
            error_at (EXPR_LOCATION (for_stmt),
                      "%<bind(teams)%> on a %<loop%> region not strictly "
                      "nested inside of a %<teams%> region");
            OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
            break;
          }
        kind = OMP_CLAUSE_BIND_TEAMS;
        break;
      default:
        gcc_unreachable ();
      }

  for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
    switch (OMP_CLAUSE_CODE (*pc))
      {
      case OMP_CLAUSE_REDUCTION:
        if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "%<inscan%> %<reduction%> clause on "
                      "%qs construct", "loop");
            OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
          }
        if (OMP_CLAUSE_REDUCTION_TASK (*pc))
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "invalid %<task%> reduction modifier on construct "
                      "other than %<parallel%>, %qs or %<sections%>",
                      lang_GNU_Fortran () ? "do" : "for");
            OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
          }
        pc = &OMP_CLAUSE_CHAIN (*pc);
        break;
      case OMP_CLAUSE_LASTPRIVATE:
        for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
          {
            tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
            gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
            if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
              break;
            if (OMP_FOR_ORIG_DECLS (for_stmt)
                && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
                                            i)) == TREE_LIST
                && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
                                               i)))
              {
                tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
                if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
                  break;
              }
          }
        if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
          {
            error_at (OMP_CLAUSE_LOCATION (*pc),
                      "%<lastprivate%> clause on a %<loop%> construct refers "
                      "to a variable %qD which is not the loop iterator",
                      OMP_CLAUSE_DECL (*pc));
            *pc = OMP_CLAUSE_CHAIN (*pc);
            break;
          }
        pc = &OMP_CLAUSE_CHAIN (*pc);
        break;
      default:
        pc = &OMP_CLAUSE_CHAIN (*pc);
        break;
      }

  TREE_SET_CODE (for_stmt, OMP_SIMD);

  int last;
  switch (kind)
    {
    case OMP_CLAUSE_BIND_THREAD: last = 0; break;
    case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
    case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
    }
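
  /* The loop construct is rewritten bottom-up: the statement itself was
     turned into OMP_SIMD just above, and each pass of the loop below
     wraps the result in one more construct.  Schematically (a sketch):

        bind(thread)    ->  simd
        bind(parallel)  ->  for simd
        bind(teams)     ->  distribute parallel for simd  */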
  for (int pass = 1; pass <= last; pass++)
    {
      if (pass == 2)
        {
          tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
                              make_node (BLOCK));
          append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
          *expr_p = make_node (OMP_PARALLEL);
          TREE_TYPE (*expr_p) = void_type_node;
          OMP_PARALLEL_BODY (*expr_p) = bind;
          OMP_PARALLEL_COMBINED (*expr_p) = 1;
          SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
          tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
          for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
            if (OMP_FOR_ORIG_DECLS (for_stmt)
                && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
                    == TREE_LIST))
              {
                tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
                if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
                  {
                    *pc = build_omp_clause (UNKNOWN_LOCATION,
                                            OMP_CLAUSE_FIRSTPRIVATE);
                    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
                    pc = &OMP_CLAUSE_CHAIN (*pc);
                  }
              }
        }
      tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
      tree *pc = &OMP_FOR_CLAUSES (t);
      TREE_TYPE (t) = void_type_node;
      OMP_FOR_BODY (t) = *expr_p;
      SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
      for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_BIND:
          case OMP_CLAUSE_ORDER:
          case OMP_CLAUSE_COLLAPSE:
            *pc = copy_node (c);
            pc = &OMP_CLAUSE_CHAIN (*pc);
            break;
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
            /* Only needed on innermost.  */
            break;
          case OMP_CLAUSE_LASTPRIVATE:
            if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
              {
                *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                        OMP_CLAUSE_FIRSTPRIVATE);
                OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
                lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
                pc = &OMP_CLAUSE_CHAIN (*pc);
              }
            *pc = copy_node (c);
            OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
            TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
            if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
              {
                if (pass != last)
                  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
                else
                  lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
                OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
              }
            pc = &OMP_CLAUSE_CHAIN (*pc);
            break;
          case OMP_CLAUSE_REDUCTION:
            *pc = copy_node (c);
            OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
            TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
            if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
              {
                auto_vec<tree> no_context_vars;
                int walk_subtrees = 0;
                note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
                                      &walk_subtrees, &no_context_vars);
                if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
                  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
                walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
                                              note_no_context_vars,
                                              &no_context_vars);
                walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
                                              note_no_context_vars,
                                              &no_context_vars);

                OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
                  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
                if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
                  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
                    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));

                hash_map<tree, tree> decl_map;
                decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
                decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
                              OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
                if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
                  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
                                OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));

                copy_body_data id;
                memset (&id, 0, sizeof (id));
                id.src_fn = current_function_decl;
                id.dst_fn = current_function_decl;
                id.src_cfun = cfun;
                id.decl_map = &decl_map;
                id.copy_decl = copy_decl_no_change;
                id.transform_call_graph_edges = CB_CGE_DUPLICATE;
                id.transform_new_cfg = true;
                id.transform_return_to_modify = false;
                id.eh_lp_nr = 0;
                walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
                           &id, NULL);
                walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
                           &id, NULL);

                for (tree d : no_context_vars)
                  {
                    DECL_CONTEXT (d) = NULL_TREE;
                    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
                  }
              }
            else
              {
                OMP_CLAUSE_REDUCTION_INIT (*pc)
                  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
                OMP_CLAUSE_REDUCTION_MERGE (*pc)
                  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
              }
            pc = &OMP_CLAUSE_CHAIN (*pc);
            break;
          default:
            gcc_unreachable ();
          }
      *pc = NULL_TREE;
      *expr_p = t;
    }
  return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
}
15436
15437
15438/* Helper function of optimize_target_teams, find OMP_TEAMS inside
15439 of OMP_TARGET's body. */
15440
15441static tree
15442find_omp_teams (tree *tp, int *walk_subtrees, void *)
15443{
15444 *walk_subtrees = 0;
15445 switch (TREE_CODE (*tp))
15446 {
15447 case OMP_TEAMS:
15448 return *tp;
15449 case BIND_EXPR:
15450 case STATEMENT_LIST:
15451 *walk_subtrees = 1;
15452 break;
15453 default:
15454 break;
15455 }
15456 return NULL_TREE;
15457}
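
/* Illustrative note (hypothetical code, not from the testsuite): because
   only BIND_EXPRs and STATEMENT_LISTs are walked into, a teams construct
   is found only when it is directly nested in the target body, e.g.

     #pragma omp target
     {
       #pragma omp teams ...
     }

   A teams construct buried inside other control flow would not be
   returned by this walker.  */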

/* Helper function of optimize_target_teams, determine if the expression
   can be computed safely before the target construct on the host.  */

static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
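
/* For instance (an illustrative sketch): with the default firstprivate
   treatment of scalars, an expression such as N * 2 + 1 in a num_teams
   or thread_limit clause walks cleanly above (firstprivate decl,
   constants, allowed arithmetic), whereas foo (N) or *P hits the default
   case and is rejected as not computable on the host.  */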

/* Try to determine whether the num_teams and/or thread_limit expressions
   can have their values computed already before entering the target
   construct.  INTEGER_CSTs trivially can; so can integral decls that are
   firstprivate (explicitly or implicitly) or explicitly map(always, to:)
   or map(always, tofrom:) on the target region, as well as expressions
   involving simple arithmetic on those.  Function calls, dereferences and
   the like are not acceptable.  Add NUM_TEAMS and THREAD_LIMIT clauses to
   the OMP_CLAUSES of EXPR based on what we find:
   0 stands for clause not specified at all, use implementation default;
   -1 stands for a value that can't be determined easily before entering
      the target construct;
   -2 means that no explicit teams construct was specified.
   If no teams construct is present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */
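
/* Illustrative example (hypothetical source, not from the testsuite):

     int n = ...;
     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(n)
     ...

   Here 4 is an INTEGER_CST and N is implicitly firstprivate, so both
   values are computable on the host; NUM_TEAMS and THREAD_LIMIT clauses
   with those values are added to the OMP_TARGET.  Had the clause been
   thread_limit(foo(n)), -1 would be used instead.  */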

static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  tree num_teams_lower = NULL_TREE;
  tree num_teams_upper = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    num_teams_upper = build_int_cst (integer_type_node, -2);
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams_upper;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	    if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
	      {
		expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
		if (TREE_CODE (expr) == INTEGER_CST)
		  num_teams_lower = expr;
		else if (walk_tree (&expr, computable_teams_clause,
				    NULL, NULL))
		  num_teams_lower = integer_minus_one_node;
		else
		  {
		    num_teams_lower = expr;
		    gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
		    if (gimplify_expr (&num_teams_lower, pre_p, NULL,
				       is_gimple_val, fb_rvalue, false)
			== GS_ERROR)
		      {
			gimplify_omp_ctxp = target_ctx;
			num_teams_lower = integer_minus_one_node;
		      }
		    else
		      {
			gimplify_omp_ctxp = target_ctx;
			if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
			  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
			    = num_teams_lower;
		      }
		  }
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
    {
      c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
      OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
      OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
      OMP_TARGET_CLAUSES (target) = c;
    }
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
  OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}

/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_SCOPE:
      ort = ORT_TASKGROUP;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
	{
	  for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
	}

      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_SCOPE:
      stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
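
/* Illustrative note on the OMP_TARGET_DATA reordering above
   (hypothetical source):

     #pragma omp target data map(tofrom: p[0:n]) use_device_ptr(p)

   The map clause must take effect first so that the use_device_ptr
   lookup for P sees the already-mapped object, which is why the
   use_device_{ptr,addr} clauses are moved to the end of the chain.  */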

/* Gimplify the gross structure of OpenACC enter/exit data, update, and
   OpenMP target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
      ort = ORT_ACC;
      break;
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clause.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_TO_PSET:
	      /* Fortran arrays with descriptors must map that descriptor
		 when doing standalone "attach" operations (in OpenACC).
		 In that case GOMP_MAP_TO_PSET appears by itself with no
		 preceding clause
		 (see trans-openmp.cc:gfc_trans_omp_clauses).  */
	      break;
	    case GOMP_MAP_POINTER:
	      /* TODO PR92929: we may see these here, but they'll always
		 follow one of the clauses above, and will be handled by
		 libgomp as one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
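
/* Illustrative example (hypothetical source): for

     #pragma acc exit data copyout(x) finalize

   the GOMP_MAP_FROM for X is changed to GOMP_MAP_FORCE_FROM above, so
   the runtime copies the data back and removes the mapping, implementing
   the OpenACC "finalize" semantics.  */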

/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */
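
/* For instance (illustrative): with ADDR being &x, both x itself and
   *&x (possibly wrapped in useless casts to type variants, as the C
   front end likes to add for e.g. volatile variables) are recognized
   as the stabilized lhs.  */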

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (INDIRECT_REF_P (expr))
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}

/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */
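
/* Illustrative sketch: for an atomic update such as x = x + foo (),
   the foo () subexpression does not involve the lhs, so it is
   gimplified into a temporary in *PRE_P, while the occurrence of
   *LHS_ADDR is replaced by LHS_VAR, leaving something like
   LHS_VAR + D.1234 for the caller to use as the update expression
   (D.1234 being a hypothetical temporary).  */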

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var, tree &target_expr, bool rhs, int depth)
{
  tree expr = *expr_p;
  int saw_lhs = 0;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      if (pre_p)
	*expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  /* Maximum depth of lhs in expression is for the
     __builtin_clear_padding (...), __builtin_clear_padding (...),
     __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs;  */
  if (++depth > 7)
    goto finish;

  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var, target_expr, true, depth);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case MODIFY_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case ADDR_EXPR:
	  if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
					    target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, false,
					 depth);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    {
	      /* Special-case __builtin_clear_padding call before
		 __builtin_memcmp.  */
	      if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
		{
		  tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
		  if (fndecl
		      && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
		      && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
		      && (!pre_p
			  || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
						 lhs_addr, lhs_var,
						 target_expr, true, depth)))
		    {
		      if (pre_p)
			*expr_p = expr;
		      saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
						    pre_p, lhs_addr, lhs_var,
						    target_expr, true, depth);
		      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
						     pre_p, lhs_addr, lhs_var,
						     target_expr, rhs, depth);
		      return saw_lhs;
		    }
		}

	      if (pre_p)
		gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	    }
	  if (!pre_p)
	    return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
				       target_expr, rhs, depth);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
				     target_expr, rhs, depth);
	case COND_EXPR:
	  if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
				   lhs_var, target_expr, true, depth))
	    break;
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
					 lhs_addr, lhs_var, target_expr, true,
					 depth);
	  break;
	case TARGET_EXPR:
	  if (TARGET_EXPR_INITIAL (expr))
	    {
	      if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
						lhs_var, target_expr, true,
						depth))
		break;
	      if (expr == target_expr)
		saw_lhs = 1;
	      else
		{
		  saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
						pre_p, lhs_addr, lhs_var,
						target_expr, true, depth);
		  if (saw_lhs && target_expr == NULL_TREE && pre_p)
		    target_expr = expr;
		}
	    }
	  break;
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF
	  || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var, target_expr, true,
				       depth);
      break;
    case tcc_vl_exp:
      if (TREE_CODE (expr) == CALL_EXPR)
	{
	  if (tree fndecl = get_callee_fndecl (expr))
	    if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING,
				   BUILT_IN_MEMCMP))
	      {
		int nargs = call_expr_nargs (expr);
		for (int i = 0; i < nargs; i++)
		  saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
						 pre_p, lhs_addr, lhs_var,
						 target_expr, true, depth);
	      }
	}
      break;
    default:
      break;
    }

 finish:
  if (saw_lhs == 0 && pre_p)
    {
      enum gimplify_status gs;
      if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
	{
	  gimplify_stmt (&expr, pre_p);
	  return saw_lhs;
	}
      else if (rhs)
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      else
	gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}

/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;
  tree target_expr = NULL_TREE;

  tmp_load = create_tmp_reg (type);
  if (rhs
      && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
			     true, 0) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      tree rhsarg = rhs;
      if (TREE_CODE (rhs) == COND_EXPR)
	rhsarg = TREE_OPERAND (rhs, 1);
      if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhsarg, 2);
	  tree op1 = TREE_OPERAND (rhsarg, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
	  tree t = build2_loc (EXPR_LOCATION (rhsarg),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhsarg),
					   BIT_FIELD_REF, TREE_TYPE (op1),
					   tmp_store, bitsize, bitpos), op1);
	  if (TREE_CODE (rhs) == COND_EXPR)
	    t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
			    TREE_OPERAND (rhs, 0), t, void_node);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
      if (TREE_CODE (rhs) == COND_EXPR)
	gimplify_ctxp->allow_rhs_cond_expr = true;
      enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
					       is_gimple_val, fb_rvalue);
      gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
      if (gs != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
    {
      gimple_omp_atomic_set_weak (loadstmt);
      gimple_omp_atomic_set_weak (storestmt);
    }
  gimplify_seq_add_stmt (pre_p, storestmt);
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
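
/* Illustrative sketch of the result (hypothetical temporaries): for

     #pragma omp atomic
     x += n;

   the emitted sequence is roughly

     GIMPLE_OMP_ATOMIC_LOAD (tmp_load, &x)
     D.1235 = tmp_load + n
     GIMPLE_OMP_ATOMIC_STORE (D.1235)

   with the construct's memory order on both the load and the store.  */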

/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     in which to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
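
/* Illustrative example: __transaction_atomic { ... } becomes a
   GIMPLE_TRANSACTION with subcode 0, while __transaction_relaxed { ... }
   gets GTMA_IS_RELAXED set on the resulting statement.  */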

/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Return the gimplified GIMPLE_OMP_ORDERED tuple.  */
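
/* For instance (illustrative), inside a loop nest with an ordered(2)
   clause on iteration variables i and j,

     #pragma omp ordered depend(sink: i-1, j)

   must list exactly the iteration variables of the two outermost loops,
   in order; otherwise an error is emitted and a GIMPLE_NOP is returned
   instead of the GIMPLE_OMP_ORDERED.  */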

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ())
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %qs clause must be "
		      "closely nested inside a loop with %<ordered%> clause",
		      OMP_CLAUSE_DOACROSS_DEPEND (c) ? "depend" : "doacross");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK)
	  {
	    bool fail = false;
	    sink_c = c;
	    if (OMP_CLAUSE_DECL (c) == NULL_TREE)
	      continue;  /* omp_cur_iteration - 1 */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %qs clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables",
			  OMP_CLAUSE_DOACROSS_DEPEND (c)
			  ? "depend" : "doacross");
		failures++;
	      }
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DOACROSS
		 && OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SOURCE)
	  {
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %qs clause with %<source%> "
			  "modifier on an %<ordered%> construct",
			  OMP_CLAUSE_DOACROSS_DEPEND (source_c)
			  ? "depend" : "doacross");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%qs clause with %<source%> modifier specified "
		"together with %qs clauses with %<sink%> modifier "
		"on the same construct",
		OMP_CLAUSE_DOACROSS_DEPEND (source_c) ? "depend" : "doacross",
		OMP_CLAUSE_DOACROSS_DEPEND (sink_c) ? "depend" : "doacross");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}

/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
   the expression needs to evaluate to a value to be used as
   an operand in a GIMPLE statement, this value will be stored in
   *EXPR_P on exit.  This happens when the caller specifies one
   of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of EXPR and all the side-effects that must
   be executed before the main expression.  On exit, the last
   statement of PRE_P is the core statement being gimplified.  For
   instance, when gimplifying 'if (++a)' the last statement in
   PRE_P will be 'if (t.1)' where t.1 is the result of
   pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of all the side-effects that must be executed
   after the main expression.  If this is NULL, the post
   side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

	1	t.1 = *p
	2	p = p - 1
	3	t.2 = t.1 + 1
	4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

	1	t.1 = *p
	2	t.2 = t.1 + 1
	3	*p = t.2
	4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.
   If POST_P is NULL, an internal queue will be used.  Before
   returning to the caller, the sequence POST_P is appended to
   the main output sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
   returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.cc.

   FALLBACK tells the function what sort of a temporary we want if
   gimplification cannot produce an expression that complies with
   GIMPLE_TEST_F.

   fb_none means that no temporary should be generated
   fb_rvalue means that an rvalue is OK to generate
   fb_lvalue means that an lvalue is OK to generate
   fb_either means that either is OK, but an lvalue is preferable.
   fb_mayfail means that gimplification may fail (in which case
   GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */

enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
  tree label;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr_for_cond
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (error_operand_p (save_expr))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if ((fallback & fb_rvalue)
	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p,
						 false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p,
						 false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }

	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree data = TREE_OPERAND (*expr_p, 2);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }

	/* We arrive here through the various re-gimplification paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      REF_REVERSE_STORAGE_ORDER (tmp)
		= REF_REVERSE_STORAGE_ORDER (*expr_p);
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }
	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	/* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     value.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not a LABEL_DECL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_goto
				   (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_predict
				   (PREDICT_EXPR_PREDICTOR (*expr_p),
				    PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = gimplify_label_expr (expr_p, pre_p);
	  label = LABEL_EXPR_LABEL (*expr_p);
	  gcc_assert (decl_function_context (label) == current_function_decl);

	  /* If the label is used in a goto statement, or the address of the
	     label is taken, we need to unpoison all variables that were seen
	     so far.  Doing so prevents us from reporting false positives.  */
	  if (asan_poisoned_variables
	      && asan_used_labels != NULL
	      && asan_used_labels->contains (label)
	      && !gimplify_omp_ctxp)
	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);

	  if (gimplify_ctxp->live_switch_vars)
	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
				   pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work
	     its magic.  But if we're just elaborating this for side effects,
	     just gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p,
						 false);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;

	/* The following are special cases that are not handled by the
	   original GIMPLE grammar.  */

	/* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	   eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	  }
	  break;

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;

	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY
	       and CATCH blocks.  They should have UNKNOWN_LOCATION as their
	       location.  However, gimplify_call_expr resets such call stmts
	       to input_location when it finds the stmt's location unknown,
	       so to prevent that for these destructor calls we set
	       input_location to unknown.
	       Note that this only affects the destructor calls in the
	       FINALLY/CATCH block, and input_location is automatically
	       restored to its original value by the end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
	      {
		gimple_seq n = NULL, e = NULL;
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						0), &n);
		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
						1), &e);
		if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
		  {
		    geh_else *stmt = gimple_build_eh_else (n, e);
		    gimple_seq_add_stmt (&cleanup, stmt);
		  }
	      }
	    else
	      gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple *c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }
17106
17107 case EH_FILTER_EXPR:
17108 {
17109 gimple *ehf;
17110 gimple_seq failure = NULL;
17111
17112 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), seq_p: &failure);
17113 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
17114 copy_warning (ehf, *expr_p);
17115 gimplify_seq_add_stmt (seq_p: pre_p, gs: ehf);
17116 ret = GS_ALL_DONE;
17117 break;
17118 }
17119
17120 case OBJ_TYPE_REF:
17121 {
17122 enum gimplify_status r0, r1;
17123 r0 = gimplify_expr (expr_p: &OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
17124 post_p, gimple_test_f: is_gimple_val, fallback: fb_rvalue);
17125 r1 = gimplify_expr (expr_p: &OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
17126 post_p, gimple_test_f: is_gimple_val, fallback: fb_rvalue);
17127 TREE_SIDE_EFFECTS (*expr_p) = 0;
17128 ret = MIN (r0, r1);
17129 }
17130 break;
17131
17132 case LABEL_DECL:
17133 /* We get here when taking the address of a label. We mark
17134 the label as "forced"; meaning it can never be removed and
17135 it is a potential target for any computed goto. */
17136 FORCED_LABEL (*expr_p) = 1;
17137 ret = GS_ALL_DONE;
17138 break;
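
	/* For example (GNU C labels-as-values extension; illustrative
	   only):

	     void *p = &&lab;
	     ...
	     goto *p;
	   lab:;

	   Gimplifying "&&lab" visits the LABEL_DECL "lab" here, so the
	   label stays a valid target for any computed goto.  */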

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case DEBUG_EXPR_DECL:
	  gcc_unreachable ();

	case DEBUG_BEGIN_STMT:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_debug_begin_stmt
				 (TREE_BLOCK (*expr_p),
				  EXPR_LOCATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  *expr_p = NULL;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SIMD:
	  {
	    /* Temporarily disable into_ssa, as scan_omp_simd
	       which calls copy_gimple_seq_and_replace_locals can't deal
	       with SSA_NAMEs defined outside of the body properly.  */
	    bool saved_into_ssa = gimplify_ctxp->into_ssa;
	    gimplify_ctxp->into_ssa = false;
	    ret = gimplify_omp_for (expr_p, pre_p);
	    gimplify_ctxp->into_ssa = saved_into_ssa;
	    break;
	  }

	case OMP_FOR:
	case OMP_DISTRIBUTE:
	case OMP_TASKLOOP:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OMP_LOOP:
	  ret = gimplify_omp_loop (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DECLARE:
	  gimplify_oacc_declare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_HOST_DATA:
	case OACC_DATA:
	case OACC_KERNELS:
	case OACC_PARALLEL:
	case OACC_SERIAL:
	case OMP_SCOPE:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	case OMP_TARGET_ENTER_DATA:
	case OMP_TARGET_EXIT_DATA:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_STRUCTURED_BLOCK:
	case OMP_MASTER:
	case OMP_MASKED:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	case OMP_SCAN:
	  {
	    gimple_seq body = NULL;
	    gimple *g;
	    bool saved_in_omp_construct = in_omp_construct;

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_STRUCTURED_BLOCK:
		g = gimple_build_omp_structured_block (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_ORDERED:
		g = gimplify_omp_ordered (*expr_p, body);
		if (OMP_BODY (*expr_p) == NULL_TREE
		    && gimple_code (g) == GIMPLE_OMP_ORDERED)
		  gimple_omp_ordered_standalone (g);
		break;
	      case OMP_MASKED:
		gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_MASKED);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_MASKED_CLAUSES (*expr_p),
					     OMP_MASKED);
		g = gimple_build_omp_masked (body,
					     OMP_MASKED_CLAUSES (*expr_p));
		break;
	      case OMP_CRITICAL:
		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_CRITICAL_CLAUSES (*expr_p),
					     OMP_CRITICAL);
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p),
					       OMP_CRITICAL_CLAUSES (*expr_p));
		break;
	      case OMP_SCAN:
		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
					   pre_p, ORT_WORKSHARE, OMP_SCAN);
		gimplify_adjust_omp_clauses (pre_p, body,
					     &OMP_SCAN_CLAUSES (*expr_p),
					     OMP_SCAN);
		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OMP_TASKGROUP:
	  {
	    gimple_seq body = NULL;

	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
	    bool saved_in_omp_construct = in_omp_construct;
	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
				       OMP_TASKGROUP);
	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

	    in_omp_construct = true;
	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    in_omp_construct = saved_in_omp_construct;
	    gimple_seq cleanup = NULL;
	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
	    gimple *g = gimple_build_call (fn, 0);
	    gimple_seq_add_stmt (&cleanup, g);
	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	    body = NULL;
	    gimple_seq_add_stmt (&body, g);
	    g = gimple_build_omp_taskgroup (body, *pclauses);
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }
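
	/* Schematically (illustrative dump shape only), the code above
	   turns

	     #pragma omp taskgroup
	       BODY

	   into

	     taskgroup
	       try
		 {
		   BODY
		 }
	       finally
		 {
		   GOMP_taskgroup_end ();
		 }

	   so the end hook runs even when BODY exits abnormally.  */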

	case OMP_ATOMIC:
	case OMP_ATOMIC_READ:
	case OMP_ATOMIC_CAPTURE_OLD:
	case OMP_ATOMIC_CAPTURE_NEW:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case TRANSACTION_EXPR:
	  ret = gimplify_transaction (expr_p, pre_p);
	  break;

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  {
	    tree orig_type = TREE_TYPE (*expr_p);
	    tree new_type, xop0, xop1;
	    *expr_p = gimple_boolify (*expr_p);
	    new_type = TREE_TYPE (*expr_p);
	    if (!useless_type_conversion_p (orig_type, new_type))
	      {
		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
		ret = GS_OK;
		break;
	      }

	    /* Boolified binary truth expressions are semantically equivalent
	       to bitwise binary expressions.  Canonicalize them to the
	       bitwise variant.  */
	    switch (TREE_CODE (*expr_p))
	      {
	      case TRUTH_AND_EXPR:
		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
		break;
	      case TRUTH_OR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
		break;
	      case TRUTH_XOR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
		break;
	      default:
		break;
	      }
	    /* Now make sure that operands have compatible type to
	       expression's new_type.  */
	    xop0 = TREE_OPERAND (*expr_p, 0);
	    xop1 = TREE_OPERAND (*expr_p, 1);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
							    new_type,
							    xop0);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
							    new_type,
							    xop1);
	    /* Continue classified as tcc_binary.  */
	    goto expr_2;
	  }
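
	/* E.g. (illustrative): for _Bool operands A and B, the
	   non-short-circuit TRUTH_AND_EXPR <A, B> is rewritten in place as
	   BIT_AND_EXPR <A, B> and then gimplified through the tcc_binary
	   path below as "tmp = A & B".  */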

	case VEC_COND_EXPR:
	  goto expr_3;

	case VEC_PERM_EXPR:
	  /* Classified as tcc_expression.  */
	  goto expr_3;

	case BIT_INSERT_EXPR:
	  /* Argument 3 is a constant.  */
	  goto expr_2;

	case POINTER_PLUS_EXPR:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);
	    ret = MIN (r0, r1);
	    break;
	  }

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparisons of aggregate objects that do not have
		 scalar mode with a call to memcmp.  It would be nice to only
		 have to do this for variable-sized objects, but then we'd
		 have to allow the same nest of reference nodes we allow for
		 MODIFY_EXPR and that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
	      if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
		ret = GS_ERROR;
	      else
		{
		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		  /* Vector comparisons need no boolification.  */
		  if (TREE_CODE (type) == VECTOR_TYPE)
		    goto expr_2;
		  else if (!AGGREGATE_TYPE_P (type))
		    {
		      tree org_type = TREE_TYPE (*expr_p);
		      *expr_p = gimple_boolify (*expr_p);
		      if (!useless_type_conversion_p (org_type,
						      TREE_TYPE (*expr_p)))
			{
			  *expr_p = fold_convert_loc (input_location,
						      org_type, *expr_p);
			  ret = GS_OK;
			}
		      else
			goto expr_2;
		    }
		  else if (TYPE_MODE (type) != BLKmode)
		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		  else
		    ret = gimplify_variable_sized_compare (expr_p);
		}
	      break;
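
	      /* Illustrative shape of the BLKmode branch above (aggregate
		 equality comes from frontends such as Ada; C itself has no
		 such comparison): "A == B" on a BLKmode record lowers
		 roughly to

		   D.1 = __builtin_memcmp (&A, &B, <size of A>);
		   D.2 = D.1 == 0;  */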

	    /* If *EXPR_P does not need to be special-cased, handle it
	       according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    expr_3:
	      {
		enum gimplify_status r0, r1, r2;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (MIN (r0, r1), r2);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_unreachable ();
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.
	 We can also get here with code such as "*&&L;", where L is
	 a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
	  || !TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
	       && !is_empty_type (TREE_TYPE (*expr_p)))
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != EH_ELSE_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_SERIAL
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_MASKED
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SCAN
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_STRUCTURED_BLOCK
		  && code != OMP_SINGLE
		  && code != OMP_SCOPE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
	ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
			tmp, build_zero_cst (ref_alias_type));
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}

/* Like gimplify_expr but make sure the gimplified result is not itself
   an SSA name (a temporary decl is created for it instead).  Temporaries
   required by evaluating *EXPR_P may still be SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback,
	       bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
					    gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
  return ret;
}

/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed; at -O0 they should be variables
	 with assigned stack slots, at -O1+ -g they should be tracked
	 by VTA.  */
      if (!ignored_p
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    /* Likewise, ensure variable offsets aren't removed.  */
	    if (!ignored_p
		&& (t = DECL_FIELD_OFFSET (field))
		&& VAR_P (t)
		&& DECL_ARTIFICIAL (t))
	      DECL_IGNORED_P (t) = 0;
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
	 be incorrect because its definition might refer to variables not
	 yet initialized at this point if a forward declaration is involved.

	 It was actually useful for anonymous pointed-to types to ensure
	 that the sizes evaluation dominates every possible later use of the
	 values.  Restricting to such types here would be safe since there
	 is no possible forward declaration around, but would introduce an
	 undesirable middle-end semantic to anonymity.  We then defer to
	 front-ends the responsibility of ensuring that the sizes are
	 evaluated both early and late enough, e.g. by attaching artificial
	 type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
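
/* Illustrative example (not part of the pass): for a C VLA such as

     void f (int n) { int a[n]; ... }

   this function walks the ARRAY_TYPE of "a" and gimplifies the domain
   bound and TYPE_SIZE/TYPE_SIZE_UNIT expressions (roughly "n - 1" and
   "n * sizeof (int)" here) into statements on LIST_P, and un-ignores the
   artificial bound variables so the debugger can still describe the
   type.  */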

/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     would want to replace it with a new variable, but that would cause
     problems if the type is used outside the function it comes from, so it
     is OK to leave such a VAR_DECL alone here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || VAR_P (expr)
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}

/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    gsi_remove (&gsi, false);
	  else
	    {
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}

typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}
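
/* Note the substring semantics above (illustrative restatement of the
   strstr calls): with -finstrument-functions-exclude-function-list=foo,
   any function whose printable name contains "foo" (e.g. "foobar") is
   excluded, and -finstrument-functions-exclude-file-list likewise
   matches anywhere in the source file name.  */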

/* Build a call to the instrumentation function FNCODE and add it to SEQ.
   If COND_VAR is not NULL, it is a boolean variable guarding the call to
   the instrumentation function.  If STMT is not NULL, it is a statement
   to be executed just before the call to the instrumentation function.  */

static void
build_instrumentation_call (gimple_seq *seq, enum built_in_function fncode,
			    tree cond_var, gimple *stmt)
{
  /* The instrumentation hooks aren't going to call the instrumented
     function and the address they receive is expected to be matchable
     against symbol addresses.  Make sure we don't create a trampoline,
     in case the current function is nested.  */
  tree this_fn_addr = build_fold_addr_expr (current_function_decl);
  TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

  tree label_true, label_false;
  if (cond_var)
    {
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      label_false = create_artificial_label (UNKNOWN_LOCATION);
      gcond *cond = gimple_build_cond (EQ_EXPR, cond_var, boolean_false_node,
				       label_true, label_false);
      gimplify_seq_add_stmt (seq, cond);
      gimplify_seq_add_stmt (seq, gimple_build_label (label_true));
      gimplify_seq_add_stmt (seq, gimple_build_predict (PRED_COLD_LABEL,
							NOT_TAKEN));
    }

  if (stmt)
    gimplify_seq_add_stmt (seq, stmt);

  tree x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  gcall *call = gimple_build_call (x, 1, integer_zero_node);
  tree tmp_var = create_tmp_var (ptr_type_node, "return_addr");
  gimple_call_set_lhs (call, tmp_var);
  gimplify_seq_add_stmt (seq, call);
  x = builtin_decl_implicit (fncode);
  call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
  gimplify_seq_add_stmt (seq, call);

  if (cond_var)
    gimplify_seq_add_stmt (seq, gimple_build_label (label_false));
}
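
/* Schematically (illustrative dump shape only), for
   FNCODE == BUILT_IN_PROFILE_FUNC_ENTER with a COND_VAR guard the
   sequence built above looks like

     if (tmp_called == 0) goto L1; else goto L2;
   L1:
     [STMT, if any]
     return_addr = __builtin_return_address (0);
     __cyg_profile_func_enter (this_fn_addr, return_addr);
   L2:
     ...  */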

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements corresponding to the body
   of FNDECL is stored with gimple_set_body; nothing is returned.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      gimple_seq body = NULL, cleanup = NULL;
      gassign *assign;
      tree cond_var;

      /* If -finstrument-functions-once is specified, generate:

	   static volatile bool C.0 = false;
	   bool tmp_called;

	   tmp_called = C.0;
	   if (!tmp_called)
	     {
	       C.0 = true;
	       [call profiling enter function]
	     }

	 without specific protection for data races.  */
      if (flag_instrument_function_entry_exit > 1)
	{
	  tree first_var
	    = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			  VAR_DECL,
			  create_tmp_var_name ("C"),
			  boolean_type_node);
	  DECL_ARTIFICIAL (first_var) = 1;
	  DECL_IGNORED_P (first_var) = 1;
	  TREE_STATIC (first_var) = 1;
	  TREE_THIS_VOLATILE (first_var) = 1;
	  TREE_USED (first_var) = 1;
	  DECL_INITIAL (first_var) = boolean_false_node;
	  varpool_node::add (first_var);

	  cond_var = create_tmp_var (boolean_type_node, "tmp_called");
	  assign = gimple_build_assign (cond_var, first_var);
	  gimplify_seq_add_stmt (&body, assign);

	  assign = gimple_build_assign (first_var, boolean_true_node);
	}

      else
	{
	  cond_var = NULL_TREE;
	  assign = NULL;
	}

      build_instrumentation_call (&body, BUILT_IN_PROFILE_FUNC_ENTER,
				  cond_var, assign);

      /* If -finstrument-functions-once is specified, generate:

	   if (!tmp_called)
	     [call profiling exit function]

	 without specific protection for data races.  */
      build_instrumentation_call (&cleanup, BUILT_IN_PROFILE_FUNC_EXIT,
				  cond_var, NULL);

      gimple *tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
      gimplify_seq_add_stmt (&body, tf);
      gbind *new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
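
/* For example (illustrative; the exact dump syntax may vary): given

     x = va_arg (ap, int);

   the VA_ARG_EXPR is rewritten here into an internal-function call along
   the lines of

     x = .VA_ARG (ap, 0B, 0B);

   which the later stdarg pass expands into the target-specific va_list
   accesses.  */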

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
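
/* Usage sketch (hypothetical caller; LHS and RHS stand for any valid
   GENERIC operands of compatible types):

     gimple_seq seq = NULL;
     gimple *g = gimplify_assign (lhs, rhs, &seq);

   appends "lhs = rhs" to SEQ, gimplifying either side first if needed,
   and returns the final GIMPLE_ASSIGN so the caller can e.g. set its
   location.  */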

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
