1/* This file contains routines to construct OpenACC and OpenMP constructs,
2 called from parsing in the C and C++ front ends.
3
4 Copyright (C) 2005-2024 Free Software Foundation, Inc.
5 Contributed by Richard Henderson <rth@redhat.com>,
6 Diego Novillo <dnovillo@redhat.com>.
7
8This file is part of GCC.
9
10GCC is free software; you can redistribute it and/or modify it under
11the terms of the GNU General Public License as published by the Free
12Software Foundation; either version 3, or (at your option) any later
13version.
14
15GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16WARRANTY; without even the implied warranty of MERCHANTABILITY or
17FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18for more details.
19
20You should have received a copy of the GNU General Public License
21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "options.h"
28#include "c-common.h"
29#include "gimple-expr.h"
30#include "c-pragma.h"
31#include "stringpool.h"
32#include "omp-general.h"
33#include "gomp-constants.h"
34#include "memmodel.h"
35#include "attribs.h"
36#include "gimplify.h"
37#include "langhooks.h"
38#include "bitmap.h"
39#include "tree-iterator.h"
40
41
42/* Complete a #pragma oacc wait construct. LOC is the location of
43 the #pragma. */
44
45tree
46c_finish_oacc_wait (location_t loc, tree parms, tree clauses)
47{
48 const int nparms = list_length (parms);
49 tree stmt, t;
50 vec<tree, va_gc> *args;
51
52 vec_alloc (v&: args, nelems: nparms + 2);
53 stmt = builtin_decl_explicit (fncode: BUILT_IN_GOACC_WAIT);
54
55 if (omp_find_clause (clauses, kind: OMP_CLAUSE_ASYNC))
56 t = OMP_CLAUSE_ASYNC_EXPR (clauses);
57 else
58 t = build_int_cst (integer_type_node, GOMP_ASYNC_SYNC);
59
60 args->quick_push (obj: t);
61 args->quick_push (obj: build_int_cst (integer_type_node, nparms));
62
63 for (t = parms; t; t = TREE_CHAIN (t))
64 {
65 if (TREE_CODE (OMP_CLAUSE_WAIT_EXPR (t)) == INTEGER_CST)
66 args->quick_push (obj: build_int_cst (integer_type_node,
67 TREE_INT_CST_LOW (OMP_CLAUSE_WAIT_EXPR (t))));
68 else
69 args->quick_push (OMP_CLAUSE_WAIT_EXPR (t));
70 }
71
72 stmt = build_call_expr_loc_vec (loc, stmt, args);
73
74 vec_free (v&: args);
75
76 return stmt;
77}
78
79/* Complete a #pragma omp master construct. STMT is the structured-block
80 that follows the pragma. LOC is the location of the #pragma. */
81
82tree
83c_finish_omp_master (location_t loc, tree stmt)
84{
85 tree t = add_stmt (build1 (OMP_MASTER, void_type_node, stmt));
86 SET_EXPR_LOCATION (t, loc);
87 return t;
88}
89
90/* Complete a #pragma omp masked construct. BODY is the structured-block
91 that follows the pragma. LOC is the location of the #pragma. */
92
93tree
94c_finish_omp_masked (location_t loc, tree body, tree clauses)
95{
96 tree stmt = make_node (OMP_MASKED);
97 TREE_TYPE (stmt) = void_type_node;
98 OMP_MASKED_BODY (stmt) = body;
99 OMP_MASKED_CLAUSES (stmt) = clauses;
100 SET_EXPR_LOCATION (stmt, loc);
101 return add_stmt (stmt);
102}
103
104/* Complete a #pragma omp taskgroup construct. BODY is the structured-block
105 that follows the pragma. LOC is the location of the #pragma. */
106
107tree
108c_finish_omp_taskgroup (location_t loc, tree body, tree clauses)
109{
110 tree stmt = make_node (OMP_TASKGROUP);
111 TREE_TYPE (stmt) = void_type_node;
112 OMP_TASKGROUP_BODY (stmt) = body;
113 OMP_TASKGROUP_CLAUSES (stmt) = clauses;
114 SET_EXPR_LOCATION (stmt, loc);
115 return add_stmt (stmt);
116}
117
118/* Complete a #pragma omp critical construct. BODY is the structured-block
119 that follows the pragma, NAME is the identifier in the pragma, or null
120 if it was omitted. LOC is the location of the #pragma. */
121
122tree
123c_finish_omp_critical (location_t loc, tree body, tree name, tree clauses)
124{
125 gcc_assert (!clauses || OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT);
126 if (name == NULL_TREE
127 && clauses != NULL_TREE
128 && integer_nonzerop (OMP_CLAUSE_HINT_EXPR (clauses)))
129 {
130 error_at (OMP_CLAUSE_LOCATION (clauses),
131 "%<#pragma omp critical%> with %<hint%> clause requires "
132 "a name, except when %<omp_sync_hint_none%> is used");
133 return error_mark_node;
134 }
135
136 tree stmt = make_node (OMP_CRITICAL);
137 TREE_TYPE (stmt) = void_type_node;
138 OMP_CRITICAL_BODY (stmt) = body;
139 OMP_CRITICAL_NAME (stmt) = name;
140 OMP_CRITICAL_CLAUSES (stmt) = clauses;
141 SET_EXPR_LOCATION (stmt, loc);
142 return add_stmt (stmt);
143}
144
145/* Complete a #pragma omp ordered construct. STMT is the structured-block
146 that follows the pragma. LOC is the location of the #pragma. */
147
148tree
149c_finish_omp_ordered (location_t loc, tree clauses, tree stmt)
150{
151 tree t = make_node (OMP_ORDERED);
152 TREE_TYPE (t) = void_type_node;
153 OMP_ORDERED_BODY (t) = stmt;
154 if (!flag_openmp /* flag_openmp_simd */
155 && (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_SIMD
156 || OMP_CLAUSE_CHAIN (clauses)))
157 clauses = build_omp_clause (loc, OMP_CLAUSE_SIMD);
158 OMP_ORDERED_CLAUSES (t) = clauses;
159 SET_EXPR_LOCATION (t, loc);
160 return add_stmt (t);
161}
162
163
164/* Complete a #pragma omp barrier construct. LOC is the location of
165 the #pragma. */
166
167void
168c_finish_omp_barrier (location_t loc)
169{
170 tree x;
171
172 x = builtin_decl_explicit (fncode: BUILT_IN_GOMP_BARRIER);
173 x = build_call_expr_loc (loc, x, 0);
174 add_stmt (x);
175}
176
177
178/* Complete a #pragma omp taskwait construct. LOC is the location of the
179 pragma. */
180
181void
182c_finish_omp_taskwait (location_t loc)
183{
184 tree x;
185
186 x = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKWAIT);
187 x = build_call_expr_loc (loc, x, 0);
188 add_stmt (x);
189}
190
191
192/* Complete a #pragma omp taskyield construct. LOC is the location of the
193 pragma. */
194
195void
196c_finish_omp_taskyield (location_t loc)
197{
198 tree x;
199
200 x = builtin_decl_explicit (fncode: BUILT_IN_GOMP_TASKYIELD);
201 x = build_call_expr_loc (loc, x, 0);
202 add_stmt (x);
203}
204
205
206/* Complete a #pragma omp atomic construct. For CODE OMP_ATOMIC
207 the expression to be implemented atomically is LHS opcode= RHS.
208 For OMP_ATOMIC_READ V = LHS, for OMP_ATOMIC_CAPTURE_{NEW,OLD} LHS
209 opcode= RHS with the new or old content of LHS returned.
210 LOC is the location of the atomic statement. The value returned
211 is either error_mark_node (if the construct was erroneous) or an
212 OMP_ATOMIC* node which should be added to the current statement
213 tree with add_stmt. If TEST is set, avoid calling save_expr
214 or create_tmp_var*. */
215
216tree
217c_finish_omp_atomic (location_t loc, enum tree_code code,
218 enum tree_code opcode, tree lhs, tree rhs,
219 tree v, tree lhs1, tree rhs1, tree r, bool swapped,
220 enum omp_memory_order memory_order, bool weak,
221 bool test)
222{
223 tree x, type, addr, pre = NULL_TREE, rtmp = NULL_TREE, vtmp = NULL_TREE;
224 HOST_WIDE_INT bitpos = 0, bitsize = 0;
225 enum tree_code orig_opcode = opcode;
226
227 if (lhs == error_mark_node || rhs == error_mark_node
228 || v == error_mark_node || lhs1 == error_mark_node
229 || rhs1 == error_mark_node || r == error_mark_node)
230 return error_mark_node;
231
232 /* ??? According to one reading of the OpenMP spec, complex type are
233 supported, but there are no atomic stores for any architecture.
234 But at least icc 9.0 doesn't support complex types here either.
235 And lets not even talk about vector types... */
236 type = TREE_TYPE (lhs);
237 if (!INTEGRAL_TYPE_P (type)
238 && !POINTER_TYPE_P (type)
239 && !SCALAR_FLOAT_TYPE_P (type))
240 {
241 error_at (loc, "invalid expression type for %<#pragma omp atomic%>");
242 return error_mark_node;
243 }
244 if (TYPE_ATOMIC (type))
245 {
246 error_at (loc, "%<_Atomic%> expression in %<#pragma omp atomic%>");
247 return error_mark_node;
248 }
249 if (r && r != void_list_node && !INTEGRAL_TYPE_P (TREE_TYPE (r)))
250 {
251 error_at (loc, "%<#pragma omp atomic compare capture%> with non-integral "
252 "comparison result");
253 return error_mark_node;
254 }
255
256 if (opcode == RDIV_EXPR)
257 opcode = TRUNC_DIV_EXPR;
258
259 /* ??? Validate that rhs does not overlap lhs. */
260 tree blhs = NULL;
261 if (TREE_CODE (lhs) == COMPONENT_REF
262 && TREE_CODE (TREE_OPERAND (lhs, 1)) == FIELD_DECL
263 && DECL_C_BIT_FIELD (TREE_OPERAND (lhs, 1))
264 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs, 1)))
265 {
266 tree field = TREE_OPERAND (lhs, 1);
267 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
268 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
269 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
270 bitpos = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
271 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
272 else
273 bitpos = 0;
274 bitpos += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
275 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
276 gcc_assert (tree_fits_shwi_p (DECL_SIZE (field)));
277 bitsize = tree_to_shwi (DECL_SIZE (field));
278 blhs = lhs;
279 type = TREE_TYPE (repr);
280 lhs = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (lhs, 0),
281 repr, TREE_OPERAND (lhs, 2));
282 }
283
284 /* Take and save the address of the lhs. From then on we'll reference it
285 via indirection. */
286 addr = build_unary_op (loc, ADDR_EXPR, lhs, false);
287 if (addr == error_mark_node)
288 return error_mark_node;
289 if (!test)
290 addr = save_expr (addr);
291 if (!test
292 && TREE_CODE (addr) != SAVE_EXPR
293 && (TREE_CODE (addr) != ADDR_EXPR
294 || !VAR_P (TREE_OPERAND (addr, 0))))
295 {
296 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
297 it even after unsharing function body. */
298 tree var = create_tmp_var_raw (TREE_TYPE (addr));
299 DECL_CONTEXT (var) = current_function_decl;
300 addr = build4 (TARGET_EXPR, TREE_TYPE (addr), var, addr, NULL, NULL);
301 }
302 tree orig_lhs = lhs;
303 lhs = build_indirect_ref (loc, addr, RO_NULL);
304 tree new_lhs = lhs;
305
306 if (code == OMP_ATOMIC_READ)
307 {
308 x = build1 (OMP_ATOMIC_READ, type, addr);
309 SET_EXPR_LOCATION (x, loc);
310 OMP_ATOMIC_MEMORY_ORDER (x) = memory_order;
311 gcc_assert (!weak);
312 if (blhs)
313 x = build3_loc (loc, code: BIT_FIELD_REF, TREE_TYPE (blhs), arg0: x,
314 bitsize_int (bitsize), bitsize_int (bitpos));
315 return build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
316 loc, x, NULL_TREE);
317 }
318
319 /* There are lots of warnings, errors, and conversions that need to happen
320 in the course of interpreting a statement. Use the normal mechanisms
321 to do this, and then take it apart again. */
322 if (blhs)
323 {
324 lhs = build3_loc (loc, code: BIT_FIELD_REF, TREE_TYPE (blhs), arg0: lhs,
325 bitsize_int (bitsize), bitsize_int (bitpos));
326 if (opcode == COND_EXPR)
327 {
328 bool save = in_late_binary_op;
329 in_late_binary_op = true;
330 std::swap (a&: rhs, b&: rhs1);
331 rhs1 = build_binary_op (loc, EQ_EXPR, lhs, rhs1, true);
332 in_late_binary_op = save;
333 }
334 else if (swapped)
335 rhs = build_binary_op (loc, opcode, rhs, lhs, true);
336 else if (opcode != NOP_EXPR)
337 rhs = build_binary_op (loc, opcode, lhs, rhs, true);
338 opcode = NOP_EXPR;
339 }
340 else if (opcode == COND_EXPR)
341 {
342 bool save = in_late_binary_op;
343 in_late_binary_op = true;
344 std::swap (a&: rhs, b&: rhs1);
345 rhs1 = build_binary_op (loc, EQ_EXPR, lhs, rhs1, true);
346 in_late_binary_op = save;
347 opcode = NOP_EXPR;
348 }
349 else if (swapped)
350 {
351 rhs = build_binary_op (loc, opcode, rhs, lhs, true);
352 opcode = NOP_EXPR;
353 }
354 bool save = in_late_binary_op;
355 in_late_binary_op = true;
356 if ((opcode == MIN_EXPR || opcode == MAX_EXPR)
357 && build_binary_op (loc, LT_EXPR, blhs ? blhs : lhs, rhs,
358 true) == error_mark_node)
359 x = error_mark_node;
360 else
361 x = build_modify_expr (loc, blhs ? blhs : lhs, NULL_TREE, opcode,
362 loc, rhs, NULL_TREE);
363 in_late_binary_op = save;
364 if (x == error_mark_node)
365 return error_mark_node;
366 if (TREE_CODE (x) == COMPOUND_EXPR)
367 {
368 pre = TREE_OPERAND (x, 0);
369 gcc_assert (TREE_CODE (pre) == SAVE_EXPR || tree_invariant_p (pre));
370 x = TREE_OPERAND (x, 1);
371 }
372 gcc_assert (TREE_CODE (x) == MODIFY_EXPR);
373 rhs = TREE_OPERAND (x, 1);
374
375 if (blhs)
376 rhs = build3_loc (loc, code: BIT_INSERT_EXPR, type, arg0: new_lhs,
377 arg1: rhs, bitsize_int (bitpos));
378 if (orig_opcode == COND_EXPR)
379 {
380 if (error_operand_p (t: rhs1))
381 return error_mark_node;
382 gcc_assert (TREE_CODE (rhs1) == EQ_EXPR);
383 tree cmptype = TREE_TYPE (TREE_OPERAND (rhs1, 0));
384 if (SCALAR_FLOAT_TYPE_P (cmptype) && !test)
385 {
386 bool clear_padding = false;
387 HOST_WIDE_INT non_padding_start = 0;
388 HOST_WIDE_INT non_padding_end = 0;
389 if (BITS_PER_UNIT == 8
390 && CHAR_BIT == 8
391 && clear_padding_type_may_have_padding_p (cmptype))
392 {
393 HOST_WIDE_INT sz = int_size_in_bytes (cmptype), i;
394 gcc_assert (sz > 0);
395 unsigned char *buf = XALLOCAVEC (unsigned char, sz);
396 memset (s: buf, c: ~0, n: sz);
397 clear_type_padding_in_mask (cmptype, buf);
398 for (i = 0; i < sz; i++)
399 if (buf[i] != (unsigned char) ~0)
400 {
401 clear_padding = true;
402 break;
403 }
404 if (clear_padding && buf[i] == 0)
405 {
406 /* Try to optimize. In the common case where
407 non-padding bits are all continuous and start
408 and end at a byte boundary, we can just adjust
409 the memcmp call arguments and don't need to
410 emit __builtin_clear_padding calls. */
411 if (i == 0)
412 {
413 for (i = 0; i < sz; i++)
414 if (buf[i] != 0)
415 break;
416 if (i < sz && buf[i] == (unsigned char) ~0)
417 {
418 non_padding_start = i;
419 for (; i < sz; i++)
420 if (buf[i] != (unsigned char) ~0)
421 break;
422 }
423 else
424 i = 0;
425 }
426 if (i != 0)
427 {
428 non_padding_end = i;
429 for (; i < sz; i++)
430 if (buf[i] != 0)
431 {
432 non_padding_start = 0;
433 non_padding_end = 0;
434 break;
435 }
436 }
437 }
438 }
439 tree inttype = NULL_TREE;
440 if (!clear_padding && tree_fits_uhwi_p (TYPE_SIZE (cmptype)))
441 {
442 HOST_WIDE_INT prec = tree_to_uhwi (TYPE_SIZE (cmptype));
443 inttype = c_common_type_for_size (prec, 1);
444 if (inttype
445 && (!tree_int_cst_equal (TYPE_SIZE (cmptype),
446 TYPE_SIZE (inttype))
447 || TYPE_PRECISION (inttype) != prec))
448 inttype = NULL_TREE;
449 }
450 if (inttype)
451 {
452 TREE_OPERAND (rhs1, 0)
453 = build1_loc (loc, code: VIEW_CONVERT_EXPR, type: inttype,
454 TREE_OPERAND (rhs1, 0));
455 TREE_OPERAND (rhs1, 1)
456 = build1_loc (loc, code: VIEW_CONVERT_EXPR, type: inttype,
457 TREE_OPERAND (rhs1, 1));
458 }
459 else
460 {
461 tree pcmptype = build_pointer_type (cmptype);
462 tree tmp1 = create_tmp_var_raw (cmptype);
463 TREE_ADDRESSABLE (tmp1) = 1;
464 DECL_CONTEXT (tmp1) = current_function_decl;
465 tmp1 = build4 (TARGET_EXPR, cmptype, tmp1,
466 TREE_OPERAND (rhs1, 0), NULL, NULL);
467 tmp1 = build1 (ADDR_EXPR, pcmptype, tmp1);
468 tree tmp2 = create_tmp_var_raw (cmptype);
469 TREE_ADDRESSABLE (tmp2) = 1;
470 DECL_CONTEXT (tmp2) = current_function_decl;
471 tmp2 = build4 (TARGET_EXPR, cmptype, tmp2,
472 TREE_OPERAND (rhs1, 1), NULL, NULL);
473 tmp2 = build1 (ADDR_EXPR, pcmptype, tmp2);
474 if (non_padding_start)
475 {
476 tmp1 = build2 (POINTER_PLUS_EXPR, pcmptype, tmp1,
477 size_int (non_padding_start));
478 tmp2 = build2 (POINTER_PLUS_EXPR, pcmptype, tmp2,
479 size_int (non_padding_start));
480 }
481 tree fndecl = builtin_decl_explicit (fncode: BUILT_IN_MEMCMP);
482 rhs1 = build_call_expr_loc (loc, fndecl, 3, tmp1, tmp2,
483 non_padding_end
484 ? size_int (non_padding_end
485 - non_padding_start)
486 : TYPE_SIZE_UNIT (cmptype));
487 rhs1 = build2 (EQ_EXPR, boolean_type_node, rhs1,
488 integer_zero_node);
489 if (clear_padding && non_padding_end == 0)
490 {
491 fndecl = builtin_decl_explicit (fncode: BUILT_IN_CLEAR_PADDING);
492 tree cp1 = build_call_expr_loc (loc, fndecl, 1, tmp1);
493 tree cp2 = build_call_expr_loc (loc, fndecl, 1, tmp2);
494 rhs1 = omit_two_operands_loc (loc, boolean_type_node,
495 rhs1, cp2, cp1);
496 }
497 }
498 }
499 if (r && test)
500 rtmp = rhs1;
501 else if (r)
502 {
503 tree var = create_tmp_var_raw (boolean_type_node);
504 DECL_CONTEXT (var) = current_function_decl;
505 rtmp = build4 (TARGET_EXPR, boolean_type_node, var,
506 boolean_false_node, NULL, NULL);
507 save = in_late_binary_op;
508 in_late_binary_op = true;
509 x = build_modify_expr (loc, var, NULL_TREE, NOP_EXPR,
510 loc, rhs1, NULL_TREE);
511 in_late_binary_op = save;
512 if (x == error_mark_node)
513 return error_mark_node;
514 gcc_assert (TREE_CODE (x) == MODIFY_EXPR
515 && TREE_OPERAND (x, 0) == var);
516 TREE_OPERAND (x, 0) = rtmp;
517 rhs1 = omit_one_operand_loc (loc, boolean_type_node, x, rtmp);
518 }
519 rhs = build3_loc (loc, code: COND_EXPR, type, arg0: rhs1, arg1: rhs, arg2: new_lhs);
520 rhs1 = NULL_TREE;
521 }
522
523 /* Punt the actual generation of atomic operations to common code. */
524 if (code == OMP_ATOMIC)
525 type = void_type_node;
526 x = build2 (code, type, addr, rhs);
527 SET_EXPR_LOCATION (x, loc);
528 OMP_ATOMIC_MEMORY_ORDER (x) = memory_order;
529 OMP_ATOMIC_WEAK (x) = weak;
530
531 /* Generally it is hard to prove lhs1 and lhs are the same memory
532 location, just diagnose different variables. */
533 if (rhs1
534 && VAR_P (rhs1)
535 && VAR_P (orig_lhs)
536 && rhs1 != orig_lhs
537 && !test)
538 {
539 if (code == OMP_ATOMIC)
540 error_at (loc, "%<#pragma omp atomic update%> uses two different "
541 "variables for memory");
542 else
543 error_at (loc, "%<#pragma omp atomic capture%> uses two different "
544 "variables for memory");
545 return error_mark_node;
546 }
547
548 if (lhs1
549 && lhs1 != orig_lhs
550 && TREE_CODE (lhs1) == COMPONENT_REF
551 && TREE_CODE (TREE_OPERAND (lhs1, 1)) == FIELD_DECL
552 && DECL_C_BIT_FIELD (TREE_OPERAND (lhs1, 1))
553 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs1, 1)))
554 {
555 tree field = TREE_OPERAND (lhs1, 1);
556 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
557 lhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (lhs1, 0),
558 repr, TREE_OPERAND (lhs1, 2));
559 }
560 if (rhs1
561 && rhs1 != orig_lhs
562 && TREE_CODE (rhs1) == COMPONENT_REF
563 && TREE_CODE (TREE_OPERAND (rhs1, 1)) == FIELD_DECL
564 && DECL_C_BIT_FIELD (TREE_OPERAND (rhs1, 1))
565 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (rhs1, 1)))
566 {
567 tree field = TREE_OPERAND (rhs1, 1);
568 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
569 rhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (rhs1, 0),
570 repr, TREE_OPERAND (rhs1, 2));
571 }
572
573 if (code != OMP_ATOMIC)
574 {
575 /* Generally it is hard to prove lhs1 and lhs are the same memory
576 location, just diagnose different variables. */
577 if (lhs1 && VAR_P (lhs1) && VAR_P (orig_lhs))
578 {
579 if (lhs1 != orig_lhs && !test)
580 {
581 error_at (loc, "%<#pragma omp atomic capture%> uses two "
582 "different variables for memory");
583 return error_mark_node;
584 }
585 }
586 if (blhs)
587 x = build3_loc (loc, code: BIT_FIELD_REF, TREE_TYPE (blhs), arg0: x,
588 bitsize_int (bitsize), bitsize_int (bitpos));
589 if (r && !test)
590 {
591 vtmp = create_tmp_var_raw (TREE_TYPE (x));
592 DECL_CONTEXT (vtmp) = current_function_decl;
593 }
594 else
595 vtmp = v;
596 x = build_modify_expr (loc, vtmp, NULL_TREE, NOP_EXPR,
597 loc, x, NULL_TREE);
598 if (x == error_mark_node)
599 return error_mark_node;
600 type = TREE_TYPE (x);
601 if (r && !test)
602 {
603 vtmp = build4 (TARGET_EXPR, TREE_TYPE (vtmp), vtmp,
604 build_zero_cst (TREE_TYPE (vtmp)), NULL, NULL);
605 gcc_assert (TREE_CODE (x) == MODIFY_EXPR
606 && TREE_OPERAND (x, 0) == TARGET_EXPR_SLOT (vtmp));
607 TREE_OPERAND (x, 0) = vtmp;
608 }
609 if (rhs1 && rhs1 != orig_lhs)
610 {
611 tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, false);
612 if (rhs1addr == error_mark_node)
613 return error_mark_node;
614 x = omit_one_operand_loc (loc, type, x, rhs1addr);
615 }
616 if (lhs1 && lhs1 != orig_lhs)
617 {
618 tree lhs1addr = build_unary_op (loc, ADDR_EXPR, lhs1, false);
619 if (lhs1addr == error_mark_node)
620 return error_mark_node;
621 if (code == OMP_ATOMIC_CAPTURE_OLD)
622 x = omit_one_operand_loc (loc, type, x, lhs1addr);
623 else
624 {
625 if (!test)
626 x = save_expr (x);
627 x = omit_two_operands_loc (loc, type, x, x, lhs1addr);
628 }
629 }
630 }
631 else if (rhs1 && rhs1 != orig_lhs)
632 {
633 tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, false);
634 if (rhs1addr == error_mark_node)
635 return error_mark_node;
636 x = omit_one_operand_loc (loc, type, x, rhs1addr);
637 }
638
639 if (pre)
640 x = omit_one_operand_loc (loc, type, x, pre);
641 if (r && r != void_list_node)
642 {
643 in_late_binary_op = true;
644 tree x2 = build_modify_expr (loc, r, NULL_TREE, NOP_EXPR,
645 loc, rtmp, NULL_TREE);
646 in_late_binary_op = save;
647 if (x2 == error_mark_node)
648 return error_mark_node;
649 x = omit_one_operand_loc (loc, TREE_TYPE (x2), x2, x);
650 }
651 if (v && vtmp != v)
652 {
653 in_late_binary_op = true;
654 tree x2 = build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
655 loc, vtmp, NULL_TREE);
656 in_late_binary_op = save;
657 if (x2 == error_mark_node)
658 return error_mark_node;
659 x2 = build3_loc (loc, code: COND_EXPR, void_type_node, arg0: rtmp,
660 void_node, arg2: x2);
661 x = omit_one_operand_loc (loc, TREE_TYPE (x2), x2, x);
662 }
663 return x;
664}
665
666
667/* Return true if TYPE is the implementation's omp_depend_t. */
668
669bool
670c_omp_depend_t_p (tree type)
671{
672 type = TYPE_MAIN_VARIANT (type);
673 return (TREE_CODE (type) == RECORD_TYPE
674 && TYPE_NAME (type)
675 && ((TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
676 ? DECL_NAME (TYPE_NAME (type)) : TYPE_NAME (type))
677 == get_identifier ("omp_depend_t"))
678 && TYPE_FILE_SCOPE_P (type)
679 && COMPLETE_TYPE_P (type)
680 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
681 && !compare_tree_int (TYPE_SIZE (type),
682 2 * tree_to_uhwi (TYPE_SIZE (ptr_type_node))));
683}
684
685
686/* Complete a #pragma omp depobj construct. LOC is the location of the
687 #pragma. */
688
689void
690c_finish_omp_depobj (location_t loc, tree depobj,
691 enum omp_clause_depend_kind kind, tree clause)
692{
693 tree t = NULL_TREE;
694 if (!error_operand_p (t: depobj))
695 {
696 if (!c_omp_depend_t_p (TREE_TYPE (depobj)))
697 {
698 error_at (EXPR_LOC_OR_LOC (depobj, loc),
699 "type of %<depobj%> expression is not %<omp_depend_t%>");
700 depobj = error_mark_node;
701 }
702 else if (TYPE_READONLY (TREE_TYPE (depobj)))
703 {
704 error_at (EXPR_LOC_OR_LOC (depobj, loc),
705 "%<const%> qualified %<depobj%> expression");
706 depobj = error_mark_node;
707 }
708 }
709 else
710 depobj = error_mark_node;
711
712 if (clause == error_mark_node)
713 return;
714
715 if (clause)
716 {
717 gcc_assert (TREE_CODE (clause) == OMP_CLAUSE);
718 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_DOACROSS)
719 {
720 error_at (OMP_CLAUSE_LOCATION (clause),
721 "%<depend(%s)%> is only allowed in %<omp ordered%>",
722 OMP_CLAUSE_DOACROSS_KIND (clause)
723 == OMP_CLAUSE_DOACROSS_SOURCE
724 ? "source" : "sink");
725 return;
726 }
727 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_DEPEND);
728 if (OMP_CLAUSE_CHAIN (clause))
729 error_at (OMP_CLAUSE_LOCATION (clause),
730 "more than one locator in %<depend%> clause on %<depobj%> "
731 "construct");
732 switch (OMP_CLAUSE_DEPEND_KIND (clause))
733 {
734 case OMP_CLAUSE_DEPEND_DEPOBJ:
735 error_at (OMP_CLAUSE_LOCATION (clause),
736 "%<depobj%> dependence type specified in %<depend%> "
737 "clause on %<depobj%> construct");
738 return;
739 case OMP_CLAUSE_DEPEND_IN:
740 case OMP_CLAUSE_DEPEND_OUT:
741 case OMP_CLAUSE_DEPEND_INOUT:
742 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
743 case OMP_CLAUSE_DEPEND_INOUTSET:
744 kind = OMP_CLAUSE_DEPEND_KIND (clause);
745 t = OMP_CLAUSE_DECL (clause);
746 gcc_assert (t);
747 if (TREE_CODE (t) == TREE_LIST
748 && TREE_PURPOSE (t)
749 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
750 {
751 error_at (OMP_CLAUSE_LOCATION (clause),
752 "%<iterator%> modifier may not be specified on "
753 "%<depobj%> construct");
754 return;
755 }
756 if (TREE_CODE (t) == COMPOUND_EXPR)
757 {
758 tree t1 = build_fold_addr_expr (TREE_OPERAND (t, 1));
759 t = build2 (COMPOUND_EXPR, TREE_TYPE (t1), TREE_OPERAND (t, 0),
760 t1);
761 }
762 else if (t != null_pointer_node)
763 t = build_fold_addr_expr (t);
764 break;
765 default:
766 gcc_unreachable ();
767 }
768 }
769 else
770 gcc_assert (kind != OMP_CLAUSE_DEPEND_INVALID);
771
772 if (depobj == error_mark_node)
773 return;
774
775 depobj = build_fold_addr_expr_loc (EXPR_LOC_OR_LOC (depobj, loc), depobj);
776 tree dtype
777 = build_pointer_type_for_mode (ptr_type_node, TYPE_MODE (ptr_type_node),
778 true);
779 depobj = fold_convert (dtype, depobj);
780 tree r;
781 if (clause)
782 {
783 depobj = save_expr (depobj);
784 r = build_indirect_ref (loc, depobj, RO_UNARY_STAR);
785 add_stmt (build2 (MODIFY_EXPR, void_type_node, r, t));
786 }
787 int k;
788 switch (kind)
789 {
790 case OMP_CLAUSE_DEPEND_IN:
791 k = GOMP_DEPEND_IN;
792 break;
793 case OMP_CLAUSE_DEPEND_OUT:
794 k = GOMP_DEPEND_OUT;
795 break;
796 case OMP_CLAUSE_DEPEND_INOUT:
797 k = GOMP_DEPEND_INOUT;
798 break;
799 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
800 k = GOMP_DEPEND_MUTEXINOUTSET;
801 break;
802 case OMP_CLAUSE_DEPEND_INOUTSET:
803 k = GOMP_DEPEND_INOUTSET;
804 break;
805 case OMP_CLAUSE_DEPEND_LAST:
806 k = -1;
807 break;
808 default:
809 gcc_unreachable ();
810 }
811 t = build_int_cst (ptr_type_node, k);
812 depobj = build2_loc (loc, code: POINTER_PLUS_EXPR, TREE_TYPE (depobj), arg0: depobj,
813 TYPE_SIZE_UNIT (ptr_type_node));
814 r = build_indirect_ref (loc, depobj, RO_UNARY_STAR);
815 add_stmt (build2 (MODIFY_EXPR, void_type_node, r, t));
816}
817
818
819/* Complete a #pragma omp flush construct. We don't do anything with
820 the variable list that the syntax allows. LOC is the location of
821 the #pragma. */
822
823void
824c_finish_omp_flush (location_t loc, int mo)
825{
826 tree x;
827
828 if (mo == MEMMODEL_LAST || mo == MEMMODEL_SEQ_CST)
829 {
830 x = builtin_decl_explicit (fncode: BUILT_IN_SYNC_SYNCHRONIZE);
831 x = build_call_expr_loc (loc, x, 0);
832 }
833 else
834 {
835 x = builtin_decl_explicit (fncode: BUILT_IN_ATOMIC_THREAD_FENCE);
836 x = build_call_expr_loc (loc, x, 1,
837 build_int_cst (integer_type_node, mo));
838 }
839 add_stmt (x);
840}
841
842
843/* Check and canonicalize OMP_FOR increment expression.
844 Helper function for c_finish_omp_for. */
845
846static tree
847check_omp_for_incr_expr (location_t loc, tree exp, tree decl)
848{
849 tree t;
850
851 if (!INTEGRAL_TYPE_P (TREE_TYPE (exp))
852 || TYPE_PRECISION (TREE_TYPE (exp)) < TYPE_PRECISION (TREE_TYPE (decl)))
853 return error_mark_node;
854
855 if (exp == decl)
856 return build_int_cst (TREE_TYPE (exp), 0);
857
858 switch (TREE_CODE (exp))
859 {
860 CASE_CONVERT:
861 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
862 if (t != error_mark_node)
863 return fold_convert_loc (loc, TREE_TYPE (exp), t);
864 break;
865 case MINUS_EXPR:
866 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
867 if (t != error_mark_node)
868 return fold_build2_loc (loc, MINUS_EXPR,
869 TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
870 break;
871 case PLUS_EXPR:
872 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
873 if (t != error_mark_node)
874 return fold_build2_loc (loc, PLUS_EXPR,
875 TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
876 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 1), decl);
877 if (t != error_mark_node)
878 return fold_build2_loc (loc, PLUS_EXPR,
879 TREE_TYPE (exp), TREE_OPERAND (exp, 0), t);
880 break;
881 case COMPOUND_EXPR:
882 {
883 /* cp_build_modify_expr forces preevaluation of the RHS to make
884 sure that it is evaluated before the lvalue-rvalue conversion
885 is applied to the LHS. Reconstruct the original expression. */
886 tree op0 = TREE_OPERAND (exp, 0);
887 if (TREE_CODE (op0) == TARGET_EXPR
888 && !VOID_TYPE_P (TREE_TYPE (op0)))
889 {
890 tree op1 = TREE_OPERAND (exp, 1);
891 tree temp = TARGET_EXPR_SLOT (op0);
892 if (BINARY_CLASS_P (op1)
893 && TREE_OPERAND (op1, 1) == temp)
894 {
895 op1 = copy_node (op1);
896 TREE_OPERAND (op1, 1) = TARGET_EXPR_INITIAL (op0);
897 return check_omp_for_incr_expr (loc, exp: op1, decl);
898 }
899 }
900 break;
901 }
902 default:
903 break;
904 }
905
906 return error_mark_node;
907}
908
909/* If the OMP_FOR increment expression in INCR is of pointer type,
910 canonicalize it into an expression handled by gimplify_omp_for()
911 and return it. DECL is the iteration variable. */
912
913static tree
914c_omp_for_incr_canonicalize_ptr (location_t loc, tree decl, tree incr)
915{
916 if (POINTER_TYPE_P (TREE_TYPE (decl))
917 && TREE_OPERAND (incr, 1))
918 {
919 tree t = fold_convert_loc (loc,
920 sizetype, TREE_OPERAND (incr, 1));
921
922 if (TREE_CODE (incr) == POSTDECREMENT_EXPR
923 || TREE_CODE (incr) == PREDECREMENT_EXPR)
924 t = fold_build1_loc (loc, NEGATE_EXPR, sizetype, t);
925 t = fold_build_pointer_plus (decl, t);
926 incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
927 }
928 return incr;
929}
930
931/* Validate and generate OMP_FOR.
932 DECLV is a vector of iteration variables, for each collapsed loop.
933
934 ORIG_DECLV, if non-NULL, is a vector with the original iteration
935 variables (prior to any transformations, by say, C++ iterators).
936
937 INITV, CONDV and INCRV are vectors containing initialization
938 expressions, controlling predicates and increment expressions.
939 BODY is the body of the loop and PRE_BODY statements that go before
940 the loop. */
941
942tree
943c_finish_omp_for (location_t locus, enum tree_code code, tree declv,
944 tree orig_declv, tree initv, tree condv, tree incrv,
945 tree body, tree pre_body, bool final_p)
946{
947 location_t elocus;
948 bool fail = false;
949 int i;
950
951 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
952 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
953 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
954 for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
955 {
956 tree decl = TREE_VEC_ELT (declv, i);
957 tree init = TREE_VEC_ELT (initv, i);
958 tree cond = TREE_VEC_ELT (condv, i);
959 tree incr = TREE_VEC_ELT (incrv, i);
960
961 elocus = locus;
962 if (EXPR_HAS_LOCATION (init))
963 elocus = EXPR_LOCATION (init);
964
965 /* Validate the iteration variable. */
966 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
967 && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
968 {
969 error_at (elocus, "invalid type for iteration variable %qE", decl);
970 fail = true;
971 }
972 else if (TYPE_ATOMIC (TREE_TYPE (decl)))
973 {
974 error_at (elocus, "%<_Atomic%> iteration variable %qE", decl);
975 fail = true;
976 /* _Atomic iterator confuses stuff too much, so we risk ICE
977 trying to diagnose it further. */
978 continue;
979 }
980
981 /* In the case of "for (int i = 0...)", init will be a decl. It should
982 have a DECL_INITIAL that we can turn into an assignment. */
983 if (init == decl)
984 {
985 elocus = DECL_SOURCE_LOCATION (decl);
986
987 init = DECL_INITIAL (decl);
988 if (init == NULL)
989 {
990 error_at (elocus, "%qE is not initialized", decl);
991 init = integer_zero_node;
992 fail = true;
993 }
994 DECL_INITIAL (decl) = NULL_TREE;
995
996 init = build_modify_expr (elocus, decl, NULL_TREE, NOP_EXPR,
997 /* FIXME diagnostics: This should
998 be the location of the INIT. */
999 elocus,
1000 init,
1001 NULL_TREE);
1002 }
1003 if (init != error_mark_node)
1004 {
1005 gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
1006 gcc_assert (TREE_OPERAND (init, 0) == decl);
1007 }
1008
1009 if (cond == NULL_TREE)
1010 {
1011 error_at (elocus, "missing controlling predicate");
1012 fail = true;
1013 }
1014 else
1015 {
1016 bool cond_ok = false;
1017
1018 /* E.g. C sizeof (vla) could add COMPOUND_EXPRs with
1019 evaluation of the vla VAR_DECL. We need to readd
1020 them to the non-decl operand. See PR45784. */
1021 while (TREE_CODE (cond) == COMPOUND_EXPR)
1022 cond = TREE_OPERAND (cond, 1);
1023
1024 if (EXPR_HAS_LOCATION (cond))
1025 elocus = EXPR_LOCATION (cond);
1026
1027 if (TREE_CODE (cond) == LT_EXPR
1028 || TREE_CODE (cond) == LE_EXPR
1029 || TREE_CODE (cond) == GT_EXPR
1030 || TREE_CODE (cond) == GE_EXPR
1031 || TREE_CODE (cond) == NE_EXPR
1032 || TREE_CODE (cond) == EQ_EXPR)
1033 {
1034 tree op0 = TREE_OPERAND (cond, 0);
1035 tree op1 = TREE_OPERAND (cond, 1);
1036
1037 /* 2.5.1. The comparison in the condition is computed in
1038 the type of DECL, otherwise the behavior is undefined.
1039
1040 For example:
1041 long n; int i;
1042 i < n;
1043
1044 according to ISO will be evaluated as:
1045 (long)i < n;
1046
1047 We want to force:
1048 i < (int)n; */
1049 if (TREE_CODE (op0) == NOP_EXPR
1050 && decl == TREE_OPERAND (op0, 0))
1051 {
1052 TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
1053 TREE_OPERAND (cond, 1)
1054 = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
1055 TREE_OPERAND (cond, 1));
1056 }
1057 else if (TREE_CODE (op1) == NOP_EXPR
1058 && decl == TREE_OPERAND (op1, 0))
1059 {
1060 TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
1061 TREE_OPERAND (cond, 0)
1062 = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
1063 TREE_OPERAND (cond, 0));
1064 }
1065
1066 if (decl == TREE_OPERAND (cond, 0))
1067 cond_ok = true;
1068 else if (decl == TREE_OPERAND (cond, 1))
1069 {
1070 TREE_SET_CODE (cond,
1071 swap_tree_comparison (TREE_CODE (cond)));
1072 TREE_OPERAND (cond, 1) = TREE_OPERAND (cond, 0);
1073 TREE_OPERAND (cond, 0) = decl;
1074 cond_ok = true;
1075 }
1076
1077 if (TREE_CODE (cond) == NE_EXPR
1078 || TREE_CODE (cond) == EQ_EXPR)
1079 {
1080 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)))
1081 {
1082 if (code == OACC_LOOP || TREE_CODE (cond) == EQ_EXPR)
1083 cond_ok = false;
1084 }
1085 else if (operand_equal_p (TREE_OPERAND (cond, 1),
1086 TYPE_MIN_VALUE (TREE_TYPE (decl)),
1087 flags: 0))
1088 TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
1089 ? GT_EXPR : LE_EXPR);
1090 else if (operand_equal_p (TREE_OPERAND (cond, 1),
1091 TYPE_MAX_VALUE (TREE_TYPE (decl)),
1092 flags: 0))
1093 TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
1094 ? LT_EXPR : GE_EXPR);
1095 else if (code == OACC_LOOP || TREE_CODE (cond) == EQ_EXPR)
1096 cond_ok = false;
1097 }
1098
1099 if (cond_ok && TREE_VEC_ELT (condv, i) != cond)
1100 {
1101 tree ce = NULL_TREE, *pce = &ce;
1102 tree type = TREE_TYPE (TREE_OPERAND (cond, 1));
1103 for (tree c = TREE_VEC_ELT (condv, i); c != cond;
1104 c = TREE_OPERAND (c, 1))
1105 {
1106 *pce = build2 (COMPOUND_EXPR, type, TREE_OPERAND (c, 0),
1107 TREE_OPERAND (cond, 1));
1108 pce = &TREE_OPERAND (*pce, 1);
1109 }
1110 TREE_OPERAND (cond, 1) = ce;
1111 TREE_VEC_ELT (condv, i) = cond;
1112 }
1113 }
1114
1115 if (!cond_ok)
1116 {
1117 error_at (elocus, "invalid controlling predicate");
1118 fail = true;
1119 }
1120 }
1121
1122 if (incr == NULL_TREE)
1123 {
1124 error_at (elocus, "missing increment expression");
1125 fail = true;
1126 }
1127 else
1128 {
1129 bool incr_ok = false;
1130
1131 if (EXPR_HAS_LOCATION (incr))
1132 elocus = EXPR_LOCATION (incr);
1133
1134 /* Check all the valid increment expressions: v++, v--, ++v, --v,
1135 v = v + incr, v = incr + v and v = v - incr. */
1136 switch (TREE_CODE (incr))
1137 {
1138 case POSTINCREMENT_EXPR:
1139 case PREINCREMENT_EXPR:
1140 case POSTDECREMENT_EXPR:
1141 case PREDECREMENT_EXPR:
1142 if (TREE_OPERAND (incr, 0) != decl)
1143 break;
1144
1145 incr_ok = true;
1146 if (!fail
1147 && TREE_CODE (cond) == NE_EXPR
1148 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
1149 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
1150 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
1151 != INTEGER_CST))
1152 {
1153 /* For pointer to VLA, transform != into < or >
1154 depending on whether incr is increment or decrement. */
1155 if (TREE_CODE (incr) == PREINCREMENT_EXPR
1156 || TREE_CODE (incr) == POSTINCREMENT_EXPR)
1157 TREE_SET_CODE (cond, LT_EXPR);
1158 else
1159 TREE_SET_CODE (cond, GT_EXPR);
1160 }
1161 incr = c_omp_for_incr_canonicalize_ptr (loc: elocus, decl, incr);
1162 break;
1163
1164 case COMPOUND_EXPR:
1165 if (TREE_CODE (TREE_OPERAND (incr, 0)) != SAVE_EXPR
1166 || TREE_CODE (TREE_OPERAND (incr, 1)) != MODIFY_EXPR)
1167 break;
1168 incr = TREE_OPERAND (incr, 1);
1169 /* FALLTHRU */
1170 case MODIFY_EXPR:
1171 if (TREE_OPERAND (incr, 0) != decl)
1172 break;
1173 if (TREE_OPERAND (incr, 1) == decl)
1174 break;
1175 if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
1176 && (TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl
1177 || TREE_OPERAND (TREE_OPERAND (incr, 1), 1) == decl))
1178 incr_ok = true;
1179 else if ((TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR
1180 || (TREE_CODE (TREE_OPERAND (incr, 1))
1181 == POINTER_PLUS_EXPR))
1182 && TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl)
1183 incr_ok = true;
1184 else
1185 {
1186 tree t = check_omp_for_incr_expr (loc: elocus,
1187 TREE_OPERAND (incr, 1),
1188 decl);
1189 if (t != error_mark_node)
1190 {
1191 incr_ok = true;
1192 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
1193 incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
1194 }
1195 }
1196 if (!fail
1197 && incr_ok
1198 && TREE_CODE (cond) == NE_EXPR)
1199 {
1200 tree i = TREE_OPERAND (incr, 1);
1201 i = TREE_OPERAND (i, TREE_OPERAND (i, 0) == decl);
1202 i = c_fully_fold (i, false, NULL);
1203 if (!final_p
1204 && TREE_CODE (i) != INTEGER_CST)
1205 ;
1206 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
1207 {
1208 tree unit
1209 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
1210 if (unit)
1211 {
1212 enum tree_code ccode = GT_EXPR;
1213 unit = c_fully_fold (unit, false, NULL);
1214 i = fold_convert (TREE_TYPE (unit), i);
1215 if (operand_equal_p (unit, i, flags: 0))
1216 ccode = LT_EXPR;
1217 if (ccode == GT_EXPR)
1218 {
1219 i = fold_unary (NEGATE_EXPR, TREE_TYPE (i), i);
1220 if (i == NULL_TREE
1221 || !operand_equal_p (unit, i, flags: 0))
1222 {
1223 error_at (elocus,
1224 "increment is not constant 1 or "
1225 "-1 for %<!=%> condition");
1226 fail = true;
1227 }
1228 }
1229 if (TREE_CODE (unit) != INTEGER_CST)
1230 /* For pointer to VLA, transform != into < or >
1231 depending on whether the pointer is
1232 incremented or decremented in each
1233 iteration. */
1234 TREE_SET_CODE (cond, ccode);
1235 }
1236 }
1237 else
1238 {
1239 if (!integer_onep (i) && !integer_minus_onep (i))
1240 {
1241 error_at (elocus,
1242 "increment is not constant 1 or -1 for"
1243 " %<!=%> condition");
1244 fail = true;
1245 }
1246 }
1247 }
1248 break;
1249
1250 default:
1251 break;
1252 }
1253 if (!incr_ok)
1254 {
1255 error_at (elocus, "invalid increment expression");
1256 fail = true;
1257 }
1258 }
1259
1260 TREE_VEC_ELT (initv, i) = init;
1261 TREE_VEC_ELT (incrv, i) = incr;
1262 }
1263
1264 if (fail)
1265 return NULL;
1266 else
1267 {
1268 tree t = make_node (code);
1269
1270 TREE_TYPE (t) = void_type_node;
1271 OMP_FOR_INIT (t) = initv;
1272 OMP_FOR_COND (t) = condv;
1273 OMP_FOR_INCR (t) = incrv;
1274 OMP_FOR_BODY (t) = body;
1275 OMP_FOR_PRE_BODY (t) = pre_body;
1276 OMP_FOR_ORIG_DECLS (t) = orig_declv;
1277
1278 SET_EXPR_LOCATION (t, locus);
1279 return t;
1280 }
1281}
1282
1283/* Type for passing data in between c_omp_check_loop_iv and
1284 c_omp_check_loop_iv_r. */
1285
struct c_omp_check_loop_iv_data
{
  /* TREE_VEC of the iteration variables of the associated loops.  */
  tree declv;
  /* Set upon diagnosing an error.  */
  bool fail;
  /* Set by c_omp_check_loop_iv_r when a reference to an outer loop's
     iterator is seen that might be a valid non-rectangular loop bound
     rather than an error.  */
  bool maybe_nonrect;
  /* Location of the whole construct, used when an expression has no
     location of its own.  */
  location_t stmt_loc;
  /* Location of the expression currently being walked.  */
  location_t expr_loc;
  /* Bits 0-1 select the diagnostic: 0 initializer, 1 condition,
     2 increment.  Bit 2 means references to outer-loop iterators may be
     a non-rectangular loop specification instead of an error.  */
  int kind;
  /* Index of the loop whose expressions are currently being checked.  */
  int idx;
  /* Language-specific tree walk callback passed through to walk_tree_1.  */
  walk_tree_lh lh;
  /* Pointer set used to avoid walking shared trees multiple times.  */
  hash_set<tree> *ppset;
};
1298
1299/* Return -1 if DECL is not a loop iterator in loop nest D, otherwise
1300 return the index of the loop in which it is an iterator.
1301 Return TREE_VEC_LENGTH (d->declv) if it is a C++ range for iterator. */
1302
1303static int
1304c_omp_is_loop_iterator (tree decl, struct c_omp_check_loop_iv_data *d)
1305{
1306 for (int i = 0; i < TREE_VEC_LENGTH (d->declv); i++)
1307 if (decl == TREE_VEC_ELT (d->declv, i)
1308 || (TREE_CODE (TREE_VEC_ELT (d->declv, i)) == TREE_LIST
1309 && decl == TREE_PURPOSE (TREE_VEC_ELT (d->declv, i))))
1310 return i;
1311 else if (TREE_CODE (TREE_VEC_ELT (d->declv, i)) == TREE_LIST
1312 && TREE_CHAIN (TREE_VEC_ELT (d->declv, i))
1313 && (TREE_CODE (TREE_CHAIN (TREE_VEC_ELT (d->declv, i)))
1314 == TREE_VEC))
1315 for (int j = 2;
1316 j < TREE_VEC_LENGTH (TREE_CHAIN (TREE_VEC_ELT (d->declv, i))); j++)
1317 if (decl == TREE_VEC_ELT (TREE_CHAIN (TREE_VEC_ELT (d->declv, i)), j))
1318 return TREE_VEC_LENGTH (d->declv);
1319 return -1;
1320}
1321
1322/* Helper function called via walk_tree, to diagnose uses
1323 of associated loop IVs inside of lb, b and incr expressions
1324 of OpenMP loops. */
1325
static tree
c_omp_check_loop_iv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct c_omp_check_loop_iv_data *d
    = (struct c_omp_check_loop_iv_data *) data;
  if (DECL_P (*tp))
    {
      /* A decl: diagnose only if it is one of the loop iterators.  */
      int idx = c_omp_is_loop_iterator (decl: *tp, d);
      if (idx == -1)
	return NULL_TREE;

      /* With bit 2 of kind set, a reference to an outer loop's iterator
	 may be a non-rectangular loop bound; record it for the caller
	 (c_omp_check_nonrect_loop_iv) instead of diagnosing here.  */
      if ((d->kind & 4) && idx < d->idx)
	{
	  d->maybe_nonrect = true;
	  return NULL_TREE;
	}

      /* Diagnose each iterator only once per expression.  */
      if (d->ppset->add (k: *tp))
	return NULL_TREE;

      location_t loc = d->expr_loc;
      if (loc == UNKNOWN_LOCATION)
	loc = d->stmt_loc;

      /* Bits 0-1 of kind identify which expression is being walked.  */
      switch (d->kind & 3)
	{
	case 0:
	  error_at (loc, "initializer expression refers to "
			 "iteration variable %qD", *tp);
	  break;
	case 1:
	  error_at (loc, "condition expression refers to "
			 "iteration variable %qD", *tp);
	  break;
	case 2:
	  error_at (loc, "increment expression refers to "
			 "iteration variable %qD", *tp);
	  break;
	}
      d->fail = true;
    }
  else if ((d->kind & 4)
	   && TREE_CODE (*tp) != TREE_VEC
	   && TREE_CODE (*tp) != PLUS_EXPR
	   && TREE_CODE (*tp) != MINUS_EXPR
	   && TREE_CODE (*tp) != MULT_EXPR
	   && TREE_CODE (*tp) != POINTER_PLUS_EXPR
	   && !CONVERT_EXPR_P (*tp))
    {
      /* Outside of the tree codes that can form a valid non-rectangular
	 bound (a1 * var + a2 with conversions), re-walk this subtree with
	 bit 2 cleared so any iterator reference inside it is an error.  */
      *walk_subtrees = 0;
      d->kind &= 3;
      walk_tree_1 (tp, c_omp_check_loop_iv_r, data, NULL, d->lh);
      d->kind |= 4;
      return NULL_TREE;
    }
  else if (d->ppset->add (k: *tp))
    /* Already visited this shared tree; don't walk it again.  */
    *walk_subtrees = 0;
  /* Don't walk dtors added by C++ wrap_cleanups_r.  */
  else if (TREE_CODE (*tp) == TRY_CATCH_EXPR
	   && TRY_CATCH_IS_CLEANUP (*tp))
    {
      *walk_subtrees = 0;
      return walk_tree_1 (&TREE_OPERAND (*tp, 0), c_omp_check_loop_iv_r, data,
			  NULL, d->lh);
    }

  return NULL_TREE;
}
1394
1395/* Check the allowed expressions for non-rectangular loop nest lb and b
1396 expressions. Return the outer var decl referenced in the expression. */
1397
1398static tree
1399c_omp_check_nonrect_loop_iv (tree *tp, struct c_omp_check_loop_iv_data *d,
1400 walk_tree_lh lh)
1401{
1402 d->maybe_nonrect = false;
1403 if (d->fail)
1404 return NULL_TREE;
1405
1406 hash_set<tree> pset;
1407 hash_set<tree> *ppset = d->ppset;
1408 d->ppset = &pset;
1409
1410 tree t = *tp;
1411 if (TREE_CODE (t) == TREE_VEC
1412 && TREE_VEC_LENGTH (t) == 3
1413 && DECL_P (TREE_VEC_ELT (t, 0))
1414 && c_omp_is_loop_iterator (TREE_VEC_ELT (t, 0), d) >= 0)
1415 {
1416 d->kind &= 3;
1417 walk_tree_1 (&TREE_VEC_ELT (t, 1), c_omp_check_loop_iv_r, d, NULL, lh);
1418 walk_tree_1 (&TREE_VEC_ELT (t, 1), c_omp_check_loop_iv_r, d, NULL, lh);
1419 d->ppset = ppset;
1420 return d->fail ? NULL_TREE : TREE_VEC_ELT (t, 0);
1421 }
1422
1423 while (CONVERT_EXPR_P (t))
1424 t = TREE_OPERAND (t, 0);
1425
1426 tree a1 = t, a2 = integer_zero_node;
1427 bool neg_a1 = false, neg_a2 = false;
1428 switch (TREE_CODE (t))
1429 {
1430 case PLUS_EXPR:
1431 case MINUS_EXPR:
1432 a1 = TREE_OPERAND (t, 0);
1433 a2 = TREE_OPERAND (t, 1);
1434 while (CONVERT_EXPR_P (a1))
1435 a1 = TREE_OPERAND (a1, 0);
1436 while (CONVERT_EXPR_P (a2))
1437 a2 = TREE_OPERAND (a2, 0);
1438 if (DECL_P (a1) && c_omp_is_loop_iterator (decl: a1, d) >= 0)
1439 {
1440 a2 = TREE_OPERAND (t, 1);
1441 if (TREE_CODE (t) == MINUS_EXPR)
1442 neg_a2 = true;
1443 t = a1;
1444 break;
1445 }
1446 if (DECL_P (a2) && c_omp_is_loop_iterator (decl: a2, d) >= 0)
1447 {
1448 a1 = TREE_OPERAND (t, 0);
1449 if (TREE_CODE (t) == MINUS_EXPR)
1450 neg_a1 = true;
1451 t = a2;
1452 a2 = a1;
1453 break;
1454 }
1455 if (TREE_CODE (a1) == MULT_EXPR && TREE_CODE (a2) == MULT_EXPR)
1456 {
1457 tree o1 = TREE_OPERAND (a1, 0);
1458 tree o2 = TREE_OPERAND (a1, 1);
1459 while (CONVERT_EXPR_P (o1))
1460 o1 = TREE_OPERAND (o1, 0);
1461 while (CONVERT_EXPR_P (o2))
1462 o2 = TREE_OPERAND (o2, 0);
1463 if ((DECL_P (o1) && c_omp_is_loop_iterator (decl: o1, d) >= 0)
1464 || (DECL_P (o2) && c_omp_is_loop_iterator (decl: o2, d) >= 0))
1465 {
1466 a2 = TREE_OPERAND (t, 1);
1467 if (TREE_CODE (t) == MINUS_EXPR)
1468 neg_a2 = true;
1469 t = a1;
1470 break;
1471 }
1472 }
1473 if (TREE_CODE (a2) == MULT_EXPR)
1474 {
1475 a1 = TREE_OPERAND (t, 0);
1476 if (TREE_CODE (t) == MINUS_EXPR)
1477 neg_a1 = true;
1478 t = a2;
1479 a2 = a1;
1480 break;
1481 }
1482 if (TREE_CODE (a1) == MULT_EXPR)
1483 {
1484 a2 = TREE_OPERAND (t, 1);
1485 if (TREE_CODE (t) == MINUS_EXPR)
1486 neg_a2 = true;
1487 t = a1;
1488 break;
1489 }
1490 a2 = integer_zero_node;
1491 break;
1492 case POINTER_PLUS_EXPR:
1493 a1 = TREE_OPERAND (t, 0);
1494 a2 = TREE_OPERAND (t, 1);
1495 while (CONVERT_EXPR_P (a1))
1496 a1 = TREE_OPERAND (a1, 0);
1497 if (DECL_P (a1) && c_omp_is_loop_iterator (decl: a1, d) >= 0)
1498 {
1499 a2 = TREE_OPERAND (t, 1);
1500 t = a1;
1501 break;
1502 }
1503 break;
1504 default:
1505 break;
1506 }
1507
1508 a1 = integer_one_node;
1509 if (TREE_CODE (t) == MULT_EXPR)
1510 {
1511 tree o1 = TREE_OPERAND (t, 0);
1512 tree o2 = TREE_OPERAND (t, 1);
1513 while (CONVERT_EXPR_P (o1))
1514 o1 = TREE_OPERAND (o1, 0);
1515 while (CONVERT_EXPR_P (o2))
1516 o2 = TREE_OPERAND (o2, 0);
1517 if (DECL_P (o1) && c_omp_is_loop_iterator (decl: o1, d) >= 0)
1518 {
1519 a1 = TREE_OPERAND (t, 1);
1520 t = o1;
1521 }
1522 else if (DECL_P (o2) && c_omp_is_loop_iterator (decl: o2, d) >= 0)
1523 {
1524 a1 = TREE_OPERAND (t, 0);
1525 t = o2;
1526 }
1527 }
1528
1529 d->kind &= 3;
1530 tree ret = NULL_TREE;
1531 if (DECL_P (t) && c_omp_is_loop_iterator (decl: t, d) >= 0)
1532 {
1533 location_t loc = d->expr_loc;
1534 if (loc == UNKNOWN_LOCATION)
1535 loc = d->stmt_loc;
1536 if (!lang_hooks.types_compatible_p (TREE_TYPE (*tp), TREE_TYPE (t)))
1537 {
1538 if (d->kind == 0)
1539 error_at (loc, "outer iteration variable %qD used in initializer"
1540 " expression has type other than %qT",
1541 t, TREE_TYPE (*tp));
1542 else
1543 error_at (loc, "outer iteration variable %qD used in condition"
1544 " expression has type other than %qT",
1545 t, TREE_TYPE (*tp));
1546 d->fail = true;
1547 }
1548 else if (!INTEGRAL_TYPE_P (TREE_TYPE (a1)))
1549 {
1550 error_at (loc, "outer iteration variable %qD multiplier expression"
1551 " %qE is not integral", t, a1);
1552 d->fail = true;
1553 }
1554 else if (!INTEGRAL_TYPE_P (TREE_TYPE (a2)))
1555 {
1556 error_at (loc, "outer iteration variable %qD addend expression"
1557 " %qE is not integral", t, a2);
1558 d->fail = true;
1559 }
1560 else
1561 {
1562 walk_tree_1 (&a1, c_omp_check_loop_iv_r, d, NULL, lh);
1563 walk_tree_1 (&a2, c_omp_check_loop_iv_r, d, NULL, lh);
1564 }
1565 if (!d->fail)
1566 {
1567 a1 = fold_convert (TREE_TYPE (*tp), a1);
1568 a2 = fold_convert (TREE_TYPE (*tp), a2);
1569 if (neg_a1)
1570 a1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (a1), a1);
1571 if (neg_a2)
1572 a2 = fold_build1 (NEGATE_EXPR, TREE_TYPE (a2), a2);
1573 ret = t;
1574 *tp = make_tree_vec (3);
1575 TREE_VEC_ELT (*tp, 0) = t;
1576 TREE_VEC_ELT (*tp, 1) = a1;
1577 TREE_VEC_ELT (*tp, 2) = a2;
1578 }
1579 }
1580 else
1581 walk_tree_1 (&t, c_omp_check_loop_iv_r, d, NULL, lh);
1582
1583 d->ppset = ppset;
1584 return ret;
1585}
1586
1587/* Diagnose invalid references to loop iterators in lb, b and incr
1588 expressions. */
1589
bool
c_omp_check_loop_iv (tree stmt, tree declv, walk_tree_lh lh)
{
  hash_set<tree> pset;
  struct c_omp_check_loop_iv_data data;
  int i;

  data.declv = declv;
  data.fail = false;
  data.maybe_nonrect = false;
  data.stmt_loc = EXPR_LOCATION (stmt);
  data.lh = lh;
  data.ppset = &pset;
  /* Walk the init, cond and incr expressions of every associated loop.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
    {
      tree init = TREE_VEC_ELT (OMP_FOR_INIT (stmt), i);
      gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
      tree decl = TREE_OPERAND (init, 0);
      tree cond = TREE_VEC_ELT (OMP_FOR_COND (stmt), i);
      gcc_assert (COMPARISON_CLASS_P (cond));
      gcc_assert (TREE_OPERAND (cond, 0) == decl);
      tree incr = TREE_VEC_ELT (OMP_FOR_INCR (stmt), i);
      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (init, 1));
      tree vec_outer1 = NULL_TREE, vec_outer2 = NULL_TREE;
      int kind = 0;
      /* Reject reuse of an outer loop's iterator as this loop's
	 iterator.  */
      if (i > 0
	  && (unsigned) c_omp_is_loop_iterator (decl, d: &data) < (unsigned) i)
	{
	  location_t loc = data.expr_loc;
	  if (loc == UNKNOWN_LOCATION)
	    loc = data.stmt_loc;
	  error_at (loc, "the same loop iteration variables %qD used in "
			 "multiple associated loops", decl);
	  data.fail = true;
	}
      /* Handle non-rectangular loop nests.  */
      if (TREE_CODE (stmt) != OACC_LOOP && i > 0)
	kind = 4;
      data.kind = kind;
      data.idx = i;
      /* Check the initializer expression (kind bits 0-1 == 0).  */
      walk_tree_1 (&TREE_OPERAND (init, 1),
		   c_omp_check_loop_iv_r, &data, NULL, lh);
      if (data.maybe_nonrect)
	vec_outer1 = c_omp_check_nonrect_loop_iv (tp: &TREE_OPERAND (init, 1),
						  d: &data, lh);
      /* Don't warn for C++ random access iterators here, the
	 expression then involves the subtraction and always refers
	 to the original value.  The C++ FE needs to warn on those
	 earlier.  */
      if (decl == TREE_VEC_ELT (declv, i)
	  || (TREE_CODE (TREE_VEC_ELT (declv, i)) == TREE_LIST
	      && decl == TREE_PURPOSE (TREE_VEC_ELT (declv, i))))
	{
	  /* Check the condition bound (kind bits 0-1 == 1).  */
	  data.expr_loc = EXPR_LOCATION (cond);
	  data.kind = kind | 1;
	  walk_tree_1 (&TREE_OPERAND (cond, 1),
		       c_omp_check_loop_iv_r, &data, NULL, lh);
	  if (data.maybe_nonrect)
	    vec_outer2 = c_omp_check_nonrect_loop_iv (tp: &TREE_OPERAND (cond, 1),
						      d: &data, lh);
	}
      /* A single loop may depend on at most one outer iterator.  */
      if (vec_outer1 && vec_outer2 && vec_outer1 != vec_outer2)
	{
	  location_t loc = data.expr_loc;
	  if (loc == UNKNOWN_LOCATION)
	    loc = data.stmt_loc;
	  error_at (loc, "two different outer iteration variables %qD and %qD"
			 " used in a single loop", vec_outer1, vec_outer2);
	  data.fail = true;
	}
      if (vec_outer1 || vec_outer2)
	OMP_FOR_NON_RECTANGULAR (stmt) = 1;
      /* Check the increment expression (kind == 2); only the non-DECL
	 operand of the addition/subtraction needs walking.  */
      if (TREE_CODE (incr) == MODIFY_EXPR)
	{
	  gcc_assert (TREE_OPERAND (incr, 0) == decl);
	  incr = TREE_OPERAND (incr, 1);
	  data.kind = 2;
	  if (TREE_CODE (incr) == PLUS_EXPR
	      && TREE_OPERAND (incr, 1) == decl)
	    {
	      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (incr, 0));
	      walk_tree_1 (&TREE_OPERAND (incr, 0),
			   c_omp_check_loop_iv_r, &data, NULL, lh);
	    }
	  else
	    {
	      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (incr, 1));
	      walk_tree_1 (&TREE_OPERAND (incr, 1),
			   c_omp_check_loop_iv_r, &data, NULL, lh);
	    }
	}
    }
  return !data.fail;
}
1684
1685/* Similar, but allows to check the init or cond expressions individually. */
1686
1687bool
1688c_omp_check_loop_iv_exprs (location_t stmt_loc, enum tree_code code,
1689 tree declv, int i, tree decl, tree init, tree cond,
1690 walk_tree_lh lh)
1691{
1692 hash_set<tree> pset;
1693 struct c_omp_check_loop_iv_data data;
1694 int kind = (code != OACC_LOOP && i > 0) ? 4 : 0;
1695
1696 data.declv = declv;
1697 data.fail = false;
1698 data.maybe_nonrect = false;
1699 data.stmt_loc = stmt_loc;
1700 data.lh = lh;
1701 data.ppset = &pset;
1702 data.idx = i;
1703 if (i > 0
1704 && (unsigned) c_omp_is_loop_iterator (decl, d: &data) < (unsigned) i)
1705 {
1706 error_at (stmt_loc, "the same loop iteration variables %qD used in "
1707 "multiple associated loops", decl);
1708 data.fail = true;
1709 }
1710 if (init)
1711 {
1712 data.expr_loc = EXPR_LOCATION (init);
1713 data.kind = kind;
1714 walk_tree_1 (&init,
1715 c_omp_check_loop_iv_r, &data, NULL, lh);
1716 }
1717 if (cond)
1718 {
1719 gcc_assert (COMPARISON_CLASS_P (cond));
1720 data.expr_loc = EXPR_LOCATION (init);
1721 data.kind = kind | 1;
1722 if (TREE_OPERAND (cond, 0) == decl)
1723 walk_tree_1 (&TREE_OPERAND (cond, 1),
1724 c_omp_check_loop_iv_r, &data, NULL, lh);
1725 else
1726 walk_tree_1 (&TREE_OPERAND (cond, 0),
1727 c_omp_check_loop_iv_r, &data, NULL, lh);
1728 }
1729 return !data.fail;
1730}
1731
1732
1733/* Helper function for c_omp_check_loop_binding_exprs: look for a binding
1734 of DECL in BODY. Only traverse things that might be containers for
1735 intervening code in an OMP loop. Returns the BIND_EXPR or DECL_EXPR
1736 if found, otherwise null. */
1737
1738static tree
1739find_binding_in_body (tree decl, tree body)
1740{
1741 if (!body)
1742 return NULL_TREE;
1743
1744 switch (TREE_CODE (body))
1745 {
1746 case BIND_EXPR:
1747 for (tree b = BIND_EXPR_VARS (body); b; b = DECL_CHAIN (b))
1748 if (b == decl)
1749 return body;
1750 return find_binding_in_body (decl, BIND_EXPR_BODY (body));
1751
1752 case DECL_EXPR:
1753 if (DECL_EXPR_DECL (body) == decl)
1754 return body;
1755 return NULL_TREE;
1756
1757 case STATEMENT_LIST:
1758 for (tree_stmt_iterator si = tsi_start (t: body); !tsi_end_p (i: si);
1759 tsi_next (i: &si))
1760 {
1761 tree b = find_binding_in_body (decl, body: tsi_stmt (i: si));
1762 if (b)
1763 return b;
1764 }
1765 return NULL_TREE;
1766
1767 case OMP_STRUCTURED_BLOCK:
1768 return find_binding_in_body (decl, OMP_BODY (body));
1769
1770 default:
1771 return NULL_TREE;
1772 }
1773}
1774
1775/* Traversal function for check_loop_binding_expr, to diagnose
1776 errors when a binding made in intervening code is referenced outside
1777 of the loop. Returns non-null if such a reference is found. DATA points
1778 to the tree containing the loop body. */
1779
1780static tree
1781check_loop_binding_expr_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
1782 void *data)
1783{
1784 tree body = *(tree *)data;
1785
1786 if (DECL_P (*tp) && find_binding_in_body (decl: *tp, body))
1787 return *tp;
1788 return NULL_TREE;
1789}
1790
1791/* Helper macro used below. */
1792
#define LOCATION_OR(loc1, loc2) \
  ((loc1) != UNKNOWN_LOCATION ? (loc1) : (loc2))

/* Which expression of the loop is being checked; selects the wording
   of the diagnostic in check_loop_binding_expr.  */
enum check_loop_binding_expr_ctx {
  CHECK_LOOP_BINDING_EXPR_CTX_LOOP_VAR,
  CHECK_LOOP_BINDING_EXPR_CTX_IN_INIT,
  CHECK_LOOP_BINDING_EXPR_CTX_END_TEST,
  CHECK_LOOP_BINDING_EXPR_CTX_INCR
};
1802
1803/* Check a single expression EXPR for references to variables bound in
1804 intervening code in BODY. Return true if ok, otherwise give an error
1805 referencing CONTEXT and return false. Use LOC for the error message
1806 if EXPR doesn't have one. */
1807static bool
1808check_loop_binding_expr (tree expr, tree body, location_t loc,
1809 check_loop_binding_expr_ctx ctx)
1810{
1811 tree bad = walk_tree (&expr, check_loop_binding_expr_r, (void *)&body, NULL);
1812
1813 if (bad)
1814 {
1815 location_t eloc = EXPR_LOCATION (expr);
1816 eloc = LOCATION_OR (eloc, loc);
1817 switch (ctx)
1818 {
1819 case CHECK_LOOP_BINDING_EXPR_CTX_LOOP_VAR:
1820 error_at (eloc, "variable %qD used as loop variable is bound "
1821 "in intervening code", bad);
1822 break;
1823 case CHECK_LOOP_BINDING_EXPR_CTX_IN_INIT:
1824 error_at (eloc, "variable %qD used in initializer is bound "
1825 "in intervening code", bad);
1826 break;
1827 case CHECK_LOOP_BINDING_EXPR_CTX_END_TEST:
1828 error_at (eloc, "variable %qD used in end test is bound "
1829 "in intervening code", bad);
1830 break;
1831 case CHECK_LOOP_BINDING_EXPR_CTX_INCR:
1832 error_at (eloc, "variable %qD used in increment expression is bound "
1833 "in intervening code", bad);
1834 break;
1835 }
1836 return false;
1837 }
1838 return true;
1839}
1840
1841/* STMT is an OMP_FOR construct. Check all of the iteration variable,
1842 initializer, end condition, and increment for bindings inside the
1843 loop body. If ORIG_INITS is provided, check those elements too.
1844 Return true if OK, false otherwise. */
1845bool
1846c_omp_check_loop_binding_exprs (tree stmt, vec<tree> *orig_inits)
1847{
1848 bool ok = true;
1849 location_t loc = EXPR_LOCATION (stmt);
1850 tree body = OMP_FOR_BODY (stmt);
1851 int orig_init_length = orig_inits ? orig_inits->length () : 0;
1852
1853 for (int i = 1; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
1854 {
1855 tree init = TREE_VEC_ELT (OMP_FOR_INIT (stmt), i);
1856 tree cond = TREE_VEC_ELT (OMP_FOR_COND (stmt), i);
1857 tree incr = TREE_VEC_ELT (OMP_FOR_INCR (stmt), i);
1858 gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
1859 tree decl = TREE_OPERAND (init, 0);
1860 tree orig_init = i < orig_init_length ? (*orig_inits)[i] : NULL_TREE;
1861 tree e;
1862 location_t eloc;
1863
1864 e = TREE_OPERAND (init, 1);
1865 eloc = LOCATION_OR (EXPR_LOCATION (init), loc);
1866 if (!check_loop_binding_expr (expr: decl, body, loc: eloc,
1867 ctx: CHECK_LOOP_BINDING_EXPR_CTX_LOOP_VAR))
1868 ok = false;
1869 if (!check_loop_binding_expr (expr: e, body, loc: eloc,
1870 ctx: CHECK_LOOP_BINDING_EXPR_CTX_IN_INIT))
1871 ok = false;
1872 if (orig_init
1873 && !check_loop_binding_expr (expr: orig_init, body, loc: eloc,
1874 ctx: CHECK_LOOP_BINDING_EXPR_CTX_IN_INIT))
1875 ok = false;
1876
1877 /* INCR and/or COND may be null if this is a template with a
1878 class iterator. */
1879 if (cond)
1880 {
1881 eloc = LOCATION_OR (EXPR_LOCATION (cond), loc);
1882 if (COMPARISON_CLASS_P (cond) && TREE_OPERAND (cond, 0) == decl)
1883 e = TREE_OPERAND (cond, 1);
1884 else if (COMPARISON_CLASS_P (cond) && TREE_OPERAND (cond, 1) == decl)
1885 e = TREE_OPERAND (cond, 0);
1886 else
1887 e = cond;
1888 if (!check_loop_binding_expr (expr: e, body, loc: eloc,
1889 ctx: CHECK_LOOP_BINDING_EXPR_CTX_END_TEST))
1890 ok = false;
1891 }
1892
1893 if (incr)
1894 {
1895 eloc = LOCATION_OR (EXPR_LOCATION (incr), loc);
1896 /* INCR should be either a MODIFY_EXPR or pre/post
1897 increment/decrement. We don't have to check the latter
1898 since there are no operands besides the iteration variable. */
1899 if (TREE_CODE (incr) == MODIFY_EXPR
1900 && !check_loop_binding_expr (TREE_OPERAND (incr, 1), body, loc: eloc,
1901 ctx: CHECK_LOOP_BINDING_EXPR_CTX_INCR))
1902 ok = false;
1903 }
1904 }
1905
1906 return ok;
1907}
1908
1909/* This function splits clauses for OpenACC combined loop
1910 constructs. OpenACC combined loop constructs are:
1911 #pragma acc kernels loop
1912 #pragma acc parallel loop */
1913
tree
c_oacc_split_loop_clauses (tree clauses, tree *not_loop_clauses,
			   bool is_parallel)
{
  tree next, loop_clauses, nc;

  /* Both result lists are built by prepending, so they end up in
     reverse order relative to the input chain.  */
  loop_clauses = *not_loop_clauses = NULL_TREE;
  for (; clauses ; clauses = next)
    {
      /* Save the chain before the clause is respliced below.  */
      next = OMP_CLAUSE_CHAIN (clauses);

      switch (OMP_CLAUSE_CODE (clauses))
	{
	  /* Loop clauses.  */
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_PRIVATE:
	  OMP_CLAUSE_CHAIN (clauses) = loop_clauses;
	  loop_clauses = clauses;
	  break;

	  /* Reductions must be duplicated on both constructs.  */
	case OMP_CLAUSE_REDUCTION:
	  if (is_parallel)
	    {
	      /* Build a fresh clause for the parallel construct; the
		 original goes on the loop below.  */
	      nc = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
				     OMP_CLAUSE_REDUCTION);
	      OMP_CLAUSE_DECL (nc) = OMP_CLAUSE_DECL (clauses);
	      OMP_CLAUSE_REDUCTION_CODE (nc)
		= OMP_CLAUSE_REDUCTION_CODE (clauses);
	      OMP_CLAUSE_CHAIN (nc) = *not_loop_clauses;
	      *not_loop_clauses = nc;
	    }

	  OMP_CLAUSE_CHAIN (clauses) = loop_clauses;
	  loop_clauses = clauses;
	  break;

	  /* Parallel/kernels clauses.  */
	default:
	  OMP_CLAUSE_CHAIN (clauses) = *not_loop_clauses;
	  *not_loop_clauses = clauses;
	  break;
	}
    }

  return loop_clauses;
}
1968
1969/* This function attempts to split or duplicate clauses for OpenMP
1970 combined/composite constructs. Right now there are 30 different
1971 constructs. CODE is the innermost construct in the combined construct,
1972 and MASK allows to determine which constructs are combined together,
1973 as every construct has at least one clause that no other construct
1974 has (except for OMP_SECTIONS, but that can be only combined with parallel,
1975 and OMP_MASTER, which doesn't have any clauses at all).
1976 OpenMP combined/composite constructs are:
1977 #pragma omp distribute parallel for
1978 #pragma omp distribute parallel for simd
1979 #pragma omp distribute simd
1980 #pragma omp for simd
1981 #pragma omp masked taskloop
1982 #pragma omp masked taskloop simd
1983 #pragma omp master taskloop
1984 #pragma omp master taskloop simd
1985 #pragma omp parallel for
1986 #pragma omp parallel for simd
1987 #pragma omp parallel loop
1988 #pragma omp parallel masked
1989 #pragma omp parallel masked taskloop
1990 #pragma omp parallel masked taskloop simd
1991 #pragma omp parallel master
1992 #pragma omp parallel master taskloop
1993 #pragma omp parallel master taskloop simd
1994 #pragma omp parallel sections
1995 #pragma omp target parallel
1996 #pragma omp target parallel for
1997 #pragma omp target parallel for simd
1998 #pragma omp target parallel loop
1999 #pragma omp target teams
2000 #pragma omp target teams distribute
2001 #pragma omp target teams distribute parallel for
2002 #pragma omp target teams distribute parallel for simd
2003 #pragma omp target teams distribute simd
2004 #pragma omp target teams loop
2005 #pragma omp target simd
2006 #pragma omp taskloop simd
2007 #pragma omp teams distribute
2008 #pragma omp teams distribute parallel for
2009 #pragma omp teams distribute parallel for simd
2010 #pragma omp teams distribute simd
2011 #pragma omp teams loop */
2012
2013void
2014c_omp_split_clauses (location_t loc, enum tree_code code,
2015 omp_clause_mask mask, tree clauses, tree *cclauses)
2016{
2017 tree next, c;
2018 enum c_omp_clause_split s;
2019 int i;
2020 bool has_dup_allocate = false;
2021
2022 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2023 cclauses[i] = NULL;
2024 /* Add implicit nowait clause on
2025 #pragma omp parallel {for,for simd,sections}. */
2026 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2027 switch (code)
2028 {
2029 case OMP_FOR:
2030 case OMP_SIMD:
2031 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2032 cclauses[C_OMP_CLAUSE_SPLIT_FOR]
2033 = build_omp_clause (loc, OMP_CLAUSE_NOWAIT);
2034 break;
2035 case OMP_SECTIONS:
2036 cclauses[C_OMP_CLAUSE_SPLIT_SECTIONS]
2037 = build_omp_clause (loc, OMP_CLAUSE_NOWAIT);
2038 break;
2039 default:
2040 break;
2041 }
2042
2043 for (; clauses ; clauses = next)
2044 {
2045 next = OMP_CLAUSE_CHAIN (clauses);
2046
2047 switch (OMP_CLAUSE_CODE (clauses))
2048 {
2049 /* First the clauses that are unique to some constructs. */
2050 case OMP_CLAUSE_DEVICE:
2051 case OMP_CLAUSE_MAP:
2052 case OMP_CLAUSE_IS_DEVICE_PTR:
2053 case OMP_CLAUSE_HAS_DEVICE_ADDR:
2054 case OMP_CLAUSE_DEFAULTMAP:
2055 case OMP_CLAUSE_DEPEND:
2056 s = C_OMP_CLAUSE_SPLIT_TARGET;
2057 break;
2058 case OMP_CLAUSE_DOACROSS:
2059 /* This can happen with invalid depend(source) or
2060 depend(sink:vec) on target combined with other constructs. */
2061 gcc_assert (OMP_CLAUSE_DOACROSS_DEPEND (clauses));
2062 s = C_OMP_CLAUSE_SPLIT_TARGET;
2063 break;
2064 case OMP_CLAUSE_NUM_TEAMS:
2065 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2066 break;
2067 case OMP_CLAUSE_DIST_SCHEDULE:
2068 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2069 break;
2070 case OMP_CLAUSE_COPYIN:
2071 case OMP_CLAUSE_NUM_THREADS:
2072 case OMP_CLAUSE_PROC_BIND:
2073 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2074 break;
2075 case OMP_CLAUSE_ORDERED:
2076 s = C_OMP_CLAUSE_SPLIT_FOR;
2077 break;
2078 case OMP_CLAUSE_SCHEDULE:
2079 s = C_OMP_CLAUSE_SPLIT_FOR;
2080 if (code != OMP_SIMD)
2081 OMP_CLAUSE_SCHEDULE_SIMD (clauses) = 0;
2082 break;
2083 case OMP_CLAUSE_SAFELEN:
2084 case OMP_CLAUSE_SIMDLEN:
2085 case OMP_CLAUSE_ALIGNED:
2086 case OMP_CLAUSE_NONTEMPORAL:
2087 s = C_OMP_CLAUSE_SPLIT_SIMD;
2088 break;
2089 case OMP_CLAUSE_GRAINSIZE:
2090 case OMP_CLAUSE_NUM_TASKS:
2091 case OMP_CLAUSE_FINAL:
2092 case OMP_CLAUSE_UNTIED:
2093 case OMP_CLAUSE_MERGEABLE:
2094 case OMP_CLAUSE_NOGROUP:
2095 case OMP_CLAUSE_PRIORITY:
2096 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2097 break;
2098 case OMP_CLAUSE_BIND:
2099 s = C_OMP_CLAUSE_SPLIT_LOOP;
2100 break;
2101 case OMP_CLAUSE_FILTER:
2102 s = C_OMP_CLAUSE_SPLIT_MASKED;
2103 break;
2104 /* Duplicate this to all of taskloop, distribute, for, simd and
2105 loop. */
2106 case OMP_CLAUSE_COLLAPSE:
2107 if (code == OMP_SIMD)
2108 {
2109 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)
2110 | (OMP_CLAUSE_MASK_1
2111 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)
2112 | (OMP_CLAUSE_MASK_1
2113 << PRAGMA_OMP_CLAUSE_NOGROUP))) != 0)
2114 {
2115 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2116 OMP_CLAUSE_COLLAPSE);
2117 OMP_CLAUSE_COLLAPSE_EXPR (c)
2118 = OMP_CLAUSE_COLLAPSE_EXPR (clauses);
2119 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2120 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2121 }
2122 else
2123 {
2124 /* This must be #pragma omp target simd */
2125 s = C_OMP_CLAUSE_SPLIT_SIMD;
2126 break;
2127 }
2128 }
2129 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2130 {
2131 if ((mask & (OMP_CLAUSE_MASK_1
2132 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2133 {
2134 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2135 OMP_CLAUSE_COLLAPSE);
2136 OMP_CLAUSE_COLLAPSE_EXPR (c)
2137 = OMP_CLAUSE_COLLAPSE_EXPR (clauses);
2138 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_FOR];
2139 cclauses[C_OMP_CLAUSE_SPLIT_FOR] = c;
2140 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2141 }
2142 else
2143 s = C_OMP_CLAUSE_SPLIT_FOR;
2144 }
2145 else if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2146 != 0)
2147 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2148 else if (code == OMP_LOOP)
2149 s = C_OMP_CLAUSE_SPLIT_LOOP;
2150 else
2151 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2152 break;
2153 /* Private clause is supported on all constructs but master/masked,
2154 it is enough to put it on the innermost one other than
2155 master/masked. For #pragma omp {for,sections} put it on parallel
2156 though, as that's what we did for OpenMP 3.1. */
2157 case OMP_CLAUSE_PRIVATE:
2158 switch (code)
2159 {
2160 case OMP_SIMD: s = C_OMP_CLAUSE_SPLIT_SIMD; break;
2161 case OMP_FOR: case OMP_SECTIONS:
2162 case OMP_PARALLEL: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
2163 case OMP_DISTRIBUTE: s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE; break;
2164 case OMP_TEAMS: s = C_OMP_CLAUSE_SPLIT_TEAMS; break;
2165 case OMP_MASTER: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
2166 case OMP_MASKED: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
2167 case OMP_TASKLOOP: s = C_OMP_CLAUSE_SPLIT_TASKLOOP; break;
2168 case OMP_LOOP: s = C_OMP_CLAUSE_SPLIT_LOOP; break;
2169 default: gcc_unreachable ();
2170 }
2171 break;
2172 /* Firstprivate clause is supported on all constructs but
2173 simd, master, masked and loop. Put it on the outermost of those
2174 and duplicate on teams and parallel. */
2175 case OMP_CLAUSE_FIRSTPRIVATE:
2176 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2177 != 0)
2178 {
2179 if (code == OMP_SIMD
2180 && (mask & ((OMP_CLAUSE_MASK_1
2181 << PRAGMA_OMP_CLAUSE_NUM_THREADS)
2182 | (OMP_CLAUSE_MASK_1
2183 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))) == 0)
2184 {
2185 /* This must be #pragma omp target simd. */
2186 s = C_OMP_CLAUSE_SPLIT_TARGET;
2187 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clauses) = 1;
2188 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (clauses) = 1;
2189 break;
2190 }
2191 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2192 OMP_CLAUSE_FIRSTPRIVATE);
2193 /* firstprivate should not be applied to target if it is
2194 also lastprivate or on the combined/composite construct,
2195 or if it is mentioned in map clause. OMP_CLAUSE_DECLs
2196 may need to go through FE handling though (instantiation,
2197 C++ non-static data members, array section lowering), so
2198 add the clause with OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT and
2199 let *finish_omp_clauses and the gimplifier handle it
2200 right. */
2201 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 1;
2202 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2203 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2204 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2205 }
2206 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2207 != 0)
2208 {
2209 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)
2210 | (OMP_CLAUSE_MASK_1
2211 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE))) != 0)
2212 {
2213 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2214 OMP_CLAUSE_FIRSTPRIVATE);
2215 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2216 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL];
2217 cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] = c;
2218 if ((mask & (OMP_CLAUSE_MASK_1
2219 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) != 0)
2220 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2221 else
2222 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2223 }
2224 else if ((mask & (OMP_CLAUSE_MASK_1
2225 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2226 /* This must be
2227 #pragma omp parallel mas{ked,ter} taskloop{, simd}. */
2228 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2229 else
2230 /* This must be
2231 #pragma omp parallel{, for{, simd}, sections,loop}
2232 or
2233 #pragma omp target parallel. */
2234 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2235 }
2236 else if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2237 != 0)
2238 {
2239 /* This must be one of
2240 #pragma omp {,target }teams {distribute,loop}
2241 #pragma omp target teams
2242 #pragma omp {,target }teams distribute simd. */
2243 gcc_assert (code == OMP_DISTRIBUTE
2244 || code == OMP_LOOP
2245 || code == OMP_TEAMS
2246 || code == OMP_SIMD);
2247 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2248 }
2249 else if ((mask & (OMP_CLAUSE_MASK_1
2250 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2251 {
2252 /* This must be #pragma omp distribute simd. */
2253 gcc_assert (code == OMP_SIMD);
2254 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2255 }
2256 else if ((mask & (OMP_CLAUSE_MASK_1
2257 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2258 {
2259 /* This must be
2260 #pragma omp {,{,parallel }mas{ked,ter} }taskloop simd
2261 or
2262 #pragma omp {,parallel }mas{ked,ter} taskloop. */
2263 gcc_assert (code == OMP_SIMD || code == OMP_TASKLOOP);
2264 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2265 }
2266 else
2267 {
2268 /* This must be #pragma omp for simd. */
2269 gcc_assert (code == OMP_SIMD);
2270 s = C_OMP_CLAUSE_SPLIT_FOR;
2271 }
2272 break;
2273 /* Lastprivate is allowed on distribute, for, sections, taskloop, loop
2274 and simd. In parallel {for{, simd},sections} we actually want to
2275 put it on parallel rather than for or sections. */
2276 case OMP_CLAUSE_LASTPRIVATE:
2277 if (code == OMP_DISTRIBUTE)
2278 {
2279 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2280 break;
2281 }
2282 if ((mask & (OMP_CLAUSE_MASK_1
2283 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2284 {
2285 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2286 OMP_CLAUSE_LASTPRIVATE);
2287 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2288 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE];
2289 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2290 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2291 cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] = c;
2292 }
2293 if (code == OMP_FOR || code == OMP_SECTIONS)
2294 {
2295 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2296 != 0)
2297 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2298 else
2299 s = C_OMP_CLAUSE_SPLIT_FOR;
2300 break;
2301 }
2302 if (code == OMP_TASKLOOP)
2303 {
2304 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2305 break;
2306 }
2307 if (code == OMP_LOOP)
2308 {
2309 s = C_OMP_CLAUSE_SPLIT_LOOP;
2310 break;
2311 }
2312 gcc_assert (code == OMP_SIMD);
2313 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2314 {
2315 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2316 OMP_CLAUSE_LASTPRIVATE);
2317 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2318 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2319 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2320 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2321 != 0)
2322 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2323 else
2324 s = C_OMP_CLAUSE_SPLIT_FOR;
2325 OMP_CLAUSE_CHAIN (c) = cclauses[s];
2326 cclauses[s] = c;
2327 }
2328 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2329 {
2330 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2331 OMP_CLAUSE_LASTPRIVATE);
2332 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2333 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2334 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2335 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2336 cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2337 }
2338 s = C_OMP_CLAUSE_SPLIT_SIMD;
2339 break;
2340 /* Shared and default clauses are allowed on parallel, teams and
2341 taskloop. */
2342 case OMP_CLAUSE_SHARED:
2343 case OMP_CLAUSE_DEFAULT:
2344 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2345 != 0)
2346 {
2347 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2348 != 0)
2349 {
2350 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2351 OMP_CLAUSE_CODE (clauses));
2352 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_SHARED)
2353 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2354 else
2355 OMP_CLAUSE_DEFAULT_KIND (c)
2356 = OMP_CLAUSE_DEFAULT_KIND (clauses);
2357 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL];
2358 cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] = c;
2359 }
2360 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2361 break;
2362 }
2363 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2364 != 0)
2365 {
2366 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2367 == 0)
2368 {
2369 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2370 break;
2371 }
2372 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2373 OMP_CLAUSE_CODE (clauses));
2374 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_SHARED)
2375 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2376 else
2377 OMP_CLAUSE_DEFAULT_KIND (c)
2378 = OMP_CLAUSE_DEFAULT_KIND (clauses);
2379 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2380 cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2381 }
2382 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2383 break;
2384 /* order clauses are allowed on distribute, for, simd and loop. */
2385 case OMP_CLAUSE_ORDER:
2386 if ((mask & (OMP_CLAUSE_MASK_1
2387 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2388 {
2389 if (code == OMP_DISTRIBUTE)
2390 {
2391 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2392 break;
2393 }
2394 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2395 OMP_CLAUSE_ORDER);
2396 OMP_CLAUSE_ORDER_UNCONSTRAINED (c)
2397 = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses);
2398 OMP_CLAUSE_ORDER_REPRODUCIBLE (c)
2399 = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses);
2400 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE];
2401 cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] = c;
2402 }
2403 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2404 {
2405 if (code == OMP_SIMD)
2406 {
2407 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2408 OMP_CLAUSE_ORDER);
2409 OMP_CLAUSE_ORDER_UNCONSTRAINED (c)
2410 = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses);
2411 OMP_CLAUSE_ORDER_REPRODUCIBLE (c)
2412 = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses);
2413 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_FOR];
2414 cclauses[C_OMP_CLAUSE_SPLIT_FOR] = c;
2415 s = C_OMP_CLAUSE_SPLIT_SIMD;
2416 }
2417 else
2418 s = C_OMP_CLAUSE_SPLIT_FOR;
2419 }
2420 else if (code == OMP_LOOP)
2421 s = C_OMP_CLAUSE_SPLIT_LOOP;
2422 else
2423 s = C_OMP_CLAUSE_SPLIT_SIMD;
2424 break;
2425 /* Reduction is allowed on simd, for, parallel, sections, taskloop,
2426 teams and loop. Duplicate it on all of them, but omit on for or
2427 sections if parallel is present (unless inscan, in that case
2428 omit on parallel). If taskloop or loop is combined with
2429 parallel, omit it on parallel. */
2430 case OMP_CLAUSE_REDUCTION:
2431 if (OMP_CLAUSE_REDUCTION_TASK (clauses))
2432 {
2433 if (code == OMP_SIMD || code == OMP_LOOP)
2434 {
2435 error_at (OMP_CLAUSE_LOCATION (clauses),
2436 "invalid %<task%> reduction modifier on construct "
2437 "combined with %<simd%> or %<loop%>");
2438 OMP_CLAUSE_REDUCTION_TASK (clauses) = 0;
2439 }
2440 else if (code != OMP_SECTIONS
2441 && (mask & (OMP_CLAUSE_MASK_1
2442 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0
2443 && (mask & (OMP_CLAUSE_MASK_1
2444 << PRAGMA_OMP_CLAUSE_SCHEDULE)) == 0)
2445 {
2446 error_at (OMP_CLAUSE_LOCATION (clauses),
2447 "invalid %<task%> reduction modifier on construct "
2448 "not combined with %<parallel%>, %<for%> or "
2449 "%<sections%>");
2450 OMP_CLAUSE_REDUCTION_TASK (clauses) = 0;
2451 }
2452 }
2453 if (OMP_CLAUSE_REDUCTION_INSCAN (clauses)
2454 && ((mask & ((OMP_CLAUSE_MASK_1
2455 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)
2456 | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)))
2457 != 0))
2458 {
2459 error_at (OMP_CLAUSE_LOCATION (clauses),
2460 "%<inscan%> %<reduction%> clause on construct other "
2461 "than %<for%>, %<simd%>, %<for simd%>, "
2462 "%<parallel for%>, %<parallel for simd%>");
2463 OMP_CLAUSE_REDUCTION_INSCAN (clauses) = 0;
2464 }
2465 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2466 {
2467 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2468 OMP_CLAUSE_MAP);
2469 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2470 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2471 OMP_CLAUSE_MAP_IMPLICIT (c) = 1;
2472 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2473 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2474 }
2475 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2476 {
2477 if (code == OMP_SIMD)
2478 {
2479 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2480 OMP_CLAUSE_REDUCTION);
2481 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2482 OMP_CLAUSE_REDUCTION_CODE (c)
2483 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2484 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2485 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2486 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2487 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2488 OMP_CLAUSE_REDUCTION_INSCAN (c)
2489 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2490 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2491 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2492 }
2493 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2494 != 0)
2495 {
2496 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2497 OMP_CLAUSE_REDUCTION);
2498 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2499 OMP_CLAUSE_REDUCTION_CODE (c)
2500 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2501 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2502 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2503 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2504 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2505 OMP_CLAUSE_REDUCTION_INSCAN (c)
2506 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2507 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2508 cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2509 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2510 }
2511 else if ((mask & (OMP_CLAUSE_MASK_1
2512 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0
2513 && !OMP_CLAUSE_REDUCTION_INSCAN (clauses))
2514 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2515 else
2516 s = C_OMP_CLAUSE_SPLIT_FOR;
2517 }
2518 else if (code == OMP_SECTIONS
2519 || code == OMP_PARALLEL
2520 || code == OMP_MASTER
2521 || code == OMP_MASKED)
2522 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2523 else if (code == OMP_TASKLOOP)
2524 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2525 else if (code == OMP_LOOP)
2526 s = C_OMP_CLAUSE_SPLIT_LOOP;
2527 else if (code == OMP_SIMD)
2528 {
2529 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2530 != 0)
2531 {
2532 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2533 OMP_CLAUSE_REDUCTION);
2534 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2535 OMP_CLAUSE_REDUCTION_CODE (c)
2536 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2537 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2538 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2539 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2540 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2541 OMP_CLAUSE_REDUCTION_INSCAN (c)
2542 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2543 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2544 cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2545 }
2546 else if ((mask & (OMP_CLAUSE_MASK_1
2547 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) != 0)
2548 {
2549 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2550 OMP_CLAUSE_REDUCTION);
2551 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2552 OMP_CLAUSE_REDUCTION_CODE (c)
2553 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2554 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2555 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2556 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2557 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2558 OMP_CLAUSE_REDUCTION_INSCAN (c)
2559 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2560 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2561 cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2562 }
2563 s = C_OMP_CLAUSE_SPLIT_SIMD;
2564 }
2565 else
2566 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2567 break;
2568 case OMP_CLAUSE_IN_REDUCTION:
2569 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2570 {
2571 /* When on target, map(always, tofrom: item) is added as
2572 well. For non-combined target it is added in the FEs. */
2573 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2574 OMP_CLAUSE_MAP);
2575 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2576 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_TOFROM);
2577 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2578 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2579 s = C_OMP_CLAUSE_SPLIT_TARGET;
2580 break;
2581 }
2582 /* in_reduction on taskloop simd becomes reduction on the simd
2583 and keeps being in_reduction on taskloop. */
2584 if (code == OMP_SIMD)
2585 {
2586 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2587 OMP_CLAUSE_REDUCTION);
2588 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2589 OMP_CLAUSE_REDUCTION_CODE (c)
2590 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2591 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2592 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2593 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2594 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2595 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2596 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2597 }
2598 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2599 break;
2600 case OMP_CLAUSE_IF:
2601 if (OMP_CLAUSE_IF_MODIFIER (clauses) != ERROR_MARK)
2602 {
2603 s = C_OMP_CLAUSE_SPLIT_COUNT;
2604 switch (OMP_CLAUSE_IF_MODIFIER (clauses))
2605 {
2606 case OMP_PARALLEL:
2607 if ((mask & (OMP_CLAUSE_MASK_1
2608 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2609 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2610 break;
2611 case OMP_SIMD:
2612 if (code == OMP_SIMD)
2613 s = C_OMP_CLAUSE_SPLIT_SIMD;
2614 break;
2615 case OMP_TASKLOOP:
2616 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2617 != 0)
2618 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2619 break;
2620 case OMP_TARGET:
2621 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2622 != 0)
2623 s = C_OMP_CLAUSE_SPLIT_TARGET;
2624 break;
2625 default:
2626 break;
2627 }
2628 if (s != C_OMP_CLAUSE_SPLIT_COUNT)
2629 break;
2630 /* Error-recovery here, invalid if-modifier specified, add the
2631 clause to just one construct. */
2632 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2633 s = C_OMP_CLAUSE_SPLIT_TARGET;
2634 else if ((mask & (OMP_CLAUSE_MASK_1
2635 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2636 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2637 else if ((mask & (OMP_CLAUSE_MASK_1
2638 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2639 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2640 else if (code == OMP_SIMD)
2641 s = C_OMP_CLAUSE_SPLIT_SIMD;
2642 else
2643 gcc_unreachable ();
2644 break;
2645 }
2646 /* Otherwise, duplicate if clause to all constructs. */
2647 if (code == OMP_SIMD)
2648 {
2649 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)
2650 | (OMP_CLAUSE_MASK_1
2651 << PRAGMA_OMP_CLAUSE_NUM_THREADS)
2652 | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP)))
2653 != 0)
2654 {
2655 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2656 OMP_CLAUSE_IF);
2657 OMP_CLAUSE_IF_MODIFIER (c)
2658 = OMP_CLAUSE_IF_MODIFIER (clauses);
2659 OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2660 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2661 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2662 }
2663 else
2664 {
2665 s = C_OMP_CLAUSE_SPLIT_SIMD;
2666 break;
2667 }
2668 }
2669 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2670 != 0)
2671 {
2672 if ((mask & (OMP_CLAUSE_MASK_1
2673 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2674 {
2675 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2676 OMP_CLAUSE_IF);
2677 OMP_CLAUSE_IF_MODIFIER (c)
2678 = OMP_CLAUSE_IF_MODIFIER (clauses);
2679 OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2680 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2681 cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2682 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2683 }
2684 else
2685 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2686 }
2687 else if ((mask & (OMP_CLAUSE_MASK_1
2688 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2689 {
2690 if ((mask & (OMP_CLAUSE_MASK_1
2691 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2692 {
2693 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2694 OMP_CLAUSE_IF);
2695 OMP_CLAUSE_IF_MODIFIER (c)
2696 = OMP_CLAUSE_IF_MODIFIER (clauses);
2697 OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2698 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2699 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2700 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2701 }
2702 else
2703 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2704 }
2705 else
2706 s = C_OMP_CLAUSE_SPLIT_TARGET;
2707 break;
2708 case OMP_CLAUSE_LINEAR:
2709 /* Linear clause is allowed on simd and for. Put it on the
2710 innermost construct. */
2711 if (code == OMP_SIMD)
2712 s = C_OMP_CLAUSE_SPLIT_SIMD;
2713 else
2714 s = C_OMP_CLAUSE_SPLIT_FOR;
2715 break;
2716 case OMP_CLAUSE_NOWAIT:
2717 /* Nowait clause is allowed on target, for and sections, but
2718 is not allowed on parallel for or parallel sections. Therefore,
2719 put it on target construct if present, because that can only
2720 be combined with parallel for{, simd} and not with for{, simd},
2721 otherwise to the worksharing construct. */
2722 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2723 != 0)
2724 s = C_OMP_CLAUSE_SPLIT_TARGET;
2725 else
2726 s = C_OMP_CLAUSE_SPLIT_FOR;
2727 break;
2728 /* thread_limit is allowed on target and teams. Distribute it
2729 to all. */
2730 case OMP_CLAUSE_THREAD_LIMIT:
2731 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2732 != 0)
2733 {
2734 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2735 != 0)
2736 {
2737 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2738 OMP_CLAUSE_THREAD_LIMIT);
2739 OMP_CLAUSE_THREAD_LIMIT_EXPR (c)
2740 = OMP_CLAUSE_THREAD_LIMIT_EXPR (clauses);
2741 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2742 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2743 }
2744 else
2745 {
2746 s = C_OMP_CLAUSE_SPLIT_TARGET;
2747 break;
2748 }
2749 }
2750 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2751 break;
2752 /* Allocate clause is allowed on target, teams, distribute, parallel,
2753 for, sections and taskloop. Distribute it to all. */
2754 case OMP_CLAUSE_ALLOCATE:
2755 s = C_OMP_CLAUSE_SPLIT_COUNT;
2756 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2757 {
2758 switch (i)
2759 {
2760 case C_OMP_CLAUSE_SPLIT_TARGET:
2761 if ((mask & (OMP_CLAUSE_MASK_1
2762 << PRAGMA_OMP_CLAUSE_MAP)) == 0)
2763 continue;
2764 break;
2765 case C_OMP_CLAUSE_SPLIT_TEAMS:
2766 if ((mask & (OMP_CLAUSE_MASK_1
2767 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) == 0)
2768 continue;
2769 break;
2770 case C_OMP_CLAUSE_SPLIT_DISTRIBUTE:
2771 if ((mask & (OMP_CLAUSE_MASK_1
2772 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) == 0)
2773 continue;
2774 break;
2775 case C_OMP_CLAUSE_SPLIT_PARALLEL:
2776 if ((mask & (OMP_CLAUSE_MASK_1
2777 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0)
2778 continue;
2779 break;
2780 case C_OMP_CLAUSE_SPLIT_FOR:
2781 STATIC_ASSERT (C_OMP_CLAUSE_SPLIT_SECTIONS
2782 == C_OMP_CLAUSE_SPLIT_FOR
2783 && (C_OMP_CLAUSE_SPLIT_TASKLOOP
2784 == C_OMP_CLAUSE_SPLIT_FOR)
2785 && (C_OMP_CLAUSE_SPLIT_LOOP
2786 == C_OMP_CLAUSE_SPLIT_FOR));
2787 if (code == OMP_SECTIONS)
2788 break;
2789 if ((mask & (OMP_CLAUSE_MASK_1
2790 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2791 break;
2792 if ((mask & (OMP_CLAUSE_MASK_1
2793 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2794 break;
2795 continue;
2796 case C_OMP_CLAUSE_SPLIT_SIMD:
2797 continue;
2798 default:
2799 gcc_unreachable ();
2800 }
2801 if (s != C_OMP_CLAUSE_SPLIT_COUNT)
2802 {
2803 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2804 OMP_CLAUSE_ALLOCATE);
2805 OMP_CLAUSE_DECL (c)
2806 = OMP_CLAUSE_DECL (clauses);
2807 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
2808 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (clauses);
2809 OMP_CLAUSE_ALLOCATE_ALIGN (c)
2810 = OMP_CLAUSE_ALLOCATE_ALIGN (clauses);
2811 OMP_CLAUSE_CHAIN (c) = cclauses[s];
2812 cclauses[s] = c;
2813 has_dup_allocate = true;
2814 }
2815 s = (enum c_omp_clause_split) i;
2816 }
2817 gcc_assert (s != C_OMP_CLAUSE_SPLIT_COUNT);
2818 break;
2819 default:
2820 gcc_unreachable ();
2821 }
2822 OMP_CLAUSE_CHAIN (clauses) = cclauses[s];
2823 cclauses[s] = clauses;
2824 }
2825
2826 if (has_dup_allocate)
2827 {
2828 bool need_prune = false;
2829 bitmap_obstack_initialize (NULL);
2830 for (i = 0; i < C_OMP_CLAUSE_SPLIT_SIMD - (code == OMP_LOOP); i++)
2831 if (cclauses[i])
2832 {
2833 bitmap_head allocate_head;
2834 bitmap_initialize (head: &allocate_head, obstack: &bitmap_default_obstack);
2835 for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2836 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
2837 && DECL_P (OMP_CLAUSE_DECL (c)))
2838 bitmap_set_bit (&allocate_head,
2839 DECL_UID (OMP_CLAUSE_DECL (c)));
2840 for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2841 switch (OMP_CLAUSE_CODE (c))
2842 {
2843 case OMP_CLAUSE_REDUCTION:
2844 case OMP_CLAUSE_IN_REDUCTION:
2845 case OMP_CLAUSE_TASK_REDUCTION:
2846 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
2847 {
2848 tree t = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
2849 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2850 t = TREE_OPERAND (t, 0);
2851 if (TREE_CODE (t) == ADDR_EXPR
2852 || INDIRECT_REF_P (t))
2853 t = TREE_OPERAND (t, 0);
2854 if (DECL_P (t))
2855 bitmap_clear_bit (&allocate_head, DECL_UID (t));
2856 break;
2857 }
2858 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST)
2859 {
2860 /* TODO: This can go away once we transition all uses of
2861 TREE_LIST for representing OMP array sections to
2862 OMP_ARRAY_SECTION. */
2863 tree t;
2864 for (t = OMP_CLAUSE_DECL (c);
2865 TREE_CODE (t) == TREE_LIST; t = TREE_CHAIN (t))
2866 ;
2867 if (DECL_P (t))
2868 bitmap_clear_bit (&allocate_head, DECL_UID (t));
2869 break;
2870 }
2871 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == OMP_ARRAY_SECTION)
2872 {
2873 tree t;
2874 for (t = OMP_CLAUSE_DECL (c);
2875 TREE_CODE (t) == OMP_ARRAY_SECTION;
2876 t = TREE_OPERAND (t, 0))
2877 ;
2878 if (DECL_P (t))
2879 bitmap_clear_bit (&allocate_head, DECL_UID (t));
2880 break;
2881 }
2882 /* FALLTHRU */
2883 case OMP_CLAUSE_PRIVATE:
2884 case OMP_CLAUSE_FIRSTPRIVATE:
2885 case OMP_CLAUSE_LASTPRIVATE:
2886 case OMP_CLAUSE_LINEAR:
2887 if (DECL_P (OMP_CLAUSE_DECL (c)))
2888 bitmap_clear_bit (&allocate_head,
2889 DECL_UID (OMP_CLAUSE_DECL (c)));
2890 break;
2891 default:
2892 break;
2893 }
2894 for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2895 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
2896 && DECL_P (OMP_CLAUSE_DECL (c))
2897 && bitmap_bit_p (&allocate_head,
2898 DECL_UID (OMP_CLAUSE_DECL (c))))
2899 {
2900 /* Mark allocate clauses which don't have corresponding
2901 explicit data sharing clause. */
2902 OMP_CLAUSE_ALLOCATE_COMBINED (c) = 1;
2903 need_prune = true;
2904 }
2905 }
2906 bitmap_obstack_release (NULL);
2907 if (need_prune)
2908 {
2909 /* At least one allocate clause has been marked. Walk all the
2910 duplicated allocate clauses in sync. If it is marked in all
2911 constituent constructs, diagnose it as invalid and remove
2912 them. Otherwise, remove all marked inner clauses inside
2913 a construct that doesn't have them marked. Keep the outer
2914 marked ones, because some clause duplication is done only
2915 during gimplification. */
2916 tree *p[C_OMP_CLAUSE_SPLIT_COUNT];
2917 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2918 if (cclauses[i] == NULL_TREE
2919 || i == C_OMP_CLAUSE_SPLIT_SIMD
2920 || (i == C_OMP_CLAUSE_SPLIT_LOOP && code == OMP_LOOP))
2921 p[i] = NULL;
2922 else
2923 p[i] = &cclauses[i];
2924 do
2925 {
2926 int j = -1;
2927 tree seen = NULL_TREE;
2928 for (i = C_OMP_CLAUSE_SPLIT_COUNT - 1; i >= 0; i--)
2929 if (p[i])
2930 {
2931 while (*p[i]
2932 && OMP_CLAUSE_CODE (*p[i]) != OMP_CLAUSE_ALLOCATE)
2933 p[i] = &OMP_CLAUSE_CHAIN (*p[i]);
2934 if (*p[i] == NULL_TREE)
2935 {
2936 i = C_OMP_CLAUSE_SPLIT_COUNT;
2937 break;
2938 }
2939 if (!OMP_CLAUSE_ALLOCATE_COMBINED (*p[i]) && j == -1)
2940 j = i;
2941 seen = *p[i];
2942 }
2943 if (i == C_OMP_CLAUSE_SPLIT_COUNT)
2944 break;
2945 if (j == -1)
2946 error_at (OMP_CLAUSE_LOCATION (seen),
2947 "%qD specified in %<allocate%> clause but not in "
2948 "an explicit privatization clause",
2949 OMP_CLAUSE_DECL (seen));
2950 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2951 if (p[i])
2952 {
2953 if (i > j)
2954 /* Remove. */
2955 *p[i] = OMP_CLAUSE_CHAIN (*p[i]);
2956 else
2957 /* Keep. */
2958 p[i] = &OMP_CLAUSE_CHAIN (*p[i]);
2959 }
2960 }
2961 while (1);
2962 }
2963 }
2964
2965 if (!flag_checking)
2966 return;
2967
2968 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) == 0)
2969 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_TARGET] == NULL_TREE);
2970 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) == 0)
2971 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] == NULL_TREE);
2972 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) == 0
2973 && (mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_FILTER)) == 0)
2974 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] == NULL_TREE);
2975 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0)
2976 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] == NULL_TREE);
2977 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)
2978 | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))) == 0
2979 && code != OMP_SECTIONS
2980 && code != OMP_LOOP)
2981 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_FOR] == NULL_TREE);
2982 if (code != OMP_SIMD)
2983 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_SIMD] == NULL_TREE);
2984}
2985
2986
2987/* qsort callback to compare #pragma omp declare simd clauses. */
2988
2989static int
2990c_omp_declare_simd_clause_cmp (const void *p, const void *q)
2991{
2992 tree a = *(const tree *) p;
2993 tree b = *(const tree *) q;
2994 if (OMP_CLAUSE_CODE (a) != OMP_CLAUSE_CODE (b))
2995 {
2996 if (OMP_CLAUSE_CODE (a) > OMP_CLAUSE_CODE (b))
2997 return -1;
2998 return 1;
2999 }
3000 if (OMP_CLAUSE_CODE (a) != OMP_CLAUSE_SIMDLEN
3001 && OMP_CLAUSE_CODE (a) != OMP_CLAUSE_INBRANCH
3002 && OMP_CLAUSE_CODE (a) != OMP_CLAUSE_NOTINBRANCH)
3003 {
3004 int c = tree_to_shwi (OMP_CLAUSE_DECL (a));
3005 int d = tree_to_shwi (OMP_CLAUSE_DECL (b));
3006 if (c < d)
3007 return 1;
3008 if (c > d)
3009 return -1;
3010 }
3011 return 0;
3012}
3013
/* Change PARM_DECLs in OMP_CLAUSE_DECL of #pragma omp declare simd
   CLAUSES on FNDECL into argument indexes and sort them.  PARMS is the
   parameter chain the clause decls are looked up in.  Clauses naming
   something that isn't a parameter are diagnosed and dropped from the
   result.  Returns the head of the re-chained, sorted clause list (or
   NULL_TREE if nothing survived).  */

tree
c_omp_declare_simd_clauses_to_numbers (tree parms, tree clauses)
{
  tree c;
  vec<tree> clvec = vNULL;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      /* simdlen/inbranch/notinbranch don't refer to a parameter;
	 everything else carries a PARM_DECL in OMP_CLAUSE_DECL.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SIMDLEN
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_INBRANCH
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_NOTINBRANCH)
	{
	  tree decl = OMP_CLAUSE_DECL (c);
	  tree arg;
	  int idx;
	  /* Find the position of DECL in the parameter chain.  */
	  for (arg = parms, idx = 0; arg;
	       arg = TREE_CHAIN (arg), idx++)
	    if (arg == decl)
	      break;
	  if (arg == NULL_TREE)
	    {
	      /* Not a parameter: diagnose and skip the safe_push below,
		 dropping this clause from the returned list.  */
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%qD is not a function argument", decl);
	      continue;
	    }
	  OMP_CLAUSE_DECL (c) = build_int_cst (integer_type_node, idx);
	  /* A linear clause with a variable stride refers to a second
	     parameter; translate that one to an index as well.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c))
	    {
	      decl = OMP_CLAUSE_LINEAR_STEP (c);
	      for (arg = parms, idx = 0; arg;
		   arg = TREE_CHAIN (arg), idx++)
		if (arg == decl)
		  break;
	      if (arg == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qD is not a function argument", decl);
		  continue;
		}
	      OMP_CLAUSE_LINEAR_STEP (c)
		= build_int_cst (integer_type_node, idx);
	    }
	}
      clvec.safe_push (obj: c);
    }
  if (!clvec.is_empty ())
    {
      /* Sort the surviving clauses and rebuild the chain links in the
	 new order.  */
      unsigned int len = clvec.length (), i;
      clvec.qsort (c_omp_declare_simd_clause_cmp);
      clauses = clvec[0];
      for (i = 0; i < len; i++)
	OMP_CLAUSE_CHAIN (clvec[i]) = (i < len - 1) ? clvec[i + 1] : NULL_TREE;
    }
  else
    clauses = NULL_TREE;
  clvec.release ();
  return clauses;
}
3076
3077/* Change argument indexes in CLAUSES of FNDECL back to PARM_DECLs. */
3078
3079void
3080c_omp_declare_simd_clauses_to_decls (tree fndecl, tree clauses)
3081{
3082 tree c;
3083
3084 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
3085 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SIMDLEN
3086 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_INBRANCH
3087 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_NOTINBRANCH)
3088 {
3089 int idx = tree_to_shwi (OMP_CLAUSE_DECL (c)), i;
3090 tree arg;
3091 for (arg = DECL_ARGUMENTS (fndecl), i = 0; arg;
3092 arg = TREE_CHAIN (arg), i++)
3093 if (i == idx)
3094 break;
3095 gcc_assert (arg);
3096 OMP_CLAUSE_DECL (c) = arg;
3097 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3098 && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c))
3099 {
3100 idx = tree_to_shwi (OMP_CLAUSE_LINEAR_STEP (c));
3101 for (arg = DECL_ARGUMENTS (fndecl), i = 0; arg;
3102 arg = TREE_CHAIN (arg), i++)
3103 if (i == idx)
3104 break;
3105 gcc_assert (arg);
3106 OMP_CLAUSE_LINEAR_STEP (c) = arg;
3107 }
3108 }
3109}
3110
/* Return true for __func__ and similar function-local predefined
   variables (which are in OpenMP predetermined shared, allowed in
   shared/firstprivate clauses).  Also returns true for the artificial
   variables created by ubsan_create_data, which have no distinguishing
   flag and are recognized by name/type patterns instead.  */

bool
c_omp_predefined_variable (tree decl)
{
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && TREE_STATIC (decl)
      && DECL_NAME (decl))
    {
      /* __func__, __FUNCTION__ and __PRETTY_FUNCTION__.  */
      if (TREE_READONLY (decl)
	  && (DECL_NAME (decl) == ridpointers[RID_C99_FUNCTION_NAME]
	      || DECL_NAME (decl) == ridpointers[RID_FUNCTION_NAME]
	      || DECL_NAME (decl) == ridpointers[RID_PRETTY_FUNCTION_NAME]))
	return true;
      /* For UBSan handle the same also ubsan_create_data created
	 variables.  There is no magic flag for those, but user variables
	 shouldn't be DECL_ARTIFICIAL or have TYPE_ARTIFICIAL type with
	 such names.  */
      if ((flag_sanitize & (SANITIZE_UNDEFINED
			    | SANITIZE_UNDEFINED_NONDEFAULT)) != 0
	  && DECL_IGNORED_P (decl)
	  && !TREE_READONLY (decl)
	  && TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE
	  && TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	  && TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && TYPE_NAME (TREE_TYPE (decl))
	  && TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	  && DECL_NAME (TYPE_NAME (TREE_TYPE (decl)))
	  && (TREE_CODE (DECL_NAME (TYPE_NAME (TREE_TYPE (decl))))
	      == IDENTIFIER_NODE))
	{
	  tree id1 = DECL_NAME (decl);
	  tree id2 = DECL_NAME (TYPE_NAME (TREE_TYPE (decl)));
	  /* The type name must look like "__ubsan_<something>_data":
	     the minimum length sizeof ("__ubsan__data") (NUL included)
	     guarantees at least one character between the "__ubsan_"
	     prefix and "_data" suffix checked below.  The decl name
	     merely needs to contain "ubsan_data".  */
	  if (IDENTIFIER_LENGTH (id1) >= sizeof ("ubsan_data") - 1
	      && IDENTIFIER_LENGTH (id2) >= sizeof ("__ubsan__data")
	      && !memcmp (IDENTIFIER_POINTER (id2), s2: "__ubsan_",
			  n: sizeof ("__ubsan_") - 1)
	      && !memcmp (IDENTIFIER_POINTER (id2) + IDENTIFIER_LENGTH (id2)
			  - sizeof ("_data") + 1, s2: "_data",
			  n: sizeof ("_data") - 1)
	      && strstr (IDENTIFIER_POINTER (id1), needle: "ubsan_data"))
	    return true;
	}
    }
  return false;
}
3160
3161/* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute of DECL
3162 is predetermined. */
3163
3164enum omp_clause_default_kind
3165c_omp_predetermined_sharing (tree decl)
3166{
3167 /* Predetermine artificial variables holding integral values, those
3168 are usually result of gimplify_one_sizepos or SAVE_EXPR
3169 gimplification. */
3170 if (VAR_P (decl)
3171 && DECL_ARTIFICIAL (decl)
3172 && INTEGRAL_TYPE_P (TREE_TYPE (decl)))
3173 return OMP_CLAUSE_DEFAULT_SHARED;
3174
3175 if (c_omp_predefined_variable (decl))
3176 return OMP_CLAUSE_DEFAULT_SHARED;
3177
3178 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
3179}
3180
3181/* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
3182 of DECL is predetermined. */
3183
3184enum omp_clause_defaultmap_kind
3185c_omp_predetermined_mapping (tree decl)
3186{
3187 /* Predetermine artificial variables holding integral values, those
3188 are usually result of gimplify_one_sizepos or SAVE_EXPR
3189 gimplification. */
3190 if (VAR_P (decl)
3191 && DECL_ARTIFICIAL (decl)
3192 && INTEGRAL_TYPE_P (TREE_TYPE (decl)))
3193 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
3194
3195 if (c_omp_predefined_variable (decl))
3196 return OMP_CLAUSE_DEFAULTMAP_TO;
3197
3198 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
3199}
3200
3201
3202/* Used to merge map clause information in c_omp_adjust_map_clauses. */
3203struct map_clause
3204{
3205 tree clause;
3206 bool firstprivate_ptr_p;
3207 bool decl_mapped;
3208 bool omp_declare_target;
3209 map_clause (void) : clause (NULL_TREE), firstprivate_ptr_p (false),
3210 decl_mapped (false), omp_declare_target (false) { }
3211};
3212
/* Adjust map clauses after normal clause parsing, mainly to mark specific
   base-pointer map cases addressable that may be turned into attach/detach
   operations during gimplification.  CLAUSES is the clause chain of the
   construct; IS_TARGET is true for target constructs.  */
void
c_omp_adjust_map_clauses (tree clauses, bool is_target)
{
  if (!is_target)
    {
      /* If this is not a target construct, just turn firstprivate pointers
	 into attach/detach, the runtime will check and do the rest.  */

      for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    && DECL_P (OMP_CLAUSE_DECL (c))
	    && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
	  {
	    tree ptr = OMP_CLAUSE_DECL (c);
	    c_common_mark_addressable_vec (ptr);
	  }
      return;
    }

  /* Merged per-decl information, keyed by the base pointer decl.  */
  hash_map<tree, map_clause> maps;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	&& DECL_P (OMP_CLAUSE_DECL (c)))
      {
	/* If this is for a target construct, the firstprivate pointer
	   is marked addressable if either is true:
	   (1) the base-pointer is mapped in this same construct, or
	   (2) the base-pointer is a variable place on the device by
	       "declare target" directives.

	   Here we iterate through all map clauses collecting these cases,
	   and merge them with a hash_map to process below.  */

	if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
	  {
	    tree ptr = OMP_CLAUSE_DECL (c);
	    map_clause &mc = maps.get_or_insert (k: ptr);
	    if (mc.clause == NULL_TREE)
	      mc.clause = c;
	    mc.firstprivate_ptr_p = true;

	    /* Case (2): "declare target" global.  */
	    if (is_global_var (t: ptr)
		&& lookup_attribute (attr_name: "omp declare target",
				     DECL_ATTRIBUTES (ptr)))
	      mc.omp_declare_target = true;
	  }
	else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALLOC
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TOFROM
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_FROM
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	  {
	    /* Case (1): the decl itself is mapped here.  */
	    map_clause &mc = maps.get_or_insert (OMP_CLAUSE_DECL (c));
	    mc.decl_mapped = true;
	  }
      }

  /* Now mark addressable every firstprivate pointer for which one of
     the two conditions above holds.  */
  for (hash_map<tree, map_clause>::iterator i = maps.begin ();
       i != maps.end (); ++i)
    {
      map_clause &mc = (*i).second;

      if (mc.firstprivate_ptr_p
	  && (mc.decl_mapped || mc.omp_declare_target))
	c_common_mark_addressable_vec (OMP_CLAUSE_DECL (mc.clause));
    }
}
3288
3289/* Maybe strip off an indirection from a "converted" reference, then find the
3290 origin of a pointer (i.e. without any offset). */
3291
3292tree
3293c_omp_address_inspector::unconverted_ref_origin ()
3294{
3295 tree t = orig;
3296
3297 /* We may have a reference-typed component access at the outermost level
3298 that has had convert_from_reference called on it. Get the un-dereferenced
3299 reference itself. */
3300 t = maybe_unconvert_ref (t);
3301
3302 /* Find base pointer for POINTER_PLUS_EXPR, etc. */
3303 t = get_origin (t);
3304
3305 return t;
3306}
3307
3308/* Return TRUE if the address is a component access. */
3309
3310bool
3311c_omp_address_inspector::component_access_p ()
3312{
3313 tree t = maybe_unconvert_ref (orig);
3314
3315 t = get_origin (t);
3316
3317 return TREE_CODE (t) == COMPONENT_REF;
3318}
3319
3320/* Perform various checks on the address, as described by clause CLAUSE (we
3321 only use its code and location here). */
3322
3323bool
3324c_omp_address_inspector::check_clause (tree clause)
3325{
3326 tree t = unconverted_ref_origin ();
3327
3328 if (TREE_CODE (t) != COMPONENT_REF)
3329 return true;
3330
3331 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
3332 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
3333 {
3334 error_at (OMP_CLAUSE_LOCATION (clause),
3335 "bit-field %qE in %qs clause",
3336 t, omp_clause_code_name[OMP_CLAUSE_CODE (clause)]);
3337 return false;
3338 }
3339 else if (!processing_template_decl_p ()
3340 && !omp_mappable_type (TREE_TYPE (t)))
3341 {
3342 error_at (OMP_CLAUSE_LOCATION (clause),
3343 "%qE does not have a mappable type in %qs clause",
3344 t, omp_clause_code_name[OMP_CLAUSE_CODE (clause)]);
3345 emit_unmappable_type_notes (TREE_TYPE (t));
3346 return false;
3347 }
3348 else if (TREE_TYPE (t) && TYPE_ATOMIC (TREE_TYPE (t)))
3349 {
3350 error_at (OMP_CLAUSE_LOCATION (clause),
3351 "%<_Atomic%> %qE in %qs clause", t,
3352 omp_clause_code_name[OMP_CLAUSE_CODE (clause)]);
3353 return false;
3354 }
3355
3356 return true;
3357}
3358
/* Find the "root term" for the address.  This is the innermost decl,
   etc. of the access.  With CHECKING, diagnose accesses to union
   members and return error_mark_node for them.  The result is cached
   in ROOT_TERM; the cache is only consulted when !CHECKING so that
   diagnostics are never skipped.  As a side effect, sets INDIRECTIONS
   if a pointer dereference is crossed on the way to the root.  */

tree
c_omp_address_inspector::get_root_term (bool checking)
{
  if (root_term && !checking)
    return root_term;

  tree t = unconverted_ref_origin ();

  /* Peel off COMPONENT_REFs, and between them any indirections or
     array accesses on the object being accessed.  */
  while (TREE_CODE (t) == COMPONENT_REF)
    {
      if (checking
	  && TREE_TYPE (TREE_OPERAND (t, 0))
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
	{
	  error_at (loc, "%qE is a member of a union", t);
	  return error_mark_node;
	}
      t = TREE_OPERAND (t, 0);
      while (TREE_CODE (t) == MEM_REF
	     || TREE_CODE (t) == INDIRECT_REF
	     || TREE_CODE (t) == ARRAY_REF)
	{
	  /* Remember that the access goes through a pointer.  */
	  if (TREE_CODE (t) == MEM_REF
	      || TREE_CODE (t) == INDIRECT_REF)
	    indirections = true;
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
	    t = TREE_OPERAND (t, 0);
	}
    }

  /* Cache for subsequent non-checking queries.  */
  root_term = t;

  return t;
}
3398
3399/* Return TRUE if the address is supported in mapping clauses. At present,
3400 this means that the innermost expression is a DECL_P, but could be extended
3401 to other types of expression in the future. */
3402
3403bool
3404c_omp_address_inspector::map_supported_p ()
3405{
3406 /* If we've already decided if the mapped address is supported, return
3407 that. */
3408 if (map_supported != -1)
3409 return map_supported;
3410
3411 tree t = unconverted_ref_origin ();
3412
3413 STRIP_NOPS (t);
3414
3415 while (TREE_CODE (t) == INDIRECT_REF
3416 || TREE_CODE (t) == MEM_REF
3417 || TREE_CODE (t) == ARRAY_REF
3418 || TREE_CODE (t) == COMPONENT_REF
3419 || TREE_CODE (t) == COMPOUND_EXPR
3420 || TREE_CODE (t) == SAVE_EXPR
3421 || TREE_CODE (t) == POINTER_PLUS_EXPR
3422 || TREE_CODE (t) == NON_LVALUE_EXPR
3423 || TREE_CODE (t) == OMP_ARRAY_SECTION
3424 || TREE_CODE (t) == NOP_EXPR)
3425 if (TREE_CODE (t) == COMPOUND_EXPR)
3426 t = TREE_OPERAND (t, 1);
3427 else
3428 t = TREE_OPERAND (t, 0);
3429
3430 STRIP_NOPS (t);
3431
3432 map_supported = DECL_P (t);
3433
3434 return map_supported;
3435}
3436
/* Get the origin of an address T, stripping off offsets and some other
   bits: COMPOUND_EXPR wrappers, POINTER_PLUS_EXPR offsets, SAVE_EXPRs
   and implicit dereferences of references.  */

tree
c_omp_address_inspector::get_origin (tree t)
{
  while (1)
    {
      /* The value of a comma expression is its second operand.  */
      if (TREE_CODE (t) == COMPOUND_EXPR)
	{
	  t = TREE_OPERAND (t, 1);
	  STRIP_NOPS (t);
	}
      /* Strip pointer offsets and SAVE_EXPR wrappers.  */
      else if (TREE_CODE (t) == POINTER_PLUS_EXPR
	       || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);
      /* Strip an implicit dereference of a reference; don't look at
	 types while processing a template, they may be NULL.  */
      else if (!processing_template_decl_p ()
	       && TREE_CODE (t) == INDIRECT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == REFERENCE_TYPE)
	t = TREE_OPERAND (t, 0);
      else
	break;
    }
  STRIP_NOPS (t);
  return t;
}
3463
3464/* For an address T that might be a reference that has had
3465 "convert_from_reference" called on it, return the actual reference without
3466 any indirection. */
3467
3468tree
3469c_omp_address_inspector::maybe_unconvert_ref (tree t)
3470{
3471 /* Be careful not to dereference the type if we're processing a
3472 template decl, else it might be NULL. */
3473 if (!processing_template_decl_p ()
3474 && TREE_CODE (t) == INDIRECT_REF
3475 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == REFERENCE_TYPE)
3476 return TREE_OPERAND (t, 0);
3477
3478 return t;
3479}
3480
/* Return TRUE if CLAUSE might describe a zero-length array section.
   That is the case for data-movement and (de)allocation map kinds;
   pointer/attach-style kinds cannot.  */

bool
c_omp_address_inspector::maybe_zero_length_array_section (tree clause)
{
  switch (OMP_CLAUSE_MAP_KIND (clause))
    {
    case GOMP_MAP_ALLOC:
    case GOMP_MAP_IF_PRESENT:
    case GOMP_MAP_TO:
    case GOMP_MAP_FROM:
    case GOMP_MAP_TOFROM:
    case GOMP_MAP_ALWAYS_TO:
    case GOMP_MAP_ALWAYS_FROM:
    case GOMP_MAP_ALWAYS_TOFROM:
    case GOMP_MAP_PRESENT_ALLOC:
    case GOMP_MAP_PRESENT_TO:
    case GOMP_MAP_PRESENT_FROM:
    case GOMP_MAP_PRESENT_TOFROM:
    case GOMP_MAP_ALWAYS_PRESENT_TO:
    case GOMP_MAP_ALWAYS_PRESENT_FROM:
    case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
    case GOMP_MAP_RELEASE:
    case GOMP_MAP_DELETE:
    case GOMP_MAP_FORCE_TO:
    case GOMP_MAP_FORCE_FROM:
    case GOMP_MAP_FORCE_TOFROM:
    case GOMP_MAP_FORCE_PRESENT:
      return true;
    default:
      return false;
    }
}
3514
/* Expand a chained access.  We only expect to see a quite limited range of
   expression types here, because e.g. you can't have an array of
   references.  C is the clause to expand after, EXPR the accessed
   expression, ADDR_TOKENS the token list with *IDX the position of the
   current ACCESS_METHOD token, and ORT the region type.  Appends an
   attach/detach node for pointer accesses, advances *IDX past the
   consumed token, and recurses while further ACCESS_METHOD tokens
   remain.  Returns the last clause created (or error_mark_node for
   unexpected access kinds).  */

static tree
omp_expand_access_chain (tree c, tree expr, vec<omp_addr_token *> &addr_tokens,
			 unsigned *idx, c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  location_t loc = OMP_CLAUSE_LOCATION (c);
  unsigned i = *idx;
  tree c2 = NULL_TREE;
  gomp_map_kind kind;

  /* Use detach for data being copied/released off the device (exit
     data, "from"-like and delete/release map kinds), attach
     otherwise.  */
  if ((ort & C_ORT_EXIT_DATA) != 0
      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_PRESENT_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_FROM)))
    kind = GOMP_MAP_DETACH;
  else
    kind = GOMP_MAP_ATTACH;

  switch (addr_tokens[i]->u.access_kind)
    {
    case ACCESS_POINTER:
    case ACCESS_POINTER_OFFSET:
      {
	/* Build an attach/detach node for the pointer; its "size" is
	   the byte offset of the accessed address from the pointer
	   value.  */
	tree virtual_origin
	  = fold_convert_loc (loc, ptrdiff_type_node, addr_tokens[i]->expr);
	tree data_addr = omp_accessed_addr (addr_tokens, i, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, kind);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_INDEXED_ARRAY:
      /* Array indexing needs no attach/detach operation.  */
      break;

    default:
      return error_mark_node;
    }

  /* Splice the new clause into the chain right after C.  */
  if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c) = c2;
      c = c2;
    }

  *idx = ++i;

  /* Keep expanding while further access methods remain in the chain.  */
  if (i < addr_tokens.length ()
      && addr_tokens[i]->type == ACCESS_METHOD)
    return omp_expand_access_chain (c, expr, addr_tokens, idx, ort);

  return c;
}
3584
/* Translate "array_base_decl access_method" to OMP mapping clauses.
   C is the clause being expanded, ADDR_TOKENS the token list for the
   address (the pair being handled starts at *IDX, which must be 0),
   EXPR the accessed expression, and ORT the region type.  New clauses
   are spliced into the chain after C; *IDX is advanced past the
   consumed tokens.  Returns the last clause of the expansion, or
   error_mark_node for unhandled access kinds.  */

tree
c_omp_address_inspector::expand_array_base (tree c,
					    vec<omp_addr_token *> &addr_tokens,
					    tree expr, unsigned *idx,
					    c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  location_t loc = OMP_CLAUSE_LOCATION (c);
  int i = *idx;
  tree decl = addr_tokens[i + 1]->expr;
  bool decl_p = DECL_P (decl);
  bool declare_target_p = (decl_p
			   && is_global_var (t: decl)
			   && lookup_attribute (attr_name: "omp declare target",
						DECL_ATTRIBUTES (decl)));
  bool map_p = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP;
  bool implicit_p = map_p && OMP_CLAUSE_MAP_IMPLICIT (c);
  bool chain_p = omp_access_chain_p (addr_tokens, i + 1);
  tree c2 = NULL_TREE, c3 = NULL_TREE;
  unsigned consume_tokens = 2;
  bool target_p = (ort & C_ORT_TARGET) != 0;
  bool openmp_p = (ort & C_ORT_OMP) != 0;

  gcc_assert (i == 0);

  /* For OpenACC, explicit attach/detach map kinds are passed through
     unchanged.  */
  if (!openmp_p
      && map_p
      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
    {
      i += 2;
      *idx = i;
      return c;
    }

  switch (addr_tokens[i + 1]->u.access_kind)
    {
    case ACCESS_DIRECT:
      /* Direct access to a decl: only needs marking addressable on
	 non-target constructs.  */
      if (decl_p && !target_p)
	c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
      break;

    case ACCESS_REF:
      {
	/* Copy the referenced object.  Note that we do this even for !MAP_P
	   clauses.  */
	tree obj = convert_from_reference (addr_tokens[i + 1]->expr);
	if (TREE_CODE (TREE_TYPE (obj)) == ARRAY_TYPE)
	  /* We have a ref to array: add a [0] element as the ME expects.  */
	  OMP_CLAUSE_DECL (c) = build_array_ref (loc, arr: obj, integer_zero_node);
	else
	  OMP_CLAUSE_DECL (c) = obj;
	OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (obj));

	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	if (!target_p)
	  break;

	/* If we have a reference to a pointer, avoid using
	   FIRSTPRIVATE_REFERENCE here in case the pointer is modified in the
	   offload region (we can only do that if the pointer does not point
	   to a mapped block).  We could avoid doing this if we don't have a
	   FROM mapping...  */
	bool ref_to_ptr = TREE_CODE (TREE_TYPE (obj)) == POINTER_TYPE;

	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (!ref_to_ptr
	    && !declare_target_p
	    && decl_p)
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	else
	  {
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2) = size_zero_node;

	/* For a reference to a pointer, also allocate space for the
	   pointer itself ahead of the attach/detach node.  */
	if (ref_to_ptr)
	  {
	    c3 = c2;
	    c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
	    OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	    OMP_CLAUSE_SIZE (c2)
	      = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (c2)));
	  }
      }
      break;

    case ACCESS_INDEXED_REF_TO_ARRAY:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	if (!target_p)
	  break;

	/* Bias clause size: accessed address minus the start of the
	   array the reference refers to.  */
	tree virtual_origin
	  = convert_from_reference (addr_tokens[i + 1]->expr);
	virtual_origin = build_fold_addr_expr (virtual_origin);
	virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					   virtual_origin);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (decl_p && target_p && !declare_target_p)
	  {
	    /* It appears that omp-low.cc mishandles cases where we have a
	       [reference to an] array of pointers such as:

		 int *arr[N]; (or "int *(&arr)[N] = ...")
		 #pragma omp target map(arr[a][b:c])
		 { ... }

	       in such cases chain_p will be true.  For now, fall back to
	       GOMP_MAP_POINTER.  */
	    enum gomp_map_kind k = chain_p ? GOMP_MAP_POINTER
					   : GOMP_MAP_FIRSTPRIVATE_REFERENCE;
	    OMP_CLAUSE_SET_MAP_KIND (c2, k);
	  }
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_INDEXED_ARRAY:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	/* The code handling "firstprivatize_array_bases" in gimplify.cc is
	   relevant here.  What do we need to create for arrays at this
	   stage?  (This condition doesn't feel quite right.  FIXME?)  */
	if (!target_p
	    && (TREE_CODE (TREE_TYPE (addr_tokens[i + 1]->expr))
		== ARRAY_TYPE))
	  break;

	/* Bias clause size: accessed address minus the array start.  */
	tree virtual_origin
	  = build_fold_addr_expr (addr_tokens[i + 1]->expr);
	virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					   virtual_origin);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (decl_p && target_p)
	  {
	    /* See comment for ACCESS_INDEXED_REF_TO_ARRAY above.  */
	    enum gomp_map_kind k = chain_p ? GOMP_MAP_POINTER
					   : GOMP_MAP_FIRSTPRIVATE_POINTER;
	    OMP_CLAUSE_SET_MAP_KIND (c2, k);
	  }
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_POINTER:
    case ACCESS_POINTER_OFFSET:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	unsigned last_access = i + 1;
	tree virtual_origin;

	/* A pointer that is itself an element of an indexed array:
	   consume the extra ACCESS_INDEXED_ARRAY token and bias against
	   the array start instead of the pointer value.  */
	if (chain_p
	    && addr_tokens[i + 2]->type == ACCESS_METHOD
	    && addr_tokens[i + 2]->u.access_kind == ACCESS_INDEXED_ARRAY)
	  {
	    /* !!! This seems wrong for ACCESS_POINTER_OFFSET.  */
	    consume_tokens = 3;
	    chain_p = omp_access_chain_p (addr_tokens, i + 2);
	    last_access = i + 2;
	    virtual_origin
	      = build_array_ref (loc, arr: addr_tokens[last_access]->expr,
				 integer_zero_node);
	    virtual_origin = build_fold_addr_expr (virtual_origin);
	    virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					       virtual_origin);
	  }
	else
	  virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					     addr_tokens[last_access]->expr);
	tree data_addr = omp_accessed_addr (addr_tokens, last_access, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	/* For OpenACC, use FIRSTPRIVATE_POINTER for decls even on non-compute
	   regions (e.g. "acc data" constructs).  It'll be removed anyway in
	   gimplify.cc, but doing it this way maintains diagnostic
	   behaviour.  */
	if (decl_p && (target_p || !openmp_p) && !chain_p && !declare_target_p)
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_REF_TO_POINTER:
    case ACCESS_REF_TO_POINTER_OFFSET:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	unsigned last_access = i + 1;
	tree virtual_origin;

	/* As for ACCESS_POINTER above: consume an extra indexed-array
	   token if present.  */
	if (chain_p
	    && addr_tokens[i + 2]->type == ACCESS_METHOD
	    && addr_tokens[i + 2]->u.access_kind == ACCESS_INDEXED_ARRAY)
	  {
	    /* !!! This seems wrong for ACCESS_POINTER_OFFSET.  */
	    consume_tokens = 3;
	    chain_p = omp_access_chain_p (addr_tokens, i + 2);
	    last_access = i + 2;
	    virtual_origin
	      = build_array_ref (loc, arr: addr_tokens[last_access]->expr,
				 integer_zero_node);
	    virtual_origin = build_fold_addr_expr (virtual_origin);
	    virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					       virtual_origin);
	  }
	else
	  {
	    virtual_origin
	      = convert_from_reference (addr_tokens[last_access]->expr);
	    virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					       virtual_origin);
	  }

	tree data_addr = omp_accessed_addr (addr_tokens, last_access, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (decl_p && target_p && !chain_p && !declare_target_p)
	  {
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	    OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	  }
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	    OMP_CLAUSE_DECL (c2)
	      = convert_from_reference (addr_tokens[i + 1]->expr);
	  }
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    default:
      *idx = i + consume_tokens;
      return error_mark_node;
    }

  /* Splice the new clause(s) into the chain after C, propagating the
     implicit flag.  */
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c2) = c3;
      OMP_CLAUSE_CHAIN (c) = c2;
      if (implicit_p)
	{
	  OMP_CLAUSE_MAP_IMPLICIT (c2) = 1;
	  OMP_CLAUSE_MAP_IMPLICIT (c3) = 1;
	}
      c = c3;
    }
  else if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c) = c2;
      if (implicit_p)
	OMP_CLAUSE_MAP_IMPLICIT (c2) = 1;
      c = c2;
    }

  i += consume_tokens;
  *idx = i;

  /* Expand any remaining accesses in the chain.  */
  if (chain_p && map_p)
    return omp_expand_access_chain (c, expr, addr_tokens, idx, ort);

  return c;
}
3925
/* Translate "component_selector access_method" to OMP mapping clauses.
   C is the clause being expanded, ADDR_TOKENS the token list (the pair
   being handled starts at *IDX), EXPR the accessed expression, and ORT
   the region type.  New clauses are spliced into the chain after C;
   *IDX is advanced past the two consumed tokens.  Returns the last
   clause of the expansion, or error_mark_node for unhandled access
   kinds.  */

tree
c_omp_address_inspector::expand_component_selector (tree c,
						    vec<omp_addr_token *>
						      &addr_tokens,
						    tree expr, unsigned *idx,
						    c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  location_t loc = OMP_CLAUSE_LOCATION (c);
  unsigned i = *idx;
  tree c2 = NULL_TREE, c3 = NULL_TREE;
  bool chain_p = omp_access_chain_p (addr_tokens, i + 1);
  bool map_p = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP;

  switch (addr_tokens[i + 1]->u.access_kind)
    {
    case ACCESS_DIRECT:
    case ACCESS_INDEXED_ARRAY:
      /* Nothing extra to map.  */
      break;

    case ACCESS_REF:
      {
	/* Copy the referenced object.  Note that we also do this for !MAP_P
	   clauses.  */
	tree obj = convert_from_reference (addr_tokens[i + 1]->expr);
	OMP_CLAUSE_DECL (c) = obj;
	OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (obj));

	if (!map_p)
	  break;

	/* ...and attach/detach the reference member itself.  */
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2) = size_zero_node;
      }
      break;

    case ACCESS_INDEXED_REF_TO_ARRAY:
      {
	if (!map_p)
	  break;

	/* Attach/detach with a bias of the accessed address minus the
	   start of the referenced array.  */
	tree virtual_origin
	  = convert_from_reference (addr_tokens[i + 1]->expr);
	virtual_origin = build_fold_addr_expr (virtual_origin);
	virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					   virtual_origin);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);

	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_POINTER:
    case ACCESS_POINTER_OFFSET:
      {
	if (!map_p)
	  break;

	/* Attach/detach with a bias of the accessed address minus the
	   pointer value.  */
	tree virtual_origin
	  = fold_convert_loc (loc, ptrdiff_type_node,
			      addr_tokens[i + 1]->expr);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);

	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_REF_TO_POINTER:
    case ACCESS_REF_TO_POINTER_OFFSET:
      {
	if (!map_p)
	  break;

	tree ptr = convert_from_reference (addr_tokens[i + 1]->expr);
	tree virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
						ptr);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);

	/* Attach the pointer...  */
	c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = ptr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);

	/* ...and also the reference.  */
	c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c3) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c3) = size_zero_node;
      }
      break;

    default:
      *idx = i + 2;
      return error_mark_node;
    }

  /* Splice the new clause(s) into the chain right after C.  */
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c2) = c3;
      OMP_CLAUSE_CHAIN (c) = c2;
      c = c3;
    }
  else if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c) = c2;
      c = c2;
    }

  i += 2;
  *idx = i;

  /* Expand any remaining accesses in the chain.  */
  if (chain_p && map_p)
    return omp_expand_access_chain (c, expr, addr_tokens, idx, ort);

  return c;
}
4067
4068/* Expand a map clause into a group of mapping clauses, creating nodes to
4069 attach/detach pointers and so forth as necessary. */
4070
4071tree
4072c_omp_address_inspector::expand_map_clause (tree c, tree expr,
4073 vec<omp_addr_token *> &addr_tokens,
4074 c_omp_region_type ort)
4075{
4076 using namespace omp_addr_tokenizer;
4077 unsigned i, length = addr_tokens.length ();
4078
4079 for (i = 0; i < length;)
4080 {
4081 int remaining = length - i;
4082
4083 if (remaining >= 2
4084 && addr_tokens[i]->type == ARRAY_BASE
4085 && addr_tokens[i]->u.structure_base_kind == BASE_DECL
4086 && addr_tokens[i + 1]->type == ACCESS_METHOD)
4087 {
4088 c = expand_array_base (c, addr_tokens, expr, idx: &i, ort);
4089 if (c == error_mark_node)
4090 return error_mark_node;
4091 }
4092 else if (remaining >= 2
4093 && addr_tokens[i]->type == ARRAY_BASE
4094 && addr_tokens[i]->u.structure_base_kind == BASE_ARBITRARY_EXPR
4095 && addr_tokens[i + 1]->type == ACCESS_METHOD)
4096 {
4097 c = expand_array_base (c, addr_tokens, expr, idx: &i, ort);
4098 if (c == error_mark_node)
4099 return error_mark_node;
4100 }
4101 else if (remaining >= 2
4102 && addr_tokens[i]->type == STRUCTURE_BASE
4103 && addr_tokens[i]->u.structure_base_kind == BASE_DECL
4104 && addr_tokens[i + 1]->type == ACCESS_METHOD)
4105 {
4106 if (addr_tokens[i + 1]->u.access_kind == ACCESS_DIRECT)
4107 c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
4108 i += 2;
4109 while (addr_tokens[i]->type == ACCESS_METHOD)
4110 i++;
4111 }
4112 else if (remaining >= 2
4113 && addr_tokens[i]->type == STRUCTURE_BASE
4114 && addr_tokens[i]->u.structure_base_kind == BASE_ARBITRARY_EXPR
4115 && addr_tokens[i + 1]->type == ACCESS_METHOD)
4116 {
4117 switch (addr_tokens[i + 1]->u.access_kind)
4118 {
4119 case ACCESS_DIRECT:
4120 case ACCESS_POINTER:
4121 i += 2;
4122 while (addr_tokens[i]->type == ACCESS_METHOD)
4123 i++;
4124 break;
4125 default:
4126 return error_mark_node;
4127 }
4128 }
4129 else if (remaining >= 2
4130 && addr_tokens[i]->type == COMPONENT_SELECTOR
4131 && addr_tokens[i + 1]->type == ACCESS_METHOD)
4132 {
4133 c = expand_component_selector (c, addr_tokens, expr, idx: &i, ort);
4134 /* We used 'expr', so these must have been the last tokens. */
4135 gcc_assert (i == length);
4136 if (c == error_mark_node)
4137 return error_mark_node;
4138 }
4139 else if (remaining >= 3
4140 && addr_tokens[i]->type == COMPONENT_SELECTOR
4141 && addr_tokens[i + 1]->type == STRUCTURE_BASE
4142 && (addr_tokens[i + 1]->u.structure_base_kind
4143 == BASE_COMPONENT_EXPR)
4144 && addr_tokens[i + 2]->type == ACCESS_METHOD)
4145 {
4146 i += 3;
4147 while (addr_tokens[i]->type == ACCESS_METHOD)
4148 i++;
4149 }
4150 else
4151 break;
4152 }
4153
4154 if (i == length)
4155 return c;
4156
4157 return error_mark_node;
4158}
4159
4160const struct c_omp_directive c_omp_directives[] = {
4161 /* Keep this alphabetically sorted by the first word. Non-null second/third
4162 if any should precede null ones. */
4163 { .first: "allocate", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_ALLOCATE,
4164 .kind: C_OMP_DIR_DECLARATIVE, .simd: false },
4165 { .first: "assume", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_ASSUME,
4166 .kind: C_OMP_DIR_INFORMATIONAL, .simd: false },
4167 { .first: "assumes", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_ASSUMES,
4168 .kind: C_OMP_DIR_INFORMATIONAL, .simd: false },
4169 { .first: "atomic", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_ATOMIC,
4170 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4171 { .first: "barrier", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_BARRIER,
4172 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4173 { .first: "begin", .second: "assumes", .third: nullptr, .id: PRAGMA_OMP_BEGIN,
4174 .kind: C_OMP_DIR_INFORMATIONAL, .simd: false },
4175 { .first: "begin", .second: "declare", .third: "target", .id: PRAGMA_OMP_BEGIN,
4176 .kind: C_OMP_DIR_DECLARATIVE, .simd: false },
4177 /* { "begin", "declare", "variant", PRAGMA_OMP_BEGIN,
4178 C_OMP_DIR_DECLARATIVE, false }, */
4179 /* { "begin", "metadirective", nullptr, PRAGMA_OMP_BEGIN,
4180 C_OMP_DIR_???, ??? }, */
4181 { .first: "cancel", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_CANCEL,
4182 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4183 { .first: "cancellation", .second: "point", .third: nullptr, .id: PRAGMA_OMP_CANCELLATION_POINT,
4184 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4185 { .first: "critical", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_CRITICAL,
4186 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4187 /* { "declare", "mapper", nullptr, PRAGMA_OMP_DECLARE,
4188 C_OMP_DIR_DECLARATIVE, false }, */
4189 { .first: "declare", .second: "reduction", .third: nullptr, .id: PRAGMA_OMP_DECLARE,
4190 .kind: C_OMP_DIR_DECLARATIVE, .simd: true },
4191 { .first: "declare", .second: "simd", .third: nullptr, .id: PRAGMA_OMP_DECLARE,
4192 .kind: C_OMP_DIR_DECLARATIVE, .simd: true },
4193 { .first: "declare", .second: "target", .third: nullptr, .id: PRAGMA_OMP_DECLARE,
4194 .kind: C_OMP_DIR_DECLARATIVE, .simd: false },
4195 { .first: "declare", .second: "variant", .third: nullptr, .id: PRAGMA_OMP_DECLARE,
4196 .kind: C_OMP_DIR_DECLARATIVE, .simd: false },
4197 { .first: "depobj", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_DEPOBJ,
4198 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4199 /* { "dispatch", nullptr, nullptr, PRAGMA_OMP_DISPATCH,
4200 C_OMP_DIR_CONSTRUCT, false }, */
4201 { .first: "distribute", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_DISTRIBUTE,
4202 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4203 { .first: "end", .second: "assumes", .third: nullptr, .id: PRAGMA_OMP_END,
4204 .kind: C_OMP_DIR_INFORMATIONAL, .simd: false },
4205 { .first: "end", .second: "declare", .third: "target", .id: PRAGMA_OMP_END,
4206 .kind: C_OMP_DIR_DECLARATIVE, .simd: false },
4207 /* { "end", "declare", "variant", PRAGMA_OMP_END,
4208 C_OMP_DIR_DECLARATIVE, false }, */
4209 /* { "end", "metadirective", nullptr, PRAGMA_OMP_END,
4210 C_OMP_DIR_???, ??? }, */
4211 /* error with at(execution) is C_OMP_DIR_STANDALONE. */
4212 { .first: "error", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_ERROR,
4213 .kind: C_OMP_DIR_UTILITY, .simd: false },
4214 { .first: "flush", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_FLUSH,
4215 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4216 { .first: "for", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_FOR,
4217 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4218 /* { "groupprivate", nullptr, nullptr, PRAGMA_OMP_GROUPPRIVATE,
4219 C_OMP_DIR_DECLARATIVE, false }, */
4220 /* { "interop", nullptr, nullptr, PRAGMA_OMP_INTEROP,
4221 C_OMP_DIR_STANDALONE, false }, */
4222 { .first: "loop", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_LOOP,
4223 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4224 { .first: "masked", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_MASKED,
4225 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4226 { .first: "master", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_MASTER,
4227 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4228 /* { "metadirective", nullptr, nullptr, PRAGMA_OMP_METADIRECTIVE,
4229 C_OMP_DIR_???, ??? }, */
4230 { .first: "nothing", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_NOTHING,
4231 .kind: C_OMP_DIR_UTILITY, .simd: false },
4232 /* ordered with depend clause is C_OMP_DIR_STANDALONE. */
4233 { .first: "ordered", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_ORDERED,
4234 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4235 { .first: "parallel", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_PARALLEL,
4236 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4237 { .first: "requires", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_REQUIRES,
4238 .kind: C_OMP_DIR_INFORMATIONAL, .simd: false },
4239 { .first: "scan", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_SCAN,
4240 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4241 { .first: "scope", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_SCOPE,
4242 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4243 { .first: "section", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_SECTION,
4244 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4245 { .first: "sections", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_SECTIONS,
4246 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4247 { .first: "simd", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_SIMD,
4248 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4249 { .first: "single", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_SINGLE,
4250 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4251 { .first: "target", .second: "data", .third: nullptr, .id: PRAGMA_OMP_TARGET,
4252 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4253 { .first: "target", .second: "enter", .third: "data", .id: PRAGMA_OMP_TARGET,
4254 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4255 { .first: "target", .second: "exit", .third: "data", .id: PRAGMA_OMP_TARGET,
4256 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4257 { .first: "target", .second: "update", .third: nullptr, .id: PRAGMA_OMP_TARGET,
4258 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4259 { .first: "target", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TARGET,
4260 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4261 { .first: "task", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TASK,
4262 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4263 { .first: "taskgroup", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TASKGROUP,
4264 .kind: C_OMP_DIR_CONSTRUCT, .simd: false },
4265 { .first: "taskloop", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TASKLOOP,
4266 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4267 { .first: "taskwait", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TASKWAIT,
4268 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4269 { .first: "taskyield", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TASKYIELD,
4270 .kind: C_OMP_DIR_STANDALONE, .simd: false },
4271 /* { "tile", nullptr, nullptr, PRAGMA_OMP_TILE,
4272 C_OMP_DIR_CONSTRUCT, false }, */
4273 { .first: "teams", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_TEAMS,
4274 .kind: C_OMP_DIR_CONSTRUCT, .simd: true },
4275 { .first: "threadprivate", .second: nullptr, .third: nullptr, .id: PRAGMA_OMP_THREADPRIVATE,
4276 .kind: C_OMP_DIR_DECLARATIVE, .simd: false }
4277 /* { "unroll", nullptr, nullptr, PRAGMA_OMP_UNROLL,
4278 C_OMP_DIR_CONSTRUCT, false }, */
4279};
4280
4281/* Find (non-combined/composite) OpenMP directive (if any) which starts
4282 with FIRST keyword and for multi-word directives has SECOND and
4283 THIRD keyword after it. */
4284
4285const struct c_omp_directive *
4286c_omp_categorize_directive (const char *first, const char *second,
4287 const char *third)
4288{
4289 const size_t n_omp_directives = ARRAY_SIZE (c_omp_directives);
4290 for (size_t i = 0; i < n_omp_directives; i++)
4291 {
4292 if ((unsigned char) c_omp_directives[i].first[0]
4293 < (unsigned char) first[0])
4294 continue;
4295 if ((unsigned char) c_omp_directives[i].first[0]
4296 > (unsigned char) first[0])
4297 break;
4298 if (strcmp (s1: c_omp_directives[i].first, s2: first))
4299 continue;
4300 if (!c_omp_directives[i].second)
4301 return &c_omp_directives[i];
4302 if (!second || strcmp (s1: c_omp_directives[i].second, s2: second))
4303 continue;
4304 if (!c_omp_directives[i].third)
4305 return &c_omp_directives[i];
4306 if (!third || strcmp (s1: c_omp_directives[i].third, s2: third))
4307 continue;
4308 return &c_omp_directives[i];
4309 }
4310 return NULL;
4311}
4312

source code of gcc/c-family/c-omp.cc