1 | /* Perform the semantic phase of parsing, i.e., the process of |
---|---|
2 | building tree structure, checking semantic consistency, and |
3 | building RTL. These routines are used both during actual parsing |
4 | and during the instantiation of template functions. |
5 | |
6 | Copyright (C) 1998-2025 Free Software Foundation, Inc. |
7 | Written by Mark Mitchell (mmitchell@usa.net) based on code found |
8 | formerly in parse.y and pt.cc. |
9 | |
10 | This file is part of GCC. |
11 | |
12 | GCC is free software; you can redistribute it and/or modify it |
13 | under the terms of the GNU General Public License as published by |
14 | the Free Software Foundation; either version 3, or (at your option) |
15 | any later version. |
16 | |
17 | GCC is distributed in the hope that it will be useful, but |
18 | WITHOUT ANY WARRANTY; without even the implied warranty of |
19 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
20 | General Public License for more details. |
21 | |
22 | You should have received a copy of the GNU General Public License |
23 | along with GCC; see the file COPYING3. If not see |
24 | <http://www.gnu.org/licenses/>. */ |
25 | |
26 | #include "config.h" |
27 | #include "system.h" |
28 | #include "coretypes.h" |
29 | #include "target.h" |
30 | #include "bitmap.h" |
31 | #include "cp-tree.h" |
32 | #include "stringpool.h" |
33 | #include "cgraph.h" |
34 | #include "stmt.h" |
35 | #include "varasm.h" |
36 | #include "stor-layout.h" |
37 | #include "c-family/c-objc.h" |
38 | #include "tree-inline.h" |
39 | #include "intl.h" |
40 | #include "tree-iterator.h" |
41 | #include "omp-general.h" |
42 | #include "convert.h" |
43 | #include "stringpool.h" |
44 | #include "attribs.h" |
45 | #include "gomp-constants.h" |
46 | #include "predict.h" |
47 | #include "memmodel.h" |
48 | #include "gimplify.h" |
49 | |
50 | /* There routines provide a modular interface to perform many parsing |
51 | operations. They may therefore be used during actual parsing, or |
52 | during template instantiation, which may be regarded as a |
53 | degenerate form of parsing. */ |
54 | |
55 | static tree finalize_nrv_r (tree *, int *, void *); |
56 | |
57 | /* Used for OpenMP non-static data member privatization. */ |
58 | |
59 | static hash_map<tree, tree> *omp_private_member_map; |
60 | static vec<tree> omp_private_member_vec; |
61 | static bool omp_private_member_ignore_next; |
62 | |
63 | |
64 | /* Deferred Access Checking Overview |
65 | --------------------------------- |
66 | |
67 | Most C++ expressions and declarations require access checking |
68 | to be performed during parsing. However, in several cases, |
69 | this has to be treated differently. |
70 | |
71 | For member declarations, access checking has to be deferred |
72 | until more information about the declaration is known. For |
73 | example: |
74 | |
75 | class A { |
76 | typedef int X; |
77 | public: |
78 | X f(); |
79 | }; |
80 | |
81 | A::X A::f(); |
82 | A::X g(); |
83 | |
84 | When we are parsing the function return type `A::X', we don't |
85 | really know if this is allowed until we parse the function name. |
86 | |
87 | Furthermore, some contexts require that access checking is |
88 | never performed at all. These include class heads, and template |
89 | instantiations. |
90 | |
91 | Typical use of access checking functions is described here: |
92 | |
93 | 1. When we enter a context that requires certain access checking |
94 | mode, the function `push_deferring_access_checks' is called with |
95 | DEFERRING argument specifying the desired mode. Access checking |
96 | may be performed immediately (dk_no_deferred), deferred |
97 | (dk_deferred), or not performed (dk_no_check). |
98 | |
99 | 2. When a declaration such as a type, or a variable, is encountered, |
100 | the function `perform_or_defer_access_check' is called. It |
101 | maintains a vector of all deferred checks. |
102 | |
103 | 3. The global `current_class_type' or `current_function_decl' is then |
104 | setup by the parser. `enforce_access' relies on these information |
105 | to check access. |
106 | |
107 | 4. Upon exiting the context mentioned in step 1, |
108 | `perform_deferred_access_checks' is called to check all declaration |
109 | stored in the vector. `pop_deferring_access_checks' is then |
110 | called to restore the previous access checking mode. |
111 | |
112 | In case of parsing error, we simply call `pop_deferring_access_checks' |
113 | without `perform_deferred_access_checks'. */ |
114 | |
115 | struct GTY(()) deferred_access { |
116 | /* A vector representing name-lookups for which we have deferred |
117 | checking access controls. We cannot check the accessibility of |
118 | names used in a decl-specifier-seq until we know what is being |
119 | declared because code like: |
120 | |
121 | class A { |
122 | class B {}; |
123 | B* f(); |
124 | } |
125 | |
126 | A::B* A::f() { return 0; } |
127 | |
128 | is valid, even though `A::B' is not generally accessible. */ |
129 | vec<deferred_access_check, va_gc> *deferred_access_checks; |
130 | |
131 | /* The current mode of access checks. */ |
132 | enum deferring_kind deferring_access_checks_kind; |
133 | }; |
134 | |
135 | /* Data for deferred access checking. */ |
136 | static GTY(()) vec<deferred_access, va_gc> *deferred_access_stack; |
137 | static GTY(()) unsigned deferred_access_no_check; |
138 | |
139 | /* Save the current deferred access states and start deferred |
140 | access checking iff DEFER_P is true. */ |
141 | |
142 | void |
143 | push_deferring_access_checks (deferring_kind deferring) |
144 | { |
145 | /* For context like template instantiation, access checking |
146 | disabling applies to all nested context. */ |
147 | if (deferred_access_no_check || deferring == dk_no_check) |
148 | deferred_access_no_check++; |
149 | else |
150 | { |
151 | deferred_access e = {NULL, .deferring_access_checks_kind: deferring}; |
152 | vec_safe_push (v&: deferred_access_stack, obj: e); |
153 | } |
154 | } |
155 | |
156 | /* Save the current deferred access states and start deferred access |
157 | checking, continuing the set of deferred checks in CHECKS. */ |
158 | |
159 | void |
160 | reopen_deferring_access_checks (vec<deferred_access_check, va_gc> * checks) |
161 | { |
162 | push_deferring_access_checks (deferring: dk_deferred); |
163 | if (!deferred_access_no_check) |
164 | deferred_access_stack->last().deferred_access_checks = checks; |
165 | } |
166 | |
167 | /* Resume deferring access checks again after we stopped doing |
168 | this previously. */ |
169 | |
170 | void |
171 | resume_deferring_access_checks (void) |
172 | { |
173 | if (!deferred_access_no_check) |
174 | deferred_access_stack->last().deferring_access_checks_kind = dk_deferred; |
175 | } |
176 | |
177 | /* Stop deferring access checks. */ |
178 | |
179 | void |
180 | stop_deferring_access_checks (void) |
181 | { |
182 | if (!deferred_access_no_check) |
183 | deferred_access_stack->last().deferring_access_checks_kind = dk_no_deferred; |
184 | } |
185 | |
186 | /* Discard the current deferred access checks and restore the |
187 | previous states. */ |
188 | |
189 | void |
190 | pop_deferring_access_checks (void) |
191 | { |
192 | if (deferred_access_no_check) |
193 | deferred_access_no_check--; |
194 | else |
195 | deferred_access_stack->pop (); |
196 | } |
197 | |
198 | /* Returns a TREE_LIST representing the deferred checks. |
199 | The TREE_PURPOSE of each node is the type through which the |
200 | access occurred; the TREE_VALUE is the declaration named. |
201 | */ |
202 | |
203 | vec<deferred_access_check, va_gc> * |
204 | get_deferred_access_checks (void) |
205 | { |
206 | if (deferred_access_no_check) |
207 | return NULL; |
208 | else |
209 | return (deferred_access_stack->last().deferred_access_checks); |
210 | } |
211 | |
212 | /* Take current deferred checks and combine with the |
213 | previous states if we also defer checks previously. |
214 | Otherwise perform checks now. */ |
215 | |
216 | void |
217 | pop_to_parent_deferring_access_checks (void) |
218 | { |
219 | if (deferred_access_no_check) |
220 | deferred_access_no_check--; |
221 | else |
222 | { |
223 | vec<deferred_access_check, va_gc> *checks; |
224 | deferred_access *ptr; |
225 | |
226 | checks = (deferred_access_stack->last ().deferred_access_checks); |
227 | |
228 | deferred_access_stack->pop (); |
229 | ptr = &deferred_access_stack->last (); |
230 | if (ptr->deferring_access_checks_kind == dk_no_deferred) |
231 | { |
232 | /* Check access. */ |
233 | perform_access_checks (checks, tf_warning_or_error); |
234 | } |
235 | else |
236 | { |
237 | /* Merge with parent. */ |
238 | int i, j; |
239 | deferred_access_check *chk, *probe; |
240 | |
241 | FOR_EACH_VEC_SAFE_ELT (checks, i, chk) |
242 | { |
243 | FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, j, probe) |
244 | { |
245 | if (probe->binfo == chk->binfo && |
246 | probe->decl == chk->decl && |
247 | probe->diag_decl == chk->diag_decl) |
248 | goto found; |
249 | } |
250 | /* Insert into parent's checks. */ |
251 | vec_safe_push (v&: ptr->deferred_access_checks, obj: *chk); |
252 | found:; |
253 | } |
254 | } |
255 | } |
256 | } |
257 | |
258 | /* Called from enforce_access. A class has attempted (but failed) to access |
259 | DECL. It is already established that a baseclass of that class, |
260 | PARENT_BINFO, has private access to DECL. Examine certain special cases |
261 | to find a decl that accurately describes the source of the problem. If |
262 | none of the special cases apply, simply return DECL as the source of the |
263 | problem. */ |
264 | |
265 | static tree |
266 | get_class_access_diagnostic_decl (tree parent_binfo, tree decl) |
267 | { |
268 | /* When a class is denied access to a decl in a baseclass, most of the |
269 | time it is because the decl itself was declared as private at the point |
270 | of declaration. |
271 | |
272 | However, in C++, there are (at least) two situations in which a decl |
273 | can be private even though it was not originally defined as such. |
274 | These two situations only apply if a baseclass had private access to |
275 | DECL (this function is only called if that is the case). */ |
276 | |
277 | /* We should first check whether the reason the parent had private access |
278 | to DECL was simply because DECL was created and declared as private in |
279 | the parent. If it was, then DECL is definitively the source of the |
280 | problem. */ |
281 | if (SAME_BINFO_TYPE_P (context_for_name_lookup (decl), |
282 | BINFO_TYPE (parent_binfo))) |
283 | return decl; |
284 | |
285 | /* 1. If the "using" keyword is used to inherit DECL within the parent, |
286 | this may cause DECL to be private, so we should return the using |
287 | statement as the source of the problem. |
288 | |
289 | Scan the fields of PARENT_BINFO and see if there are any using decls. If |
290 | there are, see if they inherit DECL. If they do, that's where DECL must |
291 | have been declared private. */ |
292 | |
293 | for (tree parent_field = TYPE_FIELDS (BINFO_TYPE (parent_binfo)); |
294 | parent_field; |
295 | parent_field = DECL_CHAIN (parent_field)) |
296 | /* Not necessary, but also check TREE_PRIVATE for the sake of |
297 | eliminating obviously non-relevant using decls. */ |
298 | if (TREE_CODE (parent_field) == USING_DECL |
299 | && TREE_PRIVATE (parent_field)) |
300 | { |
301 | tree decl_stripped = strip_using_decl (parent_field); |
302 | |
303 | /* The using statement might be overloaded. If so, we need to |
304 | check all of the overloads. */ |
305 | for (ovl_iterator iter (decl_stripped); iter; ++iter) |
306 | /* If equal, the using statement inherits DECL, and so is the |
307 | source of the access failure, so return it. */ |
308 | if (*iter == decl) |
309 | return parent_field; |
310 | } |
311 | |
312 | /* 2. If DECL was privately inherited by the parent class, then DECL will |
313 | be inaccessible, even though it may originally have been accessible to |
314 | deriving classes. In that case, the fault lies with the parent, since it |
315 | used a private inheritance, so we return the parent as the source of the |
316 | problem. |
317 | |
318 | Since this is the last check, we just assume it's true. At worst, it |
319 | will simply point to the class that failed to give access, which is |
320 | technically true. */ |
321 | return TYPE_NAME (BINFO_TYPE (parent_binfo)); |
322 | } |
323 | |
324 | /* If the current scope isn't allowed to access DECL along |
325 | BASETYPE_PATH, give an error, or if we're parsing a function or class |
326 | template, defer the access check to be performed at instantiation time. |
327 | The most derived class in BASETYPE_PATH is the one used to qualify DECL. |
328 | DIAG_DECL is the declaration to use in the error diagnostic. */ |
329 | |
330 | static bool |
331 | enforce_access (tree basetype_path, tree decl, tree diag_decl, |
332 | tsubst_flags_t complain, access_failure_info *afi = NULL) |
333 | { |
334 | gcc_assert (TREE_CODE (basetype_path) == TREE_BINFO); |
335 | |
336 | if (flag_new_inheriting_ctors |
337 | && DECL_INHERITED_CTOR (decl)) |
338 | { |
339 | /* 7.3.3/18: The additional constructors are accessible if they would be |
340 | accessible when used to construct an object of the corresponding base |
341 | class. */ |
342 | decl = strip_inheriting_ctors (decl); |
343 | basetype_path = lookup_base (basetype_path, DECL_CONTEXT (decl), |
344 | ba_any, NULL, complain); |
345 | } |
346 | |
347 | tree cs = current_scope (); |
348 | if (in_template_context |
349 | && (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL)) |
350 | if (tree template_info = get_template_info (cs)) |
351 | { |
352 | /* When parsing a function or class template, we in general need to |
353 | defer access checks until template instantiation time, since a friend |
354 | declaration may grant access only to a particular specialization of |
355 | the template. */ |
356 | |
357 | if (accessible_p (basetype_path, decl, /*consider_local_p=*/true)) |
358 | /* But if the member is deemed accessible at parse time, then we can |
359 | assume it'll be accessible at instantiation time. */ |
360 | return true; |
361 | |
362 | /* Access of a dependent decl should be rechecked after tsubst'ing |
363 | into the user of the decl, rather than explicitly deferring the |
364 | check here. */ |
365 | gcc_assert (!uses_template_parms (decl)); |
366 | if (TREE_CODE (decl) == FIELD_DECL) |
367 | gcc_assert (!uses_template_parms (DECL_CONTEXT (decl))); |
368 | |
369 | /* Defer this access check until instantiation time. */ |
370 | deferred_access_check access_check; |
371 | access_check.binfo = basetype_path; |
372 | access_check.decl = decl; |
373 | access_check.diag_decl = diag_decl; |
374 | access_check.loc = input_location; |
375 | vec_safe_push (TI_DEFERRED_ACCESS_CHECKS (template_info), obj: access_check); |
376 | return true; |
377 | } |
378 | |
379 | if (!accessible_p (basetype_path, decl, /*consider_local_p=*/true)) |
380 | { |
381 | if (flag_new_inheriting_ctors) |
382 | diag_decl = strip_inheriting_ctors (diag_decl); |
383 | if (complain & tf_error) |
384 | { |
385 | access_kind access_failure_reason = ak_none; |
386 | |
387 | /* By default, using the decl as the source of the problem will |
388 | usually give correct results. */ |
389 | tree diag_location = diag_decl; |
390 | |
391 | /* However, if a parent of BASETYPE_PATH had private access to decl, |
392 | then it actually might be the case that the source of the problem |
393 | is not DECL. */ |
394 | tree parent_binfo = get_parent_with_private_access (decl, |
395 | binfo: basetype_path); |
396 | |
397 | /* So if a parent did have private access, then we need to do |
398 | special checks to obtain the best diagnostic location decl. */ |
399 | if (parent_binfo != NULL_TREE) |
400 | { |
401 | diag_location = get_class_access_diagnostic_decl (parent_binfo, |
402 | decl: diag_decl); |
403 | |
404 | /* We also at this point know that the reason access failed was |
405 | because decl was private. */ |
406 | access_failure_reason = ak_private; |
407 | } |
408 | |
409 | /* Finally, generate an error message. */ |
410 | complain_about_access (decl, diag_decl, diag_location, true, |
411 | access_failure_reason); |
412 | } |
413 | if (afi) |
414 | afi->record_access_failure (basetype_path, decl, diag_decl); |
415 | return false; |
416 | } |
417 | |
418 | return true; |
419 | } |
420 | |
421 | /* Perform the access checks in CHECKS. The TREE_PURPOSE of each node |
422 | is the BINFO indicating the qualifying scope used to access the |
423 | DECL node stored in the TREE_VALUE of the node. If CHECKS is empty |
424 | or we aren't in SFINAE context or all the checks succeed return TRUE, |
425 | otherwise FALSE. */ |
426 | |
427 | bool |
428 | perform_access_checks (vec<deferred_access_check, va_gc> *checks, |
429 | tsubst_flags_t complain) |
430 | { |
431 | int i; |
432 | deferred_access_check *chk; |
433 | location_t loc = input_location; |
434 | bool ok = true; |
435 | |
436 | if (!checks) |
437 | return true; |
438 | |
439 | FOR_EACH_VEC_SAFE_ELT (checks, i, chk) |
440 | { |
441 | input_location = chk->loc; |
442 | ok &= enforce_access (basetype_path: chk->binfo, decl: chk->decl, diag_decl: chk->diag_decl, complain); |
443 | } |
444 | |
445 | input_location = loc; |
446 | return (complain & tf_error) ? true : ok; |
447 | } |
448 | |
449 | /* Perform the deferred access checks. |
450 | |
451 | After performing the checks, we still have to keep the list |
452 | `deferred_access_stack->deferred_access_checks' since we may want |
453 | to check access for them again later in a different context. |
454 | For example: |
455 | |
456 | class A { |
457 | typedef int X; |
458 | static X a; |
459 | }; |
460 | A::X A::a, x; // No error for `A::a', error for `x' |
461 | |
462 | We have to perform deferred access of `A::X', first with `A::a', |
463 | next with `x'. Return value like perform_access_checks above. */ |
464 | |
465 | bool |
466 | perform_deferred_access_checks (tsubst_flags_t complain) |
467 | { |
468 | return perform_access_checks (checks: get_deferred_access_checks (), complain); |
469 | } |
470 | |
471 | /* Defer checking the accessibility of DECL, when looked up in |
472 | BINFO. DIAG_DECL is the declaration to use to print diagnostics. |
473 | Return value like perform_access_checks above. |
474 | If non-NULL, report failures to AFI. */ |
475 | |
476 | bool |
477 | perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl, |
478 | tsubst_flags_t complain, |
479 | access_failure_info *afi) |
480 | { |
481 | int i; |
482 | deferred_access *ptr; |
483 | deferred_access_check *chk; |
484 | |
485 | /* Exit if we are in a context that no access checking is performed. */ |
486 | if (deferred_access_no_check) |
487 | return true; |
488 | |
489 | gcc_assert (TREE_CODE (binfo) == TREE_BINFO); |
490 | |
491 | ptr = &deferred_access_stack->last (); |
492 | |
493 | /* If we are not supposed to defer access checks, just check now. */ |
494 | if (ptr->deferring_access_checks_kind == dk_no_deferred) |
495 | { |
496 | bool ok = enforce_access (basetype_path: binfo, decl, diag_decl, complain, afi); |
497 | return (complain & tf_error) ? true : ok; |
498 | } |
499 | |
500 | /* See if we are already going to perform this check. */ |
501 | FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, i, chk) |
502 | { |
503 | if (chk->decl == decl && chk->binfo == binfo && |
504 | chk->diag_decl == diag_decl) |
505 | { |
506 | return true; |
507 | } |
508 | } |
509 | /* If not, record the check. */ |
510 | deferred_access_check new_access = {.binfo: binfo, .decl: decl, .diag_decl: diag_decl, .loc: input_location}; |
511 | vec_safe_push (v&: ptr->deferred_access_checks, obj: new_access); |
512 | |
513 | return true; |
514 | } |
515 | |
516 | /* Returns nonzero if the current statement is a full expression, |
517 | i.e. temporaries created during that statement should be destroyed |
518 | at the end of the statement. */ |
519 | |
520 | int |
521 | stmts_are_full_exprs_p (void) |
522 | { |
523 | return current_stmt_tree ()->stmts_are_full_exprs_p; |
524 | } |
525 | |
526 | /* T is a statement. Add it to the statement-tree. This is the C++ |
527 | version. The C/ObjC frontends have a slightly different version of |
528 | this function. */ |
529 | |
530 | tree |
531 | add_stmt (tree t) |
532 | { |
533 | enum tree_code code = TREE_CODE (t); |
534 | |
535 | if (EXPR_P (t) && code != LABEL_EXPR) |
536 | { |
537 | if (!EXPR_HAS_LOCATION (t)) |
538 | SET_EXPR_LOCATION (t, input_location); |
539 | |
540 | /* When we expand a statement-tree, we must know whether or not the |
541 | statements are full-expressions. We record that fact here. */ |
542 | if (STATEMENT_CODE_P (TREE_CODE (t))) |
543 | STMT_IS_FULL_EXPR_P (t) = stmts_are_full_exprs_p (); |
544 | } |
545 | |
546 | if (code == LABEL_EXPR || code == CASE_LABEL_EXPR) |
547 | STATEMENT_LIST_HAS_LABEL (cur_stmt_list) = 1; |
548 | |
549 | /* Add T to the statement-tree. Non-side-effect statements need to be |
550 | recorded during statement expressions. */ |
551 | gcc_checking_assert (!stmt_list_stack->is_empty ()); |
552 | append_to_statement_list_force (t, &cur_stmt_list); |
553 | |
554 | return t; |
555 | } |
556 | |
557 | /* Returns the stmt_tree to which statements are currently being added. */ |
558 | |
559 | stmt_tree |
560 | current_stmt_tree (void) |
561 | { |
562 | return (cfun |
563 | ? &cfun->language->base.x_stmt_tree |
564 | : &scope_chain->x_stmt_tree); |
565 | } |
566 | |
567 | /* If statements are full expressions, wrap STMT in a CLEANUP_POINT_EXPR. */ |
568 | |
569 | static tree |
570 | maybe_cleanup_point_expr (tree expr) |
571 | { |
572 | if (!processing_template_decl && stmts_are_full_exprs_p ()) |
573 | expr = fold_build_cleanup_point_expr (TREE_TYPE (expr), expr); |
574 | return expr; |
575 | } |
576 | |
577 | /* Like maybe_cleanup_point_expr except have the type of the new expression be |
578 | void so we don't need to create a temporary variable to hold the inner |
579 | expression. The reason why we do this is because the original type might be |
580 | an aggregate and we cannot create a temporary variable for that type. */ |
581 | |
582 | tree |
583 | maybe_cleanup_point_expr_void (tree expr) |
584 | { |
585 | if (!processing_template_decl && stmts_are_full_exprs_p ()) |
586 | expr = fold_build_cleanup_point_expr (void_type_node, expr); |
587 | return expr; |
588 | } |
589 | |
590 | |
591 | |
592 | /* Create a declaration statement for the declaration given by the DECL. */ |
593 | |
594 | void |
595 | add_decl_expr (tree decl) |
596 | { |
597 | tree r = build_stmt (DECL_SOURCE_LOCATION (decl), DECL_EXPR, decl); |
598 | if (DECL_INITIAL (decl) |
599 | || (DECL_SIZE (decl) && TREE_SIDE_EFFECTS (DECL_SIZE (decl)))) |
600 | r = maybe_cleanup_point_expr_void (expr: r); |
601 | add_stmt (t: r); |
602 | } |
603 | |
604 | /* Set EXPR_LOCATION on one cleanup T to LOC. */ |
605 | |
606 | static void |
607 | set_one_cleanup_loc (tree t, location_t loc) |
608 | { |
609 | if (!t) |
610 | return; |
611 | if (TREE_CODE (t) != POSTCONDITION_STMT) |
612 | protected_set_expr_location (t, loc); |
613 | /* Avoid locus differences for C++ cdtor calls depending on whether |
614 | cdtor_returns_this: a conversion to void is added to discard the return |
615 | value, and this conversion ends up carrying the location, and when it |
616 | gets discarded, the location is lost. So hold it in the call as well. */ |
617 | if (TREE_CODE (t) == NOP_EXPR |
618 | && TREE_TYPE (t) == void_type_node |
619 | && TREE_CODE (TREE_OPERAND (t, 0)) == CALL_EXPR) |
620 | protected_set_expr_location (TREE_OPERAND (t, 0), loc); |
621 | } |
622 | |
623 | /* Set EXPR_LOCATION of the cleanups of any CLEANUP_STMT in STMTS to LOC. */ |
624 | |
625 | static void |
626 | set_cleanup_locs (tree stmts, location_t loc) |
627 | { |
628 | if (TREE_CODE (stmts) == CLEANUP_STMT) |
629 | { |
630 | set_one_cleanup_loc (CLEANUP_EXPR (stmts), loc); |
631 | set_cleanup_locs (CLEANUP_BODY (stmts), loc); |
632 | } |
633 | else if (TREE_CODE (stmts) == STATEMENT_LIST) |
634 | for (tree stmt : tsi_range (stmts)) |
635 | set_cleanup_locs (stmts: stmt, loc); |
636 | } |
637 | |
638 | /* True iff the innermost block scope is a try block. */ |
639 | |
640 | static bool |
641 | at_try_scope () |
642 | { |
643 | cp_binding_level *b = current_binding_level; |
644 | while (b && b->kind == sk_cleanup) |
645 | b = b->level_chain; |
646 | return b && b->kind == sk_try; |
647 | } |
648 | |
649 | /* Finish a scope. */ |
650 | |
651 | tree |
652 | do_poplevel (tree stmt_list) |
653 | { |
654 | tree block = NULL; |
655 | |
656 | bool was_try = at_try_scope (); |
657 | |
658 | if (stmts_are_full_exprs_p ()) |
659 | block = poplevel (kept_level_p (), 1, 0); |
660 | |
661 | /* This needs to come after poplevel merges sk_cleanup statement_lists. */ |
662 | maybe_splice_retval_cleanup (stmt_list, was_try); |
663 | |
664 | stmt_list = pop_stmt_list (stmt_list); |
665 | |
666 | /* input_location is the last token of the scope, usually a }. */ |
667 | set_cleanup_locs (stmts: stmt_list, loc: input_location); |
668 | |
669 | if (!processing_template_decl) |
670 | { |
671 | stmt_list = c_build_bind_expr (input_location, block, stmt_list); |
672 | /* ??? See c_end_compound_stmt re statement expressions. */ |
673 | } |
674 | |
675 | return stmt_list; |
676 | } |
677 | |
678 | /* Begin a new scope. */ |
679 | |
680 | static tree |
681 | do_pushlevel (scope_kind sk) |
682 | { |
683 | tree ret = push_stmt_list (); |
684 | if (stmts_are_full_exprs_p ()) |
685 | begin_scope (sk, NULL); |
686 | return ret; |
687 | } |
688 | |
689 | /* Queue a cleanup. CLEANUP is an expression/statement to be executed |
690 | when the current scope is exited. EH_ONLY is true when this is not |
691 | meant to apply to normal control flow transfer. DECL is the VAR_DECL |
692 | being cleaned up, if any, or null for temporaries or subobjects. */ |
693 | |
694 | void |
695 | push_cleanup (tree decl, tree cleanup, bool eh_only) |
696 | { |
697 | tree stmt = build_stmt (input_location, CLEANUP_STMT, NULL, cleanup, decl); |
698 | CLEANUP_EH_ONLY (stmt) = eh_only; |
699 | add_stmt (t: stmt); |
700 | CLEANUP_BODY (stmt) = push_stmt_list (); |
701 | } |
702 | |
703 | /* Simple infinite loop tracking for -Wreturn-type. We keep a stack of all |
704 | the current loops, represented by 'NULL_TREE' if we've seen a possible |
705 | exit, and 'error_mark_node' if not. This is currently used only to |
706 | suppress the warning about a function with no return statements, and |
707 | therefore we don't bother noting returns as possible exits. We also |
708 | don't bother with gotos. */ |
709 | |
710 | static void |
711 | begin_maybe_infinite_loop (tree cond) |
712 | { |
713 | /* Only track this while parsing a function, not during instantiation. */ |
714 | if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl) |
715 | && !processing_template_decl)) |
716 | return; |
717 | bool maybe_infinite = true; |
718 | if (cond) |
719 | { |
720 | cond = fold_non_dependent_expr (cond); |
721 | maybe_infinite = integer_nonzerop (cond); |
722 | } |
723 | vec_safe_push (cp_function_chain->infinite_loops, |
724 | obj: maybe_infinite ? error_mark_node : NULL_TREE); |
725 | |
726 | } |
727 | |
728 | /* A break is a possible exit for the current loop. */ |
729 | |
730 | void |
731 | break_maybe_infinite_loop (void) |
732 | { |
733 | if (!cfun) |
734 | return; |
735 | cp_function_chain->infinite_loops->last() = NULL_TREE; |
736 | } |
737 | |
738 | /* If we reach the end of the loop without seeing a possible exit, we have |
739 | an infinite loop. */ |
740 | |
741 | static void |
742 | end_maybe_infinite_loop (tree cond) |
743 | { |
744 | if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl) |
745 | && !processing_template_decl)) |
746 | return; |
747 | tree current = cp_function_chain->infinite_loops->pop(); |
748 | if (current != NULL_TREE) |
749 | { |
750 | cond = fold_non_dependent_expr (cond); |
751 | if (integer_nonzerop (cond)) |
752 | current_function_infinite_loop = 1; |
753 | } |
754 | } |
755 | |
756 | /* Begin a conditional that might contain a declaration. When generating |
757 | normal code, we want the declaration to appear before the statement |
758 | containing the conditional. When generating template code, we want the |
759 | conditional to be rendered as the raw DECL_EXPR. */ |
760 | |
761 | static void |
762 | begin_cond (tree *cond_p) |
763 | { |
764 | if (processing_template_decl) |
765 | *cond_p = push_stmt_list (); |
766 | } |
767 | |
768 | /* Finish such a conditional. */ |
769 | |
770 | static void |
771 | finish_cond (tree *cond_p, tree expr) |
772 | { |
773 | if (processing_template_decl) |
774 | { |
775 | tree cond = pop_stmt_list (*cond_p); |
776 | |
777 | if (expr == NULL_TREE) |
778 | /* Empty condition in 'for'. */ |
779 | gcc_assert (empty_expr_stmt_p (cond)); |
780 | else if (check_for_bare_parameter_packs (expr)) |
781 | expr = error_mark_node; |
782 | else if (!empty_expr_stmt_p (cond)) |
783 | expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr); |
784 | } |
785 | *cond_p = expr; |
786 | } |
787 | |
788 | /* If loop condition specifies a conditional with a declaration, |
789 | such as |
790 | while (A x = 42) { } |
791 | for (; A x = 42;) { } |
792 | move the *BODY_P statements as a BIND_EXPR into {FOR,WHILE}_COND_PREP |
793 | and if there are any CLEANUP_STMT at the end, remember their count in |
794 | {FOR,WHILE}_COND_CLEANUP. |
795 | genericize_c_loop will then handle it appropriately. In particular, |
796 | the {FOR,WHILE}_COND, {FOR,WHILE}_BODY, if used continue label and |
797 | FOR_EXPR will be appended into the {FOR,WHILE}_COND_PREP BIND_EXPR, |
798 | but it can't be done too early because only the actual body should |
799 | bind BREAK_STMT and CONTINUE_STMT to the inner loop. |
800 | The statement list for *BODY will be empty if the conditional did |
801 | not declare anything. */ |
802 | |
803 | static void |
804 | adjust_loop_decl_cond (tree *body_p, tree *prep_p, tree *cleanup_p) |
805 | { |
806 | if (!TREE_SIDE_EFFECTS (*body_p)) |
807 | return; |
808 | |
809 | gcc_assert (!processing_template_decl); |
810 | *prep_p = *body_p; |
811 | if (*prep_p != cur_stmt_list) |
812 | { |
813 | /* There can be just one CLEANUP_STMT, or there could be multiple |
814 | nested CLEANUP_STMTs, e.g. for structured bindings used as |
815 | condition. */ |
816 | gcc_assert (stmt_list_stack->length () > 1); |
817 | for (unsigned i = stmt_list_stack->length () - 2; ; --i) |
818 | { |
819 | tree t = (*stmt_list_stack)[i]; |
820 | tree_stmt_iterator last = tsi_last (t); |
821 | gcc_assert (tsi_one_before_end_p (last) |
822 | && TREE_CODE (tsi_stmt (last)) == CLEANUP_STMT |
823 | && (CLEANUP_BODY (tsi_stmt (last)) |
824 | == (*stmt_list_stack)[i + 1]) |
825 | && !CLEANUP_EH_ONLY (tsi_stmt (last))); |
826 | if (t == *prep_p) |
827 | { |
828 | *cleanup_p = build_int_cst (long_unsigned_type_node, |
829 | stmt_list_stack->length () - 1 - i); |
830 | break; |
831 | } |
832 | gcc_assert (i >= 1); |
833 | } |
834 | } |
835 | current_binding_level->keep = true; |
836 | tree_stmt_iterator iter = tsi_last (cur_stmt_list); |
837 | /* Temporarily store in {FOR,WHILE}_BODY the last statement of |
838 | the innnermost statement list or NULL if it has no statement. |
839 | This is used in finish_loop_cond_prep to find out the splitting |
840 | point and then {FOR,WHILE}_BODY will be changed to the actual |
841 | body. */ |
842 | if (tsi_end_p (i: iter)) |
843 | *body_p = NULL_TREE; |
844 | else |
845 | *body_p = tsi_stmt (i: iter); |
846 | } |
847 | |
848 | /* Finalize {FOR,WHILE}_{BODY,COND_PREP} after the loop body. |
849 | The above function initialized *BODY_P to the last statement |
850 | in *PREP_P at that point. |
851 | Call do_poplevel on *PREP_P and move everything after that |
852 | former last statement into *BODY_P. genericize_c_loop |
853 | will later put those parts back together. |
854 | CLEANUP is {FOR,WHILE}_COND_CLEANUP. */ |
855 | |
856 | static void |
857 | finish_loop_cond_prep (tree *body_p, tree *prep_p, tree cleanup) |
858 | { |
859 | *prep_p = do_poplevel (stmt_list: *prep_p); |
860 | gcc_assert (TREE_CODE (*prep_p) == BIND_EXPR); |
861 | if (BIND_EXPR_BODY (*prep_p) == *body_p) |
862 | { |
863 | gcc_assert (cleanup == NULL_TREE); |
864 | *body_p = build_empty_stmt (input_location); |
865 | return; |
866 | } |
867 | tree stmt_list = BIND_EXPR_BODY (*prep_p); |
868 | gcc_assert (TREE_CODE (stmt_list) == STATEMENT_LIST); |
869 | if (cleanup) |
870 | { |
871 | tree_stmt_iterator iter = tsi_last (t: stmt_list); |
872 | gcc_assert (TREE_CODE (tsi_stmt (iter)) == CLEANUP_STMT); |
873 | for (unsigned depth = tree_to_uhwi (cleanup); depth > 1; --depth) |
874 | { |
875 | gcc_assert (TREE_CODE (CLEANUP_BODY (tsi_stmt (iter))) |
876 | == STATEMENT_LIST); |
877 | iter = tsi_last (CLEANUP_BODY (tsi_stmt (iter))); |
878 | gcc_assert (TREE_CODE (tsi_stmt (iter)) == CLEANUP_STMT); |
879 | } |
880 | if (*body_p == NULL_TREE) |
881 | { |
882 | *body_p = CLEANUP_BODY (tsi_stmt (iter)); |
883 | CLEANUP_BODY (tsi_stmt (iter)) = build_empty_stmt (input_location); |
884 | return; |
885 | } |
886 | stmt_list = CLEANUP_BODY (tsi_stmt (iter)); |
887 | } |
888 | tree_stmt_iterator iter = tsi_start (t: stmt_list); |
889 | while (tsi_stmt (i: iter) != *body_p) |
890 | tsi_next (i: &iter); |
891 | *body_p = tsi_split_stmt_list (input_location, iter); |
892 | } |
893 | |
894 | /* Finish a goto-statement. */ |
895 | |
896 | tree |
897 | finish_goto_stmt (tree destination) |
898 | { |
899 | if (identifier_p (t: destination)) |
900 | destination = lookup_label (destination); |
901 | |
902 | /* We warn about unused labels with -Wunused. That means we have to |
903 | mark the used labels as used. */ |
904 | if (TREE_CODE (destination) == LABEL_DECL) |
905 | TREE_USED (destination) = 1; |
906 | else |
907 | { |
908 | destination = mark_rvalue_use (destination); |
909 | if (!processing_template_decl) |
910 | { |
911 | destination = cp_convert (ptr_type_node, destination, |
912 | tf_warning_or_error); |
913 | if (error_operand_p (t: destination)) |
914 | return NULL_TREE; |
915 | destination |
916 | = fold_build_cleanup_point_expr (TREE_TYPE (destination), |
917 | expr: destination); |
918 | } |
919 | } |
920 | |
921 | check_goto (destination); |
922 | |
923 | add_stmt (t: build_predict_expr (PRED_GOTO, NOT_TAKEN)); |
924 | return add_stmt (t: build_stmt (input_location, GOTO_EXPR, destination)); |
925 | } |
926 | |
927 | /* Returns true if T corresponds to an assignment operator expression. */ |
928 | |
929 | static bool |
930 | is_assignment_op_expr_p (tree t) |
931 | { |
932 | if (t == NULL_TREE) |
933 | return false; |
934 | |
935 | if (TREE_CODE (t) == MODIFY_EXPR |
936 | || (TREE_CODE (t) == MODOP_EXPR |
937 | && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)) |
938 | return true; |
939 | |
940 | tree call = extract_call_expr (t); |
941 | if (call == NULL_TREE |
942 | || call == error_mark_node |
943 | || !CALL_EXPR_OPERATOR_SYNTAX (call)) |
944 | return false; |
945 | |
946 | tree fndecl = cp_get_callee_fndecl_nofold (call); |
947 | return fndecl != NULL_TREE |
948 | && DECL_ASSIGNMENT_OPERATOR_P (fndecl) |
949 | && DECL_OVERLOADED_OPERATOR_IS (fndecl, NOP_EXPR); |
950 | } |
951 | |
952 | /* Return true if TYPE is a class type that is convertible to |
953 | and assignable from bool. */ |
954 | |
955 | static GTY((deletable)) hash_map<tree, bool> *boolish_class_type_p_cache; |
956 | |
957 | static bool |
958 | boolish_class_type_p (tree type) |
959 | { |
960 | type = TYPE_MAIN_VARIANT (type); |
961 | if (!CLASS_TYPE_P (type) || !COMPLETE_TYPE_P (type)) |
962 | return false; |
963 | |
964 | if (bool *r = hash_map_safe_get (h: boolish_class_type_p_cache, k: type)) |
965 | return *r; |
966 | |
967 | tree ops; |
968 | bool has_bool_assignment = false; |
969 | bool has_bool_conversion = false; |
970 | |
971 | ops = lookup_fnfields (type, assign_op_identifier, /*protect=*/0, tf_none); |
972 | for (tree op : ovl_range (BASELINK_FUNCTIONS (ops))) |
973 | { |
974 | op = STRIP_TEMPLATE (op); |
975 | if (TREE_CODE (op) != FUNCTION_DECL) |
976 | continue; |
977 | tree parm = DECL_CHAIN (DECL_ARGUMENTS (op)); |
978 | tree parm_type = non_reference (TREE_TYPE (parm)); |
979 | if (TREE_CODE (parm_type) == BOOLEAN_TYPE) |
980 | { |
981 | has_bool_assignment = true; |
982 | break; |
983 | } |
984 | } |
985 | |
986 | if (has_bool_assignment) |
987 | { |
988 | ops = lookup_conversions (type); |
989 | for (; ops; ops = TREE_CHAIN (ops)) |
990 | { |
991 | tree op = TREE_VALUE (ops); |
992 | if (!DECL_NONCONVERTING_P (op) |
993 | && TREE_CODE (DECL_CONV_FN_TYPE (op)) == BOOLEAN_TYPE) |
994 | { |
995 | has_bool_conversion = true; |
996 | break; |
997 | } |
998 | } |
999 | } |
1000 | |
1001 | bool boolish = has_bool_assignment && has_bool_conversion; |
1002 | hash_map_safe_put<true> (h&: boolish_class_type_p_cache, k: type, v: boolish); |
1003 | return boolish; |
1004 | } |
1005 | |
1006 | |
1007 | /* Maybe warn about an unparenthesized 'a = b' (appearing in a |
1008 | boolean context where 'a == b' might have been intended). |
1009 | NESTED_P is true if T is the RHS of another assignment. */ |
1010 | |
1011 | void |
1012 | maybe_warn_unparenthesized_assignment (tree t, bool nested_p, |
1013 | tsubst_flags_t complain) |
1014 | { |
1015 | tree type = TREE_TYPE (t); |
1016 | t = STRIP_REFERENCE_REF (t); |
1017 | |
1018 | if ((complain & tf_warning) |
1019 | && warn_parentheses |
1020 | && is_assignment_op_expr_p (t) |
1021 | /* A parenthesized expression would've had this warning |
1022 | suppressed by finish_parenthesized_expr. */ |
1023 | && !warning_suppressed_p (t, OPT_Wparentheses) |
1024 | /* In c = a = b, don't warn if a has type bool or bool-like class. */ |
1025 | && (!nested_p |
1026 | || (TREE_CODE (type) != BOOLEAN_TYPE |
1027 | && !boolish_class_type_p (type)))) |
1028 | { |
1029 | warning_at (cp_expr_loc_or_input_loc (t), OPT_Wparentheses, |
1030 | "suggest parentheses around assignment used as truth value"); |
1031 | suppress_warning (t, OPT_Wparentheses); |
1032 | } |
1033 | } |
1034 | |
1035 | /* Helper class for saving/restoring ANNOTATE_EXPRs. For a tree node t, users |
1036 | can construct one of these like so: |
1037 | |
1038 | annotate_saver s (&t); |
1039 | |
1040 | and t will be updated to have any annotations removed. The user can then |
1041 | transform t, and later restore the ANNOTATE_EXPRs with: |
1042 | |
1043 | t = s.restore (t). |
1044 | |
1045 | The intent is to ensure that any ANNOTATE_EXPRs remain the outermost |
1046 | expressions following any operations on t. */ |
1047 | |
1048 | class annotate_saver { |
1049 | /* The chain of saved annotations, if there were any. Otherwise null. */ |
1050 | tree m_annotations; |
1051 | |
1052 | /* If M_ANNOTATIONS is non-null, then M_INNER points to TREE_OPERAND (A, 0) |
1053 | for the innermost annotation A. */ |
1054 | tree *m_inner; |
1055 | |
1056 | public: |
1057 | annotate_saver (tree *); |
1058 | tree restore (tree); |
1059 | }; |
1060 | |
1061 | /* If *COND is an ANNOTATE_EXPR, walk through the chain of annotations, and set |
1062 | *COND equal to the first non-ANNOTATE_EXPR (saving a pointer to the |
1063 | original chain of annotations for later use in restore). */ |
1064 | |
1065 | annotate_saver::annotate_saver (tree *cond) : m_annotations (nullptr) |
1066 | { |
1067 | tree *t = cond; |
1068 | while (TREE_CODE (*t) == ANNOTATE_EXPR) |
1069 | t = &TREE_OPERAND (*t, 0); |
1070 | |
1071 | if (t != cond) |
1072 | { |
1073 | m_annotations = *cond; |
1074 | *cond = *t; |
1075 | m_inner = t; |
1076 | } |
1077 | } |
1078 | |
1079 | /* If we didn't strip any annotations on construction, return NEW_INNER |
1080 | unmodified. Otherwise, wrap the saved annotations around NEW_INNER (updating |
1081 | the types and flags of the annotations if needed) and return the resulting |
1082 | expression. */ |
1083 | |
1084 | tree |
1085 | annotate_saver::restore (tree new_inner) |
1086 | { |
1087 | if (!m_annotations) |
1088 | return new_inner; |
1089 | |
1090 | /* If the type of the inner expression changed, we need to update the types |
1091 | of all the ANNOTATE_EXPRs. We may need to update the flags too, but we |
1092 | assume they only change if the type of the inner expression changes. |
1093 | The flag update logic assumes that the other operands to the |
1094 | ANNOTATE_EXPRs are always INTEGER_CSTs. */ |
1095 | if (TREE_TYPE (new_inner) != TREE_TYPE (*m_inner)) |
1096 | { |
1097 | const bool new_readonly |
1098 | = TREE_READONLY (new_inner) || CONSTANT_CLASS_P (new_inner); |
1099 | |
1100 | for (tree c = m_annotations; c != *m_inner; c = TREE_OPERAND (c, 0)) |
1101 | { |
1102 | gcc_checking_assert (TREE_CODE (c) == ANNOTATE_EXPR |
1103 | && TREE_CODE (TREE_OPERAND (c, 1)) == INTEGER_CST |
1104 | && TREE_CODE (TREE_OPERAND (c, 2)) == INTEGER_CST); |
1105 | TREE_TYPE (c) = TREE_TYPE (new_inner); |
1106 | TREE_SIDE_EFFECTS (c) = TREE_SIDE_EFFECTS (new_inner); |
1107 | TREE_READONLY (c) = new_readonly; |
1108 | } |
1109 | } |
1110 | |
1111 | *m_inner = new_inner; |
1112 | return m_annotations; |
1113 | } |
1114 | |
1115 | /* COND is the condition-expression for an if, while, etc., |
1116 | statement. Convert it to a boolean value, if appropriate. |
1117 | In addition, verify sequence points if -Wsequence-point is enabled. */ |
1118 | |
1119 | tree |
1120 | maybe_convert_cond (tree cond) |
1121 | { |
1122 | /* Empty conditions remain empty. */ |
1123 | if (!cond) |
1124 | return NULL_TREE; |
1125 | |
1126 | /* Wait until we instantiate templates before doing conversion. */ |
1127 | if (type_dependent_expression_p (cond)) |
1128 | return cond; |
1129 | |
1130 | /* Strip any ANNOTATE_EXPRs from COND. */ |
1131 | annotate_saver annotations (&cond); |
1132 | |
1133 | /* For structured binding used in condition, the conversion needs to be |
1134 | evaluated before the individual variables are initialized in the |
1135 | std::tuple_{size,elemenet} case. cp_finish_decomp saved the conversion |
1136 | result in a TARGET_EXPR, pick it up from there. */ |
1137 | if (DECL_DECOMPOSITION_P (cond) |
1138 | && DECL_DECOMP_IS_BASE (cond) |
1139 | && DECL_DECOMP_BASE (cond) |
1140 | && TREE_CODE (DECL_DECOMP_BASE (cond)) == TARGET_EXPR) |
1141 | cond = TARGET_EXPR_SLOT (DECL_DECOMP_BASE (cond)); |
1142 | |
1143 | if (warn_sequence_point && !processing_template_decl) |
1144 | verify_sequence_points (cond); |
1145 | |
1146 | maybe_warn_unparenthesized_assignment (t: cond, /*nested_p=*/false, |
1147 | complain: tf_warning_or_error); |
1148 | |
1149 | /* Do the conversion. */ |
1150 | cond = convert_from_reference (cond); |
1151 | cond = condition_conversion (cond); |
1152 | |
1153 | /* Restore any ANNOTATE_EXPRs around COND. */ |
1154 | return annotations.restore (new_inner: cond); |
1155 | } |
1156 | |
1157 | /* Finish an expression-statement, whose EXPRESSION is as indicated. */ |
1158 | |
1159 | tree |
1160 | finish_expr_stmt (tree expr) |
1161 | { |
1162 | tree r = NULL_TREE; |
1163 | location_t loc = EXPR_LOCATION (expr); |
1164 | |
1165 | if (expr != NULL_TREE) |
1166 | { |
1167 | /* If we ran into a problem, make sure we complained. */ |
1168 | gcc_assert (expr != error_mark_node || seen_error ()); |
1169 | |
1170 | if (!processing_template_decl) |
1171 | { |
1172 | if (warn_sequence_point) |
1173 | verify_sequence_points (expr); |
1174 | expr = convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error); |
1175 | } |
1176 | else if (!type_dependent_expression_p (expr)) |
1177 | convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error); |
1178 | |
1179 | if (check_for_bare_parameter_packs (expr)) |
1180 | expr = error_mark_node; |
1181 | |
1182 | /* Simplification of inner statement expressions, compound exprs, |
1183 | etc can result in us already having an EXPR_STMT or other statement |
1184 | tree. Don't wrap them in EXPR_STMT. */ |
1185 | if (TREE_CODE (expr) != CLEANUP_POINT_EXPR) |
1186 | { |
1187 | if (TREE_CODE (expr) != EXPR_STMT |
1188 | && !STATEMENT_CLASS_P (expr) |
1189 | && TREE_CODE (expr) != STATEMENT_LIST) |
1190 | expr = build_stmt (loc, EXPR_STMT, expr); |
1191 | expr = maybe_cleanup_point_expr_void (expr); |
1192 | } |
1193 | |
1194 | r = add_stmt (t: expr); |
1195 | } |
1196 | |
1197 | return r; |
1198 | } |
1199 | |
1200 | |
1201 | /* Begin an if-statement. Returns a newly created IF_STMT if |
1202 | appropriate. */ |
1203 | |
1204 | tree |
1205 | begin_if_stmt (void) |
1206 | { |
1207 | tree r, scope; |
1208 | scope = do_pushlevel (sk: sk_cond); |
1209 | r = build_stmt (input_location, IF_STMT, NULL_TREE, |
1210 | NULL_TREE, NULL_TREE, scope); |
1211 | current_binding_level->this_entity = r; |
1212 | begin_cond (cond_p: &IF_COND (r)); |
1213 | return r; |
1214 | } |
1215 | |
1216 | /* Returns true if FN, a CALL_EXPR, is a call to |
1217 | std::is_constant_evaluated or __builtin_is_constant_evaluated. */ |
1218 | |
1219 | static bool |
1220 | is_std_constant_evaluated_p (tree fn) |
1221 | { |
1222 | /* std::is_constant_evaluated takes no arguments. */ |
1223 | if (call_expr_nargs (fn) != 0) |
1224 | return false; |
1225 | |
1226 | tree fndecl = cp_get_callee_fndecl_nofold (fn); |
1227 | if (fndecl == NULL_TREE) |
1228 | return false; |
1229 | |
1230 | if (fndecl_built_in_p (node: fndecl, name: CP_BUILT_IN_IS_CONSTANT_EVALUATED, |
1231 | klass: BUILT_IN_FRONTEND)) |
1232 | return true; |
1233 | |
1234 | if (!decl_in_std_namespace_p (fndecl)) |
1235 | return false; |
1236 | |
1237 | tree name = DECL_NAME (fndecl); |
1238 | return name && id_equal (id: name, str: "is_constant_evaluated"); |
1239 | } |
1240 | |
1241 | /* Callback function for maybe_warn_for_constant_evaluated that looks |
1242 | for calls to std::is_constant_evaluated in TP. */ |
1243 | |
1244 | static tree |
1245 | find_std_constant_evaluated_r (tree *tp, int *walk_subtrees, void *) |
1246 | { |
1247 | tree t = *tp; |
1248 | |
1249 | if (TYPE_P (t) || TREE_CONSTANT (t)) |
1250 | { |
1251 | *walk_subtrees = false; |
1252 | return NULL_TREE; |
1253 | } |
1254 | |
1255 | switch (TREE_CODE (t)) |
1256 | { |
1257 | case CALL_EXPR: |
1258 | if (is_std_constant_evaluated_p (fn: t)) |
1259 | return t; |
1260 | break; |
1261 | case EXPR_STMT: |
1262 | /* Don't warn in statement expressions. */ |
1263 | *walk_subtrees = false; |
1264 | return NULL_TREE; |
1265 | default: |
1266 | break; |
1267 | } |
1268 | |
1269 | return NULL_TREE; |
1270 | } |
1271 | |
1272 | /* In certain contexts, std::is_constant_evaluated() is always true (for |
1273 | instance, in a consteval function or in a constexpr if), or always false |
1274 | (e.g., in a non-constexpr non-consteval function) so give the user a clue. */ |
1275 | |
1276 | static void |
1277 | maybe_warn_for_constant_evaluated (tree cond, bool constexpr_if, |
1278 | bool trivial_infinite) |
1279 | { |
1280 | if (!warn_tautological_compare) |
1281 | return; |
1282 | |
1283 | /* Suppress warning for std::is_constant_evaluated if the conditional |
1284 | comes from a macro. */ |
1285 | if (from_macro_expansion_at (EXPR_LOCATION (cond))) |
1286 | return; |
1287 | |
1288 | cond = cp_walk_tree_without_duplicates (&cond, find_std_constant_evaluated_r, |
1289 | NULL); |
1290 | if (cond) |
1291 | { |
1292 | if (constexpr_if) |
1293 | warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare, |
1294 | "%<std::is_constant_evaluated%> always evaluates to " |
1295 | "true in %<if constexpr%>"); |
1296 | else if (trivial_infinite) |
1297 | { |
1298 | auto_diagnostic_group d; |
1299 | if (warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare, |
1300 | "%<std::is_constant_evaluated%> evaluates to " |
1301 | "true when checking if trivially empty iteration " |
1302 | "statement is trivial infinite loop") |
1303 | && !maybe_constexpr_fn (current_function_decl)) |
1304 | inform (EXPR_LOCATION (cond), |
1305 | "and evaluates to false when actually evaluating " |
1306 | "the condition in non-%<constexpr%> function"); |
1307 | } |
1308 | else if (!maybe_constexpr_fn (current_function_decl)) |
1309 | warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare, |
1310 | "%<std::is_constant_evaluated%> always evaluates to " |
1311 | "false in a non-%<constexpr%> function"); |
1312 | else if (DECL_IMMEDIATE_FUNCTION_P (current_function_decl)) |
1313 | warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare, |
1314 | "%<std::is_constant_evaluated%> always evaluates to " |
1315 | "true in a %<consteval%> function"); |
1316 | } |
1317 | } |
1318 | |
1319 | /* Process the COND of an if-statement, which may be given by |
1320 | IF_STMT. */ |
1321 | |
1322 | tree |
1323 | finish_if_stmt_cond (tree orig_cond, tree if_stmt) |
1324 | { |
1325 | tree cond = maybe_convert_cond (cond: orig_cond); |
1326 | maybe_warn_for_constant_evaluated (cond, IF_STMT_CONSTEXPR_P (if_stmt), |
1327 | /*trivial_infinite=*/false); |
1328 | if (IF_STMT_CONSTEXPR_P (if_stmt) |
1329 | && !type_dependent_expression_p (cond) |
1330 | && require_constant_expression (cond) |
1331 | && !instantiation_dependent_expression_p (cond) |
1332 | /* Wait until instantiation time, since only then COND has been |
1333 | converted to bool. */ |
1334 | && TYPE_MAIN_VARIANT (TREE_TYPE (cond)) == boolean_type_node) |
1335 | { |
1336 | cond = instantiate_non_dependent_expr (cond); |
1337 | cond = cxx_constant_value (cond); |
1338 | } |
1339 | else if (processing_template_decl) |
1340 | cond = orig_cond; |
1341 | finish_cond (cond_p: &IF_COND (if_stmt), expr: cond); |
1342 | add_stmt (t: if_stmt); |
1343 | THEN_CLAUSE (if_stmt) = push_stmt_list (); |
1344 | return cond; |
1345 | } |
1346 | |
1347 | /* Finish the then-clause of an if-statement, which may be given by |
1348 | IF_STMT. */ |
1349 | |
1350 | tree |
1351 | finish_then_clause (tree if_stmt) |
1352 | { |
1353 | THEN_CLAUSE (if_stmt) = pop_stmt_list (THEN_CLAUSE (if_stmt)); |
1354 | return if_stmt; |
1355 | } |
1356 | |
1357 | /* Begin the else-clause of an if-statement. */ |
1358 | |
1359 | void |
1360 | begin_else_clause (tree if_stmt) |
1361 | { |
1362 | ELSE_CLAUSE (if_stmt) = push_stmt_list (); |
1363 | } |
1364 | |
1365 | /* Finish the else-clause of an if-statement, which may be given by |
1366 | IF_STMT. */ |
1367 | |
1368 | void |
1369 | finish_else_clause (tree if_stmt) |
1370 | { |
1371 | ELSE_CLAUSE (if_stmt) = pop_stmt_list (ELSE_CLAUSE (if_stmt)); |
1372 | } |
1373 | |
1374 | /* Callback for cp_walk_tree to mark all {VAR,PARM}_DECLs in a tree as |
1375 | read. */ |
1376 | |
1377 | static tree |
1378 | maybe_mark_exp_read_r (tree *tp, int *, void *) |
1379 | { |
1380 | tree t = *tp; |
1381 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
1382 | mark_exp_read (t); |
1383 | return NULL_TREE; |
1384 | } |
1385 | |
1386 | /* Finish an if-statement. */ |
1387 | |
1388 | void |
1389 | finish_if_stmt (tree if_stmt) |
1390 | { |
1391 | tree scope = IF_SCOPE (if_stmt); |
1392 | IF_SCOPE (if_stmt) = NULL; |
1393 | if (IF_STMT_CONSTEXPR_P (if_stmt)) |
1394 | { |
1395 | /* Prevent various -Wunused warnings. We might not instantiate |
1396 | either of these branches, so we would not mark the variables |
1397 | used in that branch as read. */ |
1398 | cp_walk_tree_without_duplicates (&THEN_CLAUSE (if_stmt), |
1399 | maybe_mark_exp_read_r, NULL); |
1400 | cp_walk_tree_without_duplicates (&ELSE_CLAUSE (if_stmt), |
1401 | maybe_mark_exp_read_r, NULL); |
1402 | } |
1403 | add_stmt (t: do_poplevel (stmt_list: scope)); |
1404 | } |
1405 | |
1406 | /* Determine if iteration statement with *CONDP condition and |
1407 | loop BODY is trivially empty iteration statement or even |
1408 | trivial infinite loop. In the latter case for -ffinite-loops |
1409 | add ANNOTATE_EXPR to mark the loop as maybe validly infinite. |
1410 | Also, emit -Wtautological-compare warning for std::is_constant_evaluated () |
1411 | calls in the condition when needed. */ |
1412 | |
1413 | static void |
1414 | finish_loop_cond (tree *condp, tree body) |
1415 | { |
1416 | if (TREE_CODE (*condp) == INTEGER_CST) |
1417 | return; |
1418 | bool trivially_empty = expr_first (body) == NULL_TREE; |
1419 | bool trivial_infinite = false; |
1420 | if (trivially_empty) |
1421 | { |
1422 | tree c = fold_non_dependent_expr (*condp, tf_none, |
1423 | /*manifestly_const_eval=*/true); |
1424 | trivial_infinite = c && integer_nonzerop (c); |
1425 | } |
1426 | if (warn_tautological_compare) |
1427 | { |
1428 | tree cond = *condp; |
1429 | while (TREE_CODE (cond) == ANNOTATE_EXPR) |
1430 | cond = TREE_OPERAND (cond, 0); |
1431 | if (trivial_infinite |
1432 | && !DECL_IMMEDIATE_FUNCTION_P (current_function_decl)) |
1433 | maybe_warn_for_constant_evaluated (cond, /*constexpr_if=*/false, |
1434 | /*trivial_infinite=*/true); |
1435 | else if (!trivially_empty |
1436 | || !processing_template_decl |
1437 | || DECL_IMMEDIATE_FUNCTION_P (current_function_decl)) |
1438 | maybe_warn_for_constant_evaluated (cond, /*constexpr_if=*/false, |
1439 | /*trivial_infinite=*/false); |
1440 | } |
1441 | if (trivial_infinite && flag_finite_loops && !processing_template_decl) |
1442 | *condp = build3 (ANNOTATE_EXPR, TREE_TYPE (*condp), *condp, |
1443 | build_int_cst (integer_type_node, |
1444 | annot_expr_maybe_infinite_kind), |
1445 | integer_zero_node); |
1446 | } |
1447 | |
1448 | /* Begin a while-statement. Returns a newly created WHILE_STMT if |
1449 | appropriate. */ |
1450 | |
1451 | tree |
1452 | begin_while_stmt (void) |
1453 | { |
1454 | tree r; |
1455 | r = build_stmt (input_location, WHILE_STMT, NULL_TREE, NULL_TREE, NULL_TREE, |
1456 | NULL_TREE, NULL_TREE); |
1457 | add_stmt (t: r); |
1458 | WHILE_BODY (r) = do_pushlevel (sk: sk_block); |
1459 | begin_cond (cond_p: &WHILE_COND (r)); |
1460 | return r; |
1461 | } |
1462 | |
1463 | /* Process the COND of a while-statement, which may be given by |
1464 | WHILE_STMT. */ |
1465 | |
1466 | void |
1467 | finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep, |
1468 | tree unroll, bool novector) |
1469 | { |
1470 | cond = maybe_convert_cond (cond); |
1471 | finish_cond (cond_p: &WHILE_COND (while_stmt), expr: cond); |
1472 | begin_maybe_infinite_loop (cond); |
1473 | if (ivdep && cond != error_mark_node) |
1474 | WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR, |
1475 | TREE_TYPE (WHILE_COND (while_stmt)), |
1476 | WHILE_COND (while_stmt), |
1477 | build_int_cst (integer_type_node, |
1478 | annot_expr_ivdep_kind), |
1479 | integer_zero_node); |
1480 | if (unroll && cond != error_mark_node) |
1481 | WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR, |
1482 | TREE_TYPE (WHILE_COND (while_stmt)), |
1483 | WHILE_COND (while_stmt), |
1484 | build_int_cst (integer_type_node, |
1485 | annot_expr_unroll_kind), |
1486 | unroll); |
1487 | if (novector && cond != error_mark_node) |
1488 | WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR, |
1489 | TREE_TYPE (WHILE_COND (while_stmt)), |
1490 | WHILE_COND (while_stmt), |
1491 | build_int_cst (integer_type_node, |
1492 | annot_expr_no_vector_kind), |
1493 | integer_zero_node); |
1494 | adjust_loop_decl_cond (body_p: &WHILE_BODY (while_stmt), |
1495 | prep_p: &WHILE_COND_PREP (while_stmt), |
1496 | cleanup_p: &WHILE_COND_CLEANUP (while_stmt)); |
1497 | } |
1498 | |
1499 | /* Finish a while-statement, which may be given by WHILE_STMT. */ |
1500 | |
1501 | void |
1502 | finish_while_stmt (tree while_stmt) |
1503 | { |
1504 | end_maybe_infinite_loop (boolean_true_node); |
1505 | if (WHILE_COND_PREP (while_stmt)) |
1506 | finish_loop_cond_prep (body_p: &WHILE_BODY (while_stmt), |
1507 | prep_p: &WHILE_COND_PREP (while_stmt), |
1508 | WHILE_COND_CLEANUP (while_stmt)); |
1509 | else |
1510 | WHILE_BODY (while_stmt) = do_poplevel (WHILE_BODY (while_stmt)); |
1511 | finish_loop_cond (condp: &WHILE_COND (while_stmt), WHILE_BODY (while_stmt)); |
1512 | } |
1513 | |
1514 | /* Begin a do-statement. Returns a newly created DO_STMT if |
1515 | appropriate. */ |
1516 | |
1517 | tree |
1518 | begin_do_stmt (void) |
1519 | { |
1520 | tree r = build_stmt (input_location, DO_STMT, NULL_TREE, NULL_TREE, |
1521 | NULL_TREE); |
1522 | begin_maybe_infinite_loop (boolean_true_node); |
1523 | add_stmt (t: r); |
1524 | DO_BODY (r) = push_stmt_list (); |
1525 | return r; |
1526 | } |
1527 | |
1528 | /* Finish the body of a do-statement, which may be given by DO_STMT. */ |
1529 | |
1530 | void |
1531 | finish_do_body (tree do_stmt) |
1532 | { |
1533 | tree body = DO_BODY (do_stmt) = pop_stmt_list (DO_BODY (do_stmt)); |
1534 | |
1535 | if (TREE_CODE (body) == STATEMENT_LIST && STATEMENT_LIST_TAIL (body)) |
1536 | body = STATEMENT_LIST_TAIL (body)->stmt; |
1537 | |
1538 | if (IS_EMPTY_STMT (body)) |
1539 | warning (OPT_Wempty_body, |
1540 | "suggest explicit braces around empty body in %<do%> statement"); |
1541 | } |
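
/* Illustrative example (poll_device is a made-up name): the -Wempty-body
   warning above fires for a null statement used as the body, as in

     do ;
     while (poll_device ());

   whereas an explicit "do {} while (poll_device ());" is accepted quietly.  */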
1542 | |
1543 | /* Finish a do-statement, which may be given by DO_STMT, and whose |
1544 | COND is as indicated. */ |
1545 | |
1546 | void |
1547 | finish_do_stmt (tree cond, tree do_stmt, bool ivdep, tree unroll, |
1548 | bool novector) |
1549 | { |
1550 | cond = maybe_convert_cond (cond); |
1551 | end_maybe_infinite_loop (cond); |
1552 | /* Unlike other iteration statements, the condition may not contain |
1553 | a declaration, so we don't call finish_cond which checks for |
1554 | unexpanded parameter packs. */ |
1555 | if (check_for_bare_parameter_packs (cond)) |
1556 | cond = error_mark_node; |
1557 | if (ivdep && cond != error_mark_node) |
1558 | cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
1559 | build_int_cst (integer_type_node, annot_expr_ivdep_kind), |
1560 | integer_zero_node); |
1561 | if (unroll && cond != error_mark_node) |
1562 | cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
1563 | build_int_cst (integer_type_node, annot_expr_unroll_kind), |
1564 | unroll); |
1565 | if (novector && cond != error_mark_node) |
1566 | cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
1567 | build_int_cst (integer_type_node, annot_expr_no_vector_kind), |
1568 | integer_zero_node); |
1569 | DO_COND (do_stmt) = cond; |
1570 | tree do_body = DO_BODY (do_stmt); |
1571 | if (CONVERT_EXPR_P (do_body) |
1572 | && integer_zerop (TREE_OPERAND (do_body, 0)) |
1573 | && VOID_TYPE_P (TREE_TYPE (do_body))) |
1574 | do_body = NULL_TREE; |
1575 | finish_loop_cond (&DO_COND (do_stmt), do_body);
1576 | } |
1577 | |
1578 | /* Finish a return-statement. The EXPRESSION returned, if any, is as |
1579 | indicated. */ |
1580 | |
1581 | tree |
1582 | finish_return_stmt (tree expr) |
1583 | { |
1584 | tree r; |
1585 | bool no_warning; |
1586 | bool dangling; |
1587 | |
1588 | expr = check_return_expr (expr, &no_warning, &dangling); |
1589 | |
1590 | if (error_operand_p (expr)
1591 | || (flag_openmp && !check_omp_return ())) |
1592 | { |
1593 | /* Suppress -Wreturn-type for this function. */ |
1594 | if (warn_return_type) |
1595 | suppress_warning (current_function_decl, OPT_Wreturn_type); |
1596 | return error_mark_node; |
1597 | } |
1598 | |
1599 | if (!processing_template_decl) |
1600 | { |
1601 | if (warn_sequence_point) |
1602 | verify_sequence_points (expr); |
1603 | } |
1604 | |
1605 | r = build_stmt (input_location, RETURN_EXPR, expr); |
1606 | RETURN_EXPR_LOCAL_ADDR_P (r) = dangling; |
1607 | if (no_warning) |
1608 | suppress_warning (r, OPT_Wreturn_type); |
1609 | r = maybe_cleanup_point_expr_void (r);
1610 | r = add_stmt (r);
1611 | |
1612 | return r; |
1613 | } |
1614 | |
1615 | /* Begin the scope of a for-statement or a range-for-statement. |
1616 | The returned scope and the tree stored in *INIT are to be used in a
1617 | call to begin_for_stmt or begin_range_for_stmt. */
1618 | |
1619 | tree |
1620 | begin_for_scope (tree *init) |
1621 | { |
1622 | tree scope = do_pushlevel (sk_for);
1623 | |
1624 | if (processing_template_decl) |
1625 | *init = push_stmt_list (); |
1626 | else |
1627 | *init = NULL_TREE; |
1628 | |
1629 | return scope; |
1630 | } |
1631 | |
1632 | /* Begin a for-statement. Returns a new FOR_STMT. |
1633 | SCOPE and INIT should be the return of begin_for_scope, |
1634 | or both NULL_TREE. */
1635 | |
1636 | tree |
1637 | begin_for_stmt (tree scope, tree init) |
1638 | { |
1639 | tree r; |
1640 | |
1641 | r = build_stmt (input_location, FOR_STMT, NULL_TREE, NULL_TREE, |
1642 | NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE, |
1643 | NULL_TREE, NULL_TREE); |
1644 | |
1645 | if (scope == NULL_TREE) |
1646 | { |
1647 | gcc_assert (!init); |
1648 | scope = begin_for_scope (&init);
1649 | } |
1650 | |
1651 | FOR_INIT_STMT (r) = init; |
1652 | FOR_SCOPE (r) = scope; |
1653 | |
1654 | return r; |
1655 | } |
1656 | |
1657 | /* Finish the init-statement of a for-statement, which may be |
1658 | given by FOR_STMT. */ |
1659 | |
1660 | void |
1661 | finish_init_stmt (tree for_stmt) |
1662 | { |
1663 | if (processing_template_decl) |
1664 | FOR_INIT_STMT (for_stmt) = pop_stmt_list (FOR_INIT_STMT (for_stmt)); |
1665 | add_stmt (for_stmt);
1666 | FOR_BODY (for_stmt) = do_pushlevel (sk_block);
1667 | begin_cond (&FOR_COND (for_stmt));
1668 | } |
1669 | |
1670 | /* Finish the COND of a for-statement, which may be given by |
1671 | FOR_STMT. */ |
1672 | |
1673 | void |
1674 | finish_for_cond (tree cond, tree for_stmt, bool ivdep, tree unroll, |
1675 | bool novector) |
1676 | { |
1677 | cond = maybe_convert_cond (cond); |
1678 | finish_cond (&FOR_COND (for_stmt), cond);
1679 | begin_maybe_infinite_loop (cond); |
1680 | if (ivdep && cond != error_mark_node) |
1681 | FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR, |
1682 | TREE_TYPE (FOR_COND (for_stmt)), |
1683 | FOR_COND (for_stmt), |
1684 | build_int_cst (integer_type_node, |
1685 | annot_expr_ivdep_kind), |
1686 | integer_zero_node); |
1687 | if (unroll && cond != error_mark_node) |
1688 | FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR, |
1689 | TREE_TYPE (FOR_COND (for_stmt)), |
1690 | FOR_COND (for_stmt), |
1691 | build_int_cst (integer_type_node, |
1692 | annot_expr_unroll_kind), |
1693 | unroll); |
1694 | if (novector && cond && cond != error_mark_node) |
1695 | FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR, |
1696 | TREE_TYPE (FOR_COND (for_stmt)), |
1697 | FOR_COND (for_stmt), |
1698 | build_int_cst (integer_type_node, |
1699 | annot_expr_no_vector_kind), |
1700 | integer_zero_node); |
1701 | adjust_loop_decl_cond (&FOR_BODY (for_stmt), &FOR_COND_PREP (for_stmt),
1702 | &FOR_COND_CLEANUP (for_stmt));
1703 | } |
1704 | |
1705 | /* Finish the increment-EXPRESSION in a for-statement, which may be |
1706 | given by FOR_STMT. */ |
1707 | |
1708 | void |
1709 | finish_for_expr (tree expr, tree for_stmt) |
1710 | { |
1711 | if (!expr) |
1712 | return; |
1713 | /* If EXPR is an overloaded function, issue an error; there is no |
1714 | context available to use to perform overload resolution. */ |
1715 | if (type_unknown_p (expr)) |
1716 | { |
1717 | cxx_incomplete_type_error (expr, TREE_TYPE (expr));
1718 | expr = error_mark_node; |
1719 | } |
1720 | if (!processing_template_decl) |
1721 | { |
1722 | if (warn_sequence_point) |
1723 | verify_sequence_points (expr); |
1724 | expr = convert_to_void (expr, ICV_THIRD_IN_FOR, |
1725 | tf_warning_or_error); |
1726 | } |
1727 | else if (!type_dependent_expression_p (expr)) |
1728 | convert_to_void (expr, ICV_THIRD_IN_FOR, tf_warning_or_error); |
1729 | expr = maybe_cleanup_point_expr_void (expr); |
1730 | if (check_for_bare_parameter_packs (expr)) |
1731 | expr = error_mark_node; |
1732 | FOR_EXPR (for_stmt) = expr; |
1733 | } |
1734 | |
1735 | /* During parsing of the body, a range-based for uses "__for_{range,begin,end} "
1736 | decl names (ending in a space) to make them inaccessible to code in the
1737 | body. Find those decls and store them into the RANGE_FOR_DECL array, so
1738 | that they can later be renamed to use an underscore instead of the space
1739 | and thus be inspected in the debugger. */
1740 | |
1741 | void |
1742 | find_range_for_decls (tree range_for_decl[3]) |
1743 | { |
1744 | gcc_assert (CPTI_FOR_BEGIN__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 1 |
1745 | && CPTI_FOR_END__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 2 |
1746 | && CPTI_FOR_RANGE_IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 3 |
1747 | && CPTI_FOR_BEGIN_IDENTIFIER == CPTI_FOR_BEGIN__IDENTIFIER + 3 |
1748 | && CPTI_FOR_END_IDENTIFIER == CPTI_FOR_END__IDENTIFIER + 3); |
1749 | for (int i = 0; i < 3; i++) |
1750 | { |
1751 | tree id = cp_global_trees[CPTI_FOR_RANGE__IDENTIFIER + i]; |
1752 | if (IDENTIFIER_BINDING (id) |
1753 | && IDENTIFIER_BINDING (id)->scope == current_binding_level) |
1754 | { |
1755 | range_for_decl[i] = IDENTIFIER_BINDING (id)->value; |
1756 | gcc_assert (VAR_P (range_for_decl[i]) |
1757 | && DECL_ARTIFICIAL (range_for_decl[i])); |
1758 | } |
1759 | } |
1760 | } |
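
/* Rough sketch of the lowering this relies on (simplified, for orientation
   only): a range-based for such as

     for (auto x : cont) use (x);

   is built using artificial variables whose names end in a space,
   approximately

     auto &&__for_range = cont;
     auto __for_begin = begin-expr;
     auto __for_end = end-expr;
     for (; __for_begin != __for_end; ++__for_begin)
       { auto x = *__for_begin; use (x); }

   so user code in the body cannot name them; this function collects those
   decls so the space can later be turned into an underscore.  */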
1761 | |
1762 | /* Finish the body of a for-statement, which may be given by |
1763 | FOR_STMT; the increment-EXPRESSION, if any, must already have been
1764 | supplied via finish_for_expr.
1765 | It can also finish a RANGE_FOR_STMT. */
1766 | |
1767 | void |
1768 | finish_for_stmt (tree for_stmt) |
1769 | { |
1770 | end_maybe_infinite_loop (boolean_true_node); |
1771 | |
1772 | if (TREE_CODE (for_stmt) == RANGE_FOR_STMT) |
1773 | RANGE_FOR_BODY (for_stmt) = do_poplevel (RANGE_FOR_BODY (for_stmt)); |
1774 | else |
1775 | { |
1776 | if (FOR_COND_PREP (for_stmt)) |
1777 | finish_loop_cond_prep (&FOR_BODY (for_stmt),
1778 | &FOR_COND_PREP (for_stmt),
1779 | FOR_COND_CLEANUP (for_stmt)); |
1780 | else |
1781 | FOR_BODY (for_stmt) = do_poplevel (FOR_BODY (for_stmt)); |
1782 | if (FOR_COND (for_stmt)) |
1783 | finish_loop_cond (&FOR_COND (for_stmt),
1784 | FOR_EXPR (for_stmt) ? integer_one_node |
1785 | : FOR_BODY (for_stmt)); |
1786 | } |
1787 | |
1788 | /* Pop the scope for the body of the loop. */ |
1789 | tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT |
1790 | ? &RANGE_FOR_SCOPE (for_stmt) |
1791 | : &FOR_SCOPE (for_stmt)); |
1792 | tree scope = *scope_ptr; |
1793 | *scope_ptr = NULL; |
1794 | |
1795 | /* During parsing of the body, a range-based for uses "__for_{range,begin,end} "
1796 | decl names to make them inaccessible to code in the body. Change them to
1797 | names with an underscore instead of the space, so that they can be
1798 | inspected in the debugger. */
1799 | tree range_for_decl[3] = { NULL_TREE, NULL_TREE, NULL_TREE }; |
1800 | find_range_for_decls (range_for_decl); |
1801 | |
1802 | add_stmt (do_poplevel (scope));
1803 | |
1804 | /* If we're being called from build_vec_init, don't mess with the names of |
1805 | the variables for an enclosing range-for. */ |
1806 | if (!stmts_are_full_exprs_p ()) |
1807 | return; |
1808 | |
1809 | for (int i = 0; i < 3; i++) |
1810 | if (range_for_decl[i]) |
1811 | DECL_NAME (range_for_decl[i]) |
1812 | = cp_global_trees[CPTI_FOR_RANGE_IDENTIFIER + i]; |
1813 | } |
1814 | |
1815 | /* Begin a range-for-statement. Returns a new RANGE_FOR_STMT. |
1816 | SCOPE and INIT should be the return of begin_for_scope, |
1817 | or both NULL_TREE.
1818 | To finish it call finish_for_stmt(). */ |
1819 | |
1820 | tree |
1821 | begin_range_for_stmt (tree scope, tree init) |
1822 | { |
1823 | begin_maybe_infinite_loop (boolean_false_node); |
1824 | |
1825 | tree r = build_stmt (input_location, RANGE_FOR_STMT, NULL_TREE, NULL_TREE, |
1826 | NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE); |
1827 | |
1828 | if (scope == NULL_TREE) |
1829 | { |
1830 | gcc_assert (!init); |
1831 | scope = begin_for_scope (&init);
1832 | } |
1833 | |
1834 | /* Since C++20, RANGE_FOR_STMTs can use the init tree, so save it. */ |
1835 | RANGE_FOR_INIT_STMT (r) = init; |
1836 | RANGE_FOR_SCOPE (r) = scope; |
1837 | |
1838 | return r; |
1839 | } |
1840 | |
1841 | /* Finish the head of a range-based for statement, which may |
1842 | be given by RANGE_FOR_STMT. DECL must be the declaration |
1843 | and EXPR must be the loop expression. */ |
1844 | |
1845 | void |
1846 | finish_range_for_decl (tree range_for_stmt, tree decl, tree expr) |
1847 | { |
1848 | if (processing_template_decl) |
1849 | RANGE_FOR_INIT_STMT (range_for_stmt) |
1850 | = pop_stmt_list (RANGE_FOR_INIT_STMT (range_for_stmt)); |
1851 | RANGE_FOR_DECL (range_for_stmt) = decl; |
1852 | RANGE_FOR_EXPR (range_for_stmt) = expr; |
1853 | add_stmt (range_for_stmt);
1854 | RANGE_FOR_BODY (range_for_stmt) = do_pushlevel (sk_block);
1855 | } |
1856 | |
1857 | /* Finish a break-statement. */ |
1858 | |
1859 | tree |
1860 | finish_break_stmt (void) |
1861 | { |
1862 | /* In switch statements break is sometimes stylistically used after |
1863 | a return statement. This can lead to spurious warnings about |
1864 | control reaching the end of a non-void function when it is |
1865 | inlined. Note that we are calling block_may_fallthru with |
1866 | language specific tree nodes; this works because |
1867 | block_may_fallthru returns true when given something it does not |
1868 | understand. */ |
1869 | if (!block_may_fallthru (cur_stmt_list)) |
1870 | return void_node; |
1871 | note_break_stmt (); |
1872 | return add_stmt (build_stmt (input_location, BREAK_STMT, NULL_TREE));
1873 | } |
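
/* Example of the pattern tolerated above (illustrative only):

     switch (k)
       {
       case 0:
         return f ();
         break;   // never reached; dropped here to avoid bogus warnings
       }  */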
1874 | |
1875 | /* Finish a continue-statement. */ |
1876 | |
1877 | tree |
1878 | finish_continue_stmt (void) |
1879 | { |
1880 | return add_stmt (build_stmt (input_location, CONTINUE_STMT, NULL_TREE));
1881 | } |
1882 | |
1883 | /* Begin a switch-statement. Returns a new SWITCH_STMT if |
1884 | appropriate. */ |
1885 | |
1886 | tree |
1887 | begin_switch_stmt (void) |
1888 | { |
1889 | tree r, scope; |
1890 | |
1891 | scope = do_pushlevel (sk_cond);
1892 | r = build_stmt (input_location, SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE, |
1893 | scope, NULL_TREE); |
1894 | |
1895 | begin_cond (&SWITCH_STMT_COND (r));
1896 | |
1897 | return r; |
1898 | } |
1899 | |
1900 | /* Finish the cond of a switch-statement. */ |
1901 | |
1902 | void |
1903 | finish_switch_cond (tree cond, tree switch_stmt) |
1904 | { |
1905 | tree orig_type = NULL; |
1906 | |
1907 | if (!processing_template_decl) |
1908 | { |
1909 | /* Convert the condition to an integer or enumeration type. */ |
1910 | tree orig_cond = cond; |
1911 | /* For a structured binding used in the condition, the conversion needs to
1912 | be evaluated before the individual variables are initialized in the
1913 | std::tuple_{size,element} case. cp_finish_decomp saved the
1914 | conversion result in a TARGET_EXPR; pick it up from there. */
1915 | if (DECL_DECOMPOSITION_P (cond) |
1916 | && DECL_DECOMP_IS_BASE (cond) |
1917 | && DECL_DECOMP_BASE (cond) |
1918 | && TREE_CODE (DECL_DECOMP_BASE (cond)) == TARGET_EXPR) |
1919 | cond = TARGET_EXPR_SLOT (DECL_DECOMP_BASE (cond)); |
1920 | cond = build_expr_type_conversion (WANT_INT | WANT_ENUM, cond, true); |
1921 | if (cond == NULL_TREE) |
1922 | { |
1923 | error_at (cp_expr_loc_or_input_loc (orig_cond),
1924 | "switch quantity not an integer"); |
1925 | cond = error_mark_node; |
1926 | } |
1927 | /* We want unlowered type here to handle enum bit-fields. */ |
1928 | orig_type = unlowered_expr_type (cond); |
1929 | if (TREE_CODE (orig_type) != ENUMERAL_TYPE) |
1930 | orig_type = TREE_TYPE (cond); |
1931 | if (cond != error_mark_node) |
1932 | { |
1933 | /* [stmt.switch] |
1934 | |
1935 | Integral promotions are performed. */ |
1936 | cond = perform_integral_promotions (cond); |
1937 | cond = maybe_cleanup_point_expr (cond);
1938 | } |
1939 | } |
1940 | if (check_for_bare_parameter_packs (cond)) |
1941 | cond = error_mark_node; |
1942 | else if (!processing_template_decl && warn_sequence_point) |
1943 | verify_sequence_points (cond); |
1944 | |
1945 | finish_cond (&SWITCH_STMT_COND (switch_stmt), cond);
1946 | SWITCH_STMT_TYPE (switch_stmt) = orig_type;
1947 | add_stmt (switch_stmt);
1948 | push_switch (switch_stmt); |
1949 | SWITCH_STMT_BODY (switch_stmt) = push_stmt_list (); |
1950 | } |
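
/* Illustrative only: the build_expr_type_conversion call above is what lets
   a class object with a conversion function appear as a switch condition,
   e.g.

     struct S { operator int () const; };
     void f (S s) { switch (s) { case 0: break; default: break; } }

   while a type with no integer or enumeration conversion is rejected with
   "switch quantity not an integer".  */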
1951 | |
1952 | /* Finish the body of a switch-statement, which may be given by |
1953 | SWITCH_STMT. The COND to switch on is indicated. */ |
1954 | |
1955 | void |
1956 | finish_switch_stmt (tree switch_stmt) |
1957 | { |
1958 | tree scope; |
1959 | |
1960 | SWITCH_STMT_BODY (switch_stmt) = |
1961 | pop_stmt_list (SWITCH_STMT_BODY (switch_stmt)); |
1962 | pop_switch (); |
1963 | |
1964 | scope = SWITCH_STMT_SCOPE (switch_stmt); |
1965 | SWITCH_STMT_SCOPE (switch_stmt) = NULL; |
1966 | add_stmt (do_poplevel (scope));
1967 | } |
1968 | |
1969 | /* Begin a try-block. Returns a newly-created TRY_BLOCK if |
1970 | appropriate. */ |
1971 | |
1972 | tree |
1973 | begin_try_block (void) |
1974 | { |
1975 | tree r = build_stmt (input_location, TRY_BLOCK, NULL_TREE, NULL_TREE); |
1976 | add_stmt (r);
1977 | TRY_STMTS (r) = push_stmt_list (); |
1978 | return r; |
1979 | } |
1980 | |
1981 | /* Likewise, for a function-try-block. The block returned in |
1982 | *COMPOUND_STMT is an artificial outer scope, containing the |
1983 | function-try-block. */ |
1984 | |
1985 | tree |
1986 | begin_function_try_block (tree *compound_stmt) |
1987 | { |
1988 | tree r; |
1989 | /* This outer scope does not exist in the C++ standard, but we need |
1990 | a place to put __FUNCTION__ and similar variables. */ |
1991 | *compound_stmt = begin_compound_stmt (0); |
1992 | current_binding_level->artificial = 1; |
1993 | r = begin_try_block (); |
1994 | FN_TRY_BLOCK_P (r) = 1; |
1995 | return r; |
1996 | } |
1997 | |
1998 | /* Finish a try-block, which may be given by TRY_BLOCK. */ |
1999 | |
2000 | void |
2001 | finish_try_block (tree try_block) |
2002 | { |
2003 | TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block)); |
2004 | TRY_HANDLERS (try_block) = push_stmt_list (); |
2005 | } |
2006 | |
2007 | /* Finish the body of a cleanup try-block, which may be given by |
2008 | TRY_BLOCK. */ |
2009 | |
2010 | void |
2011 | finish_cleanup_try_block (tree try_block) |
2012 | { |
2013 | TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block)); |
2014 | } |
2015 | |
2016 | /* Finish an implicitly generated try-block, whose cleanup is given
2017 | by CLEANUP. */
2018 | |
2019 | void |
2020 | finish_cleanup (tree cleanup, tree try_block) |
2021 | { |
2022 | TRY_HANDLERS (try_block) = cleanup; |
2023 | CLEANUP_P (try_block) = 1; |
2024 | } |
2025 | |
2026 | /* Likewise, for a function-try-block. */ |
2027 | |
2028 | void |
2029 | finish_function_try_block (tree try_block) |
2030 | { |
2031 | finish_try_block (try_block); |
2032 | /* FIXME : something queer about CTOR_INITIALIZER somehow following |
2033 | the try block, but moving it inside. */ |
2034 | in_function_try_handler = 1; |
2035 | } |
2036 | |
2037 | /* Finish a handler-sequence for a try-block, which may be given by |
2038 | TRY_BLOCK. */ |
2039 | |
2040 | void |
2041 | finish_handler_sequence (tree try_block) |
2042 | { |
2043 | TRY_HANDLERS (try_block) = pop_stmt_list (TRY_HANDLERS (try_block)); |
2044 | check_handlers (TRY_HANDLERS (try_block)); |
2045 | } |
2046 | |
2047 | /* Finish the handler-seq for a function-try-block, given by |
2048 | TRY_BLOCK. COMPOUND_STMT is the outer block created by |
2049 | begin_function_try_block. */ |
2050 | |
2051 | void |
2052 | finish_function_handler_sequence (tree try_block, tree compound_stmt) |
2053 | { |
2054 | in_function_try_handler = 0; |
2055 | finish_handler_sequence (try_block); |
2056 | finish_compound_stmt (compound_stmt); |
2057 | } |
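
/* For orientation (example only), the begin/finish_function_try_block
   routines above implement function-try-blocks such as

     int f (int i)
     try
       {
         return g (i);
       }
     catch (...)
       {
         return -1;
       }

   with the artificial outer compound statement giving __FUNCTION__ and
   similar variables a scope to live in.  */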
2058 | |
2059 | /* Begin a handler. Returns a HANDLER if appropriate. */ |
2060 | |
2061 | tree |
2062 | begin_handler (void) |
2063 | { |
2064 | tree r; |
2065 | |
2066 | r = build_stmt (input_location, HANDLER, NULL_TREE, NULL_TREE); |
2067 | add_stmt (r);
2068 | |
2069 | /* Create a binding level for the eh_info and the exception object |
2070 | cleanup. */ |
2071 | HANDLER_BODY (r) = do_pushlevel (sk_catch);
2072 | |
2073 | return r; |
2074 | } |
2075 | |
2076 | /* Finish the handler-parameters for a handler, which may be given by |
2077 | HANDLER. DECL is the declaration for the catch parameter, or NULL |
2078 | if this is a `catch (...)' clause. */ |
2079 | |
2080 | void |
2081 | finish_handler_parms (tree decl, tree handler) |
2082 | { |
2083 | tree type = NULL_TREE; |
2084 | if (processing_template_decl) |
2085 | { |
2086 | if (decl) |
2087 | { |
2088 | decl = pushdecl (decl); |
2089 | decl = push_template_decl (decl); |
2090 | HANDLER_PARMS (handler) = decl; |
2091 | type = TREE_TYPE (decl); |
2092 | } |
2093 | } |
2094 | else |
2095 | { |
2096 | type = expand_start_catch_block (decl); |
2097 | if (warn_catch_value |
2098 | && type != NULL_TREE |
2099 | && type != error_mark_node |
2100 | && !TYPE_REF_P (TREE_TYPE (decl))) |
2101 | { |
2102 | tree orig_type = TREE_TYPE (decl); |
2103 | if (CLASS_TYPE_P (orig_type)) |
2104 | { |
2105 | if (TYPE_POLYMORPHIC_P (orig_type)) |
2106 | warning_at (DECL_SOURCE_LOCATION (decl), |
2107 | OPT_Wcatch_value_, |
2108 | "catching polymorphic type %q#T by value", |
2109 | orig_type); |
2110 | else if (warn_catch_value > 1) |
2111 | warning_at (DECL_SOURCE_LOCATION (decl), |
2112 | OPT_Wcatch_value_, |
2113 | "catching type %q#T by value", orig_type); |
2114 | } |
2115 | else if (warn_catch_value > 2) |
2116 | warning_at (DECL_SOURCE_LOCATION (decl), |
2117 | OPT_Wcatch_value_, |
2118 | "catching non-reference type %q#T", orig_type); |
2119 | } |
2120 | } |
2121 | HANDLER_TYPE (handler) = type; |
2122 | } |
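
/* Illustrative example of the -Wcatch-value diagnostics above
   (std::runtime_error stands in for any polymorphic class):

     try { might_throw (); }
     catch (std::runtime_error e)   // warned: polymorphic type caught by
       { }                          // value, which slices the object

   Catching by (const) reference instead avoids the warning.  */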
2123 | |
2124 | /* Finish a handler, which may be given by HANDLER. The BLOCKs are |
2125 | the return value from the matching call to finish_handler_parms. */ |
2126 | |
2127 | void |
2128 | finish_handler (tree handler) |
2129 | { |
2130 | if (!processing_template_decl) |
2131 | expand_end_catch_block (); |
2132 | HANDLER_BODY (handler) = do_poplevel (HANDLER_BODY (handler)); |
2133 | } |
2134 | |
2135 | /* Begin a compound statement. FLAGS contains some bits that control the |
2136 | behavior and context. If BCS_NO_SCOPE is set, the compound statement |
2137 | does not define a scope. If BCS_FN_BODY is set, this is the outermost |
2138 | block of a function. If BCS_TRY_BLOCK is set, this is the block |
2139 | created on behalf of a TRY statement. Returns a token to be passed to |
2140 | finish_compound_stmt. */ |
2141 | |
2142 | tree |
2143 | begin_compound_stmt (unsigned int flags) |
2144 | { |
2145 | tree r; |
2146 | |
2147 | if (flags & BCS_NO_SCOPE) |
2148 | { |
2149 | r = push_stmt_list (); |
2150 | STATEMENT_LIST_NO_SCOPE (r) = 1; |
2151 | |
2152 | /* Normally, we try hard to keep the BLOCK for a statement-expression. |
2153 | But, if it's a statement-expression with a scopeless block, there's |
2154 | nothing to keep, and we don't want to accidentally keep a block |
2155 | *inside* the scopeless block. */ |
2156 | keep_next_level (false); |
2157 | } |
2158 | else |
2159 | { |
2160 | scope_kind sk = sk_block; |
2161 | if (flags & BCS_TRY_BLOCK) |
2162 | sk = sk_try; |
2163 | else if (flags & BCS_TRANSACTION) |
2164 | sk = sk_transaction; |
2165 | else if (flags & BCS_STMT_EXPR) |
2166 | sk = sk_stmt_expr; |
2167 | r = do_pushlevel (sk); |
2168 | } |
2169 | |
2170 | /* When processing a template, we need to remember where the braces were, |
2171 | so that we can set up identical scopes when instantiating the template |
2172 | later. BIND_EXPR is a handy candidate for this. |
2173 | Note that do_poplevel won't create a BIND_EXPR itself here (and thus |
2174 | result in nested BIND_EXPRs), since we don't build BLOCK nodes when |
2175 | processing templates. */ |
2176 | if (processing_template_decl) |
2177 | { |
2178 | r = build3 (BIND_EXPR, NULL, NULL, r, NULL); |
2179 | BIND_EXPR_TRY_BLOCK (r) = (flags & BCS_TRY_BLOCK) != 0; |
2180 | BIND_EXPR_BODY_BLOCK (r) = (flags & BCS_FN_BODY) != 0; |
2181 | TREE_SIDE_EFFECTS (r) = 1; |
2182 | } |
2183 | |
2184 | return r; |
2185 | } |
2186 | |
2187 | /* Finish a compound-statement, which is given by STMT. */ |
2188 | |
2189 | void |
2190 | finish_compound_stmt (tree stmt) |
2191 | { |
2192 | if (TREE_CODE (stmt) == BIND_EXPR) |
2193 | { |
2194 | tree body = do_poplevel (BIND_EXPR_BODY (stmt)); |
2195 | /* If the STATEMENT_LIST is empty and this BIND_EXPR isn't special, |
2196 | discard the BIND_EXPR so it can be merged with the containing |
2197 | STATEMENT_LIST. */ |
2198 | if (TREE_CODE (body) == STATEMENT_LIST |
2199 | && STATEMENT_LIST_HEAD (body) == NULL |
2200 | && !BIND_EXPR_BODY_BLOCK (stmt) |
2201 | && !BIND_EXPR_TRY_BLOCK (stmt)) |
2202 | stmt = body; |
2203 | else |
2204 | BIND_EXPR_BODY (stmt) = body; |
2205 | } |
2206 | else if (STATEMENT_LIST_NO_SCOPE (stmt)) |
2207 | stmt = pop_stmt_list (stmt); |
2208 | else |
2209 | { |
2210 | /* Destroy any ObjC "super" receivers that may have been |
2211 | created. */ |
2212 | objc_clear_super_receiver (); |
2213 | |
2214 | stmt = do_poplevel (stmt);
2215 | } |
2216 | |
2217 | /* ??? See c_end_compound_stmt wrt statement expressions. */ |
2218 | add_stmt (stmt);
2219 | } |
2220 | |
2221 | /* Finish an asm string literal, which can be a string literal |
2222 | or parenthesized constant expression. Extract the string literal |
2223 | from the latter. */ |
2224 | |
2225 | tree |
2226 | finish_asm_string_expression (location_t loc, tree string) |
2227 | { |
2228 | if (string == error_mark_node |
2229 | || TREE_CODE (string) == STRING_CST |
2230 | || processing_template_decl) |
2231 | return string; |
2232 | string = cxx_constant_value (string, tf_error);
2233 | if (TREE_CODE (string) == STRING_CST)
2234 | string = build1_loc (loc, PAREN_EXPR, TREE_TYPE (string),
2235 | string);
2236 | cexpr_str cstr (string);
2237 | if (!cstr.type_check (loc))
2238 | return error_mark_node;
2239 | if (!cstr.extract (loc, string))
2240 | string = error_mark_node; |
2241 | return string; |
2242 | } |
2243 | |
2244 | /* Finish an asm-statement, whose components are a STRING, some |
2245 | OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some |
2246 | LABELS. Also note whether the asm-statement should be |
2247 | considered volatile, and whether it is asm inline. TOPLEV_P |
2248 | is true if finishing namespace scope extended asm. */ |
2249 | |
2250 | tree |
2251 | finish_asm_stmt (location_t loc, int volatile_p, tree string, |
2252 | tree output_operands, tree input_operands, tree clobbers, |
2253 | tree labels, bool inline_p, bool toplev_p) |
2254 | { |
2255 | tree r; |
2256 | tree t; |
2257 | int ninputs = list_length (input_operands); |
2258 | int noutputs = list_length (output_operands); |
2259 | |
2260 | if (!processing_template_decl) |
2261 | { |
2262 | const char *constraint; |
2263 | const char **oconstraints; |
2264 | bool allows_mem, allows_reg, is_inout; |
2265 | tree operand; |
2266 | int i; |
2267 | |
2268 | oconstraints = XALLOCAVEC (const char *, noutputs); |
2269 | |
2270 | string = finish_asm_string_expression (cp_expr_loc_or_loc (string, loc),
2271 | string); |
2272 | if (string == error_mark_node) |
2273 | return error_mark_node; |
2274 | for (int i = 0; i < 2; ++i) |
2275 | for (t = i ? input_operands : output_operands; t; t = TREE_CHAIN (t)) |
2276 | { |
2277 | tree s = TREE_VALUE (TREE_PURPOSE (t)); |
2278 | s = finish_asm_string_expression (cp_expr_loc_or_loc (s, loc), s);
2279 | if (s == error_mark_node) |
2280 | return error_mark_node; |
2281 | TREE_VALUE (TREE_PURPOSE (t)) = s; |
2282 | } |
2283 | for (t = clobbers; t; t = TREE_CHAIN (t)) |
2284 | { |
2285 | tree s = TREE_VALUE (t); |
2286 | s = finish_asm_string_expression (cp_expr_loc_or_loc (s, loc), s);
2287 | TREE_VALUE (t) = s; |
2288 | } |
2289 | |
2290 | string = resolve_asm_operand_names (string, output_operands, |
2291 | input_operands, labels); |
2292 | |
2293 | for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i) |
2294 | { |
2295 | operand = TREE_VALUE (t); |
2296 | |
2297 | /* ??? Really, this should not be here. Users should be using a |
2298 | proper lvalue, dammit. But there's a long history of using |
2299 | casts in the output operands. In cases like longlong.h, this |
2300 | becomes a primitive form of typechecking -- if the cast can be |
2301 | removed, then the output operand had a type of the proper width; |
2302 | otherwise we'll get an error. Gross, but ... */ |
2303 | STRIP_NOPS (operand); |
2304 | |
2305 | operand = mark_lvalue_use (operand); |
2306 | |
2307 | if (!lvalue_or_else (operand, lv_asm, tf_warning_or_error)) |
2308 | operand = error_mark_node; |
2309 | |
2310 | if (operand != error_mark_node |
2311 | && (TREE_READONLY (operand) |
2312 | || CP_TYPE_CONST_P (TREE_TYPE (operand)) |
2313 | /* Functions are not modifiable, even though they are |
2314 | lvalues. */ |
2315 | || FUNC_OR_METHOD_TYPE_P (TREE_TYPE (operand)) |
2316 | /* If it's an aggregate and any field is const, then it is |
2317 | effectively const. */ |
2318 | || (CLASS_TYPE_P (TREE_TYPE (operand)) |
2319 | && C_TYPE_FIELDS_READONLY (TREE_TYPE (operand))))) |
2320 | cxx_readonly_error (loc, operand, lv_asm); |
2321 | |
2322 | tree *op = &operand; |
2323 | while (TREE_CODE (*op) == COMPOUND_EXPR) |
2324 | op = &TREE_OPERAND (*op, 1); |
2325 | switch (TREE_CODE (*op)) |
2326 | { |
2327 | case PREINCREMENT_EXPR: |
2328 | case PREDECREMENT_EXPR: |
2329 | case MODIFY_EXPR: |
2330 | *op = genericize_compound_lvalue (*op); |
2331 | op = &TREE_OPERAND (*op, 1); |
2332 | break; |
2333 | default: |
2334 | break; |
2335 | } |
2336 | |
2337 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); |
2338 | oconstraints[i] = constraint; |
2339 | |
2340 | if (parse_output_constraint (&constraint, i, ninputs, noutputs, |
2341 | &allows_mem, &allows_reg, &is_inout)) |
2342 | { |
2343 | /* If the operand is going to end up in memory, |
2344 | mark it addressable. */ |
2345 | if (!allows_reg && !cxx_mark_addressable (*op)) |
2346 | operand = error_mark_node; |
2347 | if (allows_reg && toplev_p) |
2348 | { |
2349 | error_at (loc, "constraint allows registers outside of " |
2350 | "a function"); |
2351 | operand = error_mark_node; |
2352 | } |
2353 | } |
2354 | else |
2355 | operand = error_mark_node; |
2356 | |
2357 | if (toplev_p && operand != error_mark_node) |
2358 | { |
2359 | if (TREE_SIDE_EFFECTS (operand)) |
2360 | { |
2361 | error_at (loc, "side-effects in output operand outside " |
2362 | "of a function"); |
2363 | operand = error_mark_node; |
2364 | } |
2365 | else |
2366 | { |
2367 | tree addr |
2368 | = cp_build_addr_expr (operand, tf_warning_or_error); |
2369 | if (addr == error_mark_node) |
2370 | operand = error_mark_node; |
2371 | else |
2372 | { |
2373 | addr = maybe_constant_value (addr); |
2374 | if (!initializer_constant_valid_p (addr, |
2375 | TREE_TYPE (addr))) |
2376 | { |
2377 | error_at (loc, "output operand outside of a " |
2378 | "function is not constant"); |
2379 | operand = error_mark_node; |
2380 | } |
2381 | else |
2382 | operand = build_fold_indirect_ref (addr); |
2383 | } |
2384 | } |
2385 | } |
2386 | else if (operand != error_mark_node && strstr (constraint, "-"))
2387 | { |
2388 | error_at (loc, "%<-%> modifier used inside of a function"); |
2389 | operand = error_mark_node; |
2390 | } |
2391 | |
2392 | TREE_VALUE (t) = operand; |
2393 | } |
2394 | |
2395 | for (i = 0, t = input_operands; t; ++i, t = TREE_CHAIN (t)) |
2396 | { |
2397 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); |
2398 | bool constraint_parsed |
2399 | = parse_input_constraint (&constraint, i, ninputs, noutputs, 0, |
2400 | oconstraints, &allows_mem, &allows_reg); |
2401 | /* If the operand is going to end up in memory, don't call |
2402 | decay_conversion. */ |
2403 | if (constraint_parsed && !allows_reg && allows_mem) |
2404 | operand = mark_lvalue_use (TREE_VALUE (t)); |
2405 | else |
2406 | operand = decay_conversion (TREE_VALUE (t), tf_warning_or_error); |
2407 | |
2408 | /* If the type of the operand hasn't been determined (e.g., |
2409 | because it involves an overloaded function), then issue |
2410 | an error message. There's no context available to |
2411 | resolve the overloading. */ |
2412 | if (TREE_TYPE (operand) == unknown_type_node) |
2413 | { |
2414 | error_at (loc, |
2415 | "type of %<asm%> operand %qE could not be determined", |
2416 | TREE_VALUE (t)); |
2417 | operand = error_mark_node; |
2418 | } |
2419 | |
2420 | if (constraint_parsed) |
2421 | { |
2422 | /* If the operand is going to end up in memory, |
2423 | mark it addressable. */ |
2424 | if (!allows_reg && allows_mem) |
2425 | { |
2426 | /* Strip the nops as we allow this case. FIXME, this really |
2427 | should be rejected or made deprecated. */ |
2428 | STRIP_NOPS (operand); |
2429 | |
2430 | tree *op = &operand; |
2431 | while (TREE_CODE (*op) == COMPOUND_EXPR) |
2432 | op = &TREE_OPERAND (*op, 1); |
2433 | switch (TREE_CODE (*op)) |
2434 | { |
2435 | case PREINCREMENT_EXPR: |
2436 | case PREDECREMENT_EXPR: |
2437 | case MODIFY_EXPR: |
2438 | *op = genericize_compound_lvalue (*op); |
2439 | op = &TREE_OPERAND (*op, 1); |
2440 | break; |
2441 | default: |
2442 | break; |
2443 | } |
2444 | |
2445 | if (!cxx_mark_addressable (*op)) |
2446 | operand = error_mark_node; |
2447 | } |
2448 | else if (!allows_reg && !allows_mem) |
2449 | { |
2450 | /* If constraint allows neither register nor memory, |
2451 | try harder to get a constant. */ |
2452 | tree constop = maybe_constant_value (operand); |
2453 | if (TREE_CONSTANT (constop)) |
2454 | operand = constop; |
2455 | } |
2456 | if (allows_reg && toplev_p) |
2457 | { |
2458 | error_at (loc, "constraint allows registers outside of " |
2459 | "a function"); |
2460 | operand = error_mark_node; |
2461 | } |
2462 | if (constraint[0] == ':' && operand != error_mark_node) |
2463 | { |
2464 | tree t = operand; |
2465 | STRIP_NOPS (t); |
2466 | if (TREE_CODE (t) != ADDR_EXPR |
2467 | || !(TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL |
2468 | || (VAR_P (TREE_OPERAND (t, 0)) |
2469 | && is_global_var (TREE_OPERAND (t, 0))))) |
2470 | { |
2471 | error_at (loc, "%<:%> constraint operand is not address " |
2472 | "of a function or non-automatic variable"); |
2473 | operand = error_mark_node; |
2474 | } |
2475 | } |
2476 | } |
2477 | else |
2478 | operand = error_mark_node; |
2479 | |
2480 | if (toplev_p && operand != error_mark_node) |
2481 | { |
2482 | if (TREE_SIDE_EFFECTS (operand)) |
2483 | { |
2484 | error_at (loc, "side-effects in input operand outside " |
2485 | "of a function"); |
2486 | operand = error_mark_node; |
2487 | } |
2488 | else if (allows_mem && lvalue_or_else (operand, lv_asm, tf_none)) |
2489 | { |
2490 | tree addr = cp_build_addr_expr (operand, tf_warning_or_error); |
2491 | if (addr == error_mark_node) |
2492 | operand = error_mark_node; |
2493 | else |
2494 | { |
2495 | addr = maybe_constant_value (addr); |
2496 | if (!initializer_constant_valid_p (addr, |
2497 | TREE_TYPE (addr))) |
2498 | { |
2499 | error_at (loc, "input operand outside of a " |
2500 | "function is not constant"); |
2501 | operand = error_mark_node; |
2502 | } |
2503 | else |
2504 | operand = build_fold_indirect_ref (addr); |
2505 | } |
2506 | } |
2507 | else |
2508 | { |
2509 | operand = maybe_constant_value (operand); |
2510 | if (!initializer_constant_valid_p (operand, |
2511 | TREE_TYPE (operand))) |
2512 | { |
2513 | error_at (loc, "input operand outside of a " |
2514 | "function is not constant"); |
2515 | operand = error_mark_node; |
2516 | } |
2517 | } |
2518 | } |
2519 | else if (operand != error_mark_node && strstr (constraint, "-"))
2520 | { |
2521 | error_at (loc, "%<-%> modifier used inside of a function"); |
2522 | operand = error_mark_node; |
2523 | } |
2524 | |
2525 | TREE_VALUE (t) = operand; |
2526 | } |
2527 | } |
2528 | |
2529 | r = build_stmt (loc, ASM_EXPR, string, |
2530 | output_operands, input_operands, |
2531 | clobbers, labels); |
2532 | ASM_VOLATILE_P (r) = volatile_p || noutputs == 0; |
2533 | ASM_INLINE_P (r) = inline_p; |
2534 | if (toplev_p) |
2535 | { |
2536 | symtab->finalize_toplevel_asm (r);
2537 | return r; |
2538 | } |
2539 | r = maybe_cleanup_point_expr_void (r);
2540 | return add_stmt (r);
2541 | } |
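
/* Minimal usage sketch of the extended asm handled above (x86 AT&T syntax;
   the constraint choice is arbitrary):

     int add1 (int x)
     {
       asm ("addl $1, %0" : "+r" (x));
       return x;
     }

   At namespace scope (TOPLEV_P) the stricter rules above apply: operands
   must be constant addresses and register constraints are rejected.  */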
2542 | |
2543 | /* Finish a label with the indicated NAME. Returns the new label. */ |
2544 | |
2545 | tree |
2546 | finish_label_stmt (tree name) |
2547 | { |
2548 | tree decl = define_label (input_location, name); |
2549 | |
2550 | if (decl == error_mark_node) |
2551 | return error_mark_node; |
2552 | |
2553 | add_stmt (build_stmt (input_location, LABEL_EXPR, decl));
2554 | |
2555 | return decl; |
2556 | } |
2557 | |
2558 | /* Finish a series of declarations for local labels. G++ allows users |
2559 | to declare "local" labels, i.e., labels with scope. This extension |
2560 | is useful when writing code involving statement-expressions. */ |
2561 | |
2562 | void |
2563 | finish_label_decl (tree name) |
2564 | { |
2565 | if (!at_function_scope_p ()) |
2566 | { |
2567 | error ("%<__label__%> declarations are only allowed in function scopes"); |
2568 | return; |
2569 | } |
2570 | |
2571 | add_decl_expr (declare_local_label (name));
2572 | } |
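
/* Example of the local-label extension handled here (illustrative; the
   function and names are made up):

     int first_negative (const int *p, int n)
     {
       return ({ __label__ out;
                 int idx = -1;
                 for (int i = 0; i < n; i++)
                   if (p[i] < 0) { idx = i; goto out; }
                 out:;
                 idx; });
     }

   __label__ limits the label's scope to the enclosing statement expression,
   so macro-style code using it can be expanded more than once per function.  */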
2573 | |
2574 | /* When DECL goes out of scope, make sure that CLEANUP is executed. */ |
2575 | |
2576 | void |
2577 | finish_decl_cleanup (tree decl, tree cleanup) |
2578 | { |
2579 | push_cleanup (decl, cleanup, false);
2580 | } |
2581 | |
2582 | /* If the current scope exits with an exception, run CLEANUP. */ |
2583 | |
2584 | void |
2585 | finish_eh_cleanup (tree cleanup) |
2586 | { |
2587 | push_cleanup (NULL, cleanup, true);
2588 | } |
2589 | |
2590 | /* The MEM_INITS is a list of mem-initializers, in reverse of the |
2591 | order they were written by the user. Each node is as for |
2592 | emit_mem_initializers. */ |
2593 | |
2594 | void |
2595 | finish_mem_initializers (tree mem_inits) |
2596 | { |
2597 | /* Reorder the MEM_INITS so that they are in the order they appeared |
2598 | in the source program. */ |
2599 | mem_inits = nreverse (mem_inits); |
2600 | |
2601 | if (processing_template_decl) |
2602 | { |
2603 | tree mem; |
2604 | |
2605 | for (mem = mem_inits; mem; mem = TREE_CHAIN (mem)) |
2606 | { |
2607 | /* If the TREE_PURPOSE is a TYPE_PACK_EXPANSION, skip the |
2608 | check for bare parameter packs in the TREE_VALUE, because |
2609 | any parameter packs in the TREE_VALUE have already been |
2610 | bound as part of the TREE_PURPOSE. See |
2611 | make_pack_expansion for more information. */ |
2612 | if (TREE_CODE (TREE_PURPOSE (mem)) != TYPE_PACK_EXPANSION |
2613 | && check_for_bare_parameter_packs (TREE_VALUE (mem))) |
2614 | TREE_VALUE (mem) = error_mark_node; |
2615 | } |
2616 | |
2617 | add_stmt (build_min_nt_loc (UNKNOWN_LOCATION,
2618 | CTOR_INITIALIZER, mem_inits)); |
2619 | } |
2620 | else |
2621 | emit_mem_initializers (mem_inits); |
2622 | } |
2623 | |
2624 | /* Obfuscate EXPR if it looks like an id-expression or member access so |
2625 | that the call to finish_decltype in do_auto_deduction will give the |
2626 | right result. If EVEN_UNEVAL, do this even in unevaluated context. */ |
2627 | |
2628 | tree |
2629 | force_paren_expr (tree expr, bool even_uneval /* = false */) |
2630 | { |
2631 | /* This is only needed for decltype(auto) in C++14. */ |
2632 | if (cxx_dialect < cxx14) |
2633 | return expr; |
2634 | |
2635 | /* If we're in unevaluated context, we can't be deducing a |
2636 | return/initializer type, so we don't need to mess with this. */ |
2637 | if (cp_unevaluated_operand && !even_uneval) |
2638 | return expr; |
2639 | |
2640 | if (TREE_CODE (expr) == COMPONENT_REF |
2641 | || TREE_CODE (expr) == SCOPE_REF |
2642 | || REFERENCE_REF_P (expr)) |
2643 | REF_PARENTHESIZED_P (expr) = true; |
2644 | else if (DECL_P (tree_strip_any_location_wrapper (expr))) |
2645 | { |
2646 | location_t loc = cp_expr_location (expr);
2647 | const tree_code code = processing_template_decl ? PAREN_EXPR |
2648 | : VIEW_CONVERT_EXPR; |
2649 | expr = build1_loc (loc, code, TREE_TYPE (expr), expr);
2650 | REF_PARENTHESIZED_P (expr) = true; |
2651 | } |
2652 | return expr; |
2653 | } |
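
/* The classic motivating example for the obfuscation above (illustration):

     int x;
     decltype(auto) f () { return x; }    // deduces int
     decltype(auto) g () { return (x); }  // parenthesized id: deduces int&

   Marking the operand as parenthesized is what lets do_auto_deduction tell
   the two cases apart.  */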
2654 | |
2655 | /* If T is an id-expression obfuscated by force_paren_expr, undo the |
2656 | obfuscation and return the underlying id-expression. Otherwise |
2657 | return T. */ |
2658 | |
2659 | tree |
2660 | maybe_undo_parenthesized_ref (tree t) |
2661 | { |
2662 | if (cxx_dialect < cxx14) |
2663 | return t; |
2664 | |
2665 | if ((TREE_CODE (t) == PAREN_EXPR || TREE_CODE (t) == VIEW_CONVERT_EXPR) |
2666 | && REF_PARENTHESIZED_P (t)) |
2667 | t = TREE_OPERAND (t, 0); |
2668 | |
2669 | return t; |
2670 | } |
2671 | |
2672 | /* Finish a parenthesized expression EXPR. */ |
2673 | |
2674 | cp_expr |
2675 | finish_parenthesized_expr (cp_expr expr) |
2676 | { |
2677 | if (EXPR_P (expr)) |
2678 | { |
2679 | /* This inhibits warnings in maybe_warn_unparenthesized_assignment |
2680 | and c_common_truthvalue_conversion. */ |
2681 | suppress_warning (STRIP_REFERENCE_REF (*expr), OPT_Wparentheses); |
2682 | /* And maybe_warn_sizeof_array_div. */ |
2683 | suppress_warning (STRIP_REFERENCE_REF (*expr), OPT_Wsizeof_array_div); |
2684 | } |
2685 | |
2686 | if (TREE_CODE (expr) == OFFSET_REF |
2687 | || TREE_CODE (expr) == SCOPE_REF) |
2688 | /* [expr.unary.op]/3 The qualified id of a pointer-to-member must not be |
2689 | enclosed in parentheses. */ |
2690 | PTRMEM_OK_P (expr) = 0; |
2691 | |
2692 | tree stripped_expr = tree_strip_any_location_wrapper (expr);
2693 | if (TREE_CODE (stripped_expr) == STRING_CST) |
2694 | PAREN_STRING_LITERAL_P (stripped_expr) = 1; |
2695 | else if (TREE_CODE (stripped_expr) == PACK_INDEX_EXPR) |
2696 | PACK_INDEX_PARENTHESIZED_P (stripped_expr) = true; |
2697 | |
2698 | expr = cp_expr (force_paren_expr (expr), expr.get_location ()); |
2699 | |
2700 | return expr; |
2701 | } |
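
/* Example of the PTRMEM_OK_P clearing above (illustrative):

     struct A { int m; };
     int A::*p1 = &A::m;     // OK, forms a pointer to member
     int A::*p2 = &(A::m);   // error: the qualified-id is parenthesized

   per [expr.unary.op]/3.  */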
2702 | |
2703 | /* Finish a reference to a non-static data member (DECL) that is not |
2704 | preceded by `.' or `->'. */ |
2705 | |
2706 | tree |
2707 | finish_non_static_data_member (tree decl, tree object, tree qualifying_scope, |
2708 | tsubst_flags_t complain /* = tf_warning_or_error */) |
2709 | { |
2710 | gcc_assert (TREE_CODE (decl) == FIELD_DECL); |
2711 | bool try_omp_private = !object && omp_private_member_map; |
2712 | tree ret; |
2713 | |
2714 | if (!object) |
2715 | { |
2716 | tree scope = qualifying_scope; |
2717 | if (scope == NULL_TREE) |
2718 | { |
2719 | scope = context_for_name_lookup (decl); |
2720 | if (!TYPE_P (scope)) |
2721 | { |
2722 | /* Can happen during error recovery (c++/85014). */ |
2723 | gcc_assert (seen_error ()); |
2724 | return error_mark_node; |
2725 | } |
2726 | } |
2727 | object = maybe_dummy_object (scope, NULL); |
2728 | } |
2729 | |
2730 | object = maybe_resolve_dummy (object, true); |
2731 | if (object == error_mark_node) |
2732 | return error_mark_node; |
2733 | |
2734 | /* DR 613/850: Can use non-static data members without an associated |
2735 | object in sizeof/decltype/alignof. */ |
2736 | if (is_dummy_object (object) |
2737 | && !cp_unevaluated_operand |
2738 | && (!processing_template_decl || !current_class_ref)) |
2739 | { |
2740 | if (complain & tf_error) |
2741 | { |
2742 | auto_diagnostic_group d; |
2743 | if (current_function_decl |
2744 | && DECL_STATIC_FUNCTION_P (current_function_decl)) |
2745 | error ("invalid use of member %qD in static member function", decl); |
2746 | else if (current_function_decl |
2747 | && processing_contract_condition |
2748 | && DECL_CONSTRUCTOR_P (current_function_decl)) |
2749 | error ("invalid use of member %qD in constructor %<pre%> contract", decl); |
2750 | else if (current_function_decl |
2751 | && processing_contract_condition |
2752 | && DECL_DESTRUCTOR_P (current_function_decl)) |
2753 | error ("invalid use of member %qD in destructor %<post%> contract", decl); |
2754 | else |
2755 | error ("invalid use of non-static data member %qD", decl); |
2756 | inform (DECL_SOURCE_LOCATION (decl), "declared here"); |
2757 | } |
2758 | |
2759 | return error_mark_node; |
2760 | } |
2761 | |
2762 | if (current_class_ptr) |
2763 | TREE_USED (current_class_ptr) = 1; |
2764 | if (processing_template_decl) |
2765 | { |
2766 | tree type = TREE_TYPE (decl); |
2767 | |
2768 | if (TYPE_REF_P (type)) |
2769 | /* Quals on the object don't matter. */; |
2770 | else if (PACK_EXPANSION_P (type)) |
2771 | /* Don't bother trying to represent this. */ |
2772 | type = NULL_TREE; |
2773 | else if (!TREE_TYPE (object) || WILDCARD_TYPE_P (TREE_TYPE (object))) |
2774 | /* We don't know what the eventual quals will be, so punt until |
2775 | instantiation time. |
2776 | |
2777 | This can happen when called from build_capture_proxy for an explicit |
2778 | object lambda. It's a bit marginal to call this function in that |
2779 | case, since this function is for references to members of 'this', |
2780 | but the deduced type is required to be derived from the closure |
2781 | type, so it works. */ |
2782 | type = NULL_TREE; |
2783 | else |
2784 | { |
2785 | /* Set the cv qualifiers. */ |
2786 | int quals = cp_type_quals (TREE_TYPE (object)); |
2787 | |
2788 | if (DECL_MUTABLE_P (decl)) |
2789 | quals &= ~TYPE_QUAL_CONST; |
2790 | |
2791 | quals |= cp_type_quals (TREE_TYPE (decl)); |
2792 | type = cp_build_qualified_type (type, quals); |
2793 | } |
2794 | |
2795 | if (qualifying_scope) |
2796 | /* Wrap this in a SCOPE_REF for now. */ |
2797 | ret = build_qualified_name (type, qualifying_scope, decl, |
2798 | /*template_p=*/false); |
2799 | else |
2800 | ret = (convert_from_reference |
2801 | (build_min (COMPONENT_REF, type, object, decl, NULL_TREE))); |
2802 | } |
2803 | /* If PROCESSING_TEMPLATE_DECL is nonzero here, then |
2804 | QUALIFYING_SCOPE is also non-null. */ |
2805 | else |
2806 | { |
2807 | tree access_type = TREE_TYPE (object); |
2808 | |
2809 | if (!perform_or_defer_access_check (TYPE_BINFO (access_type), decl, |
2810 | decl, complain))
2811 | return error_mark_node; |
2812 | |
2813 | /* If the data member was named `C::M', convert `*this' to `C' |
2814 | first. */ |
2815 | if (qualifying_scope) |
2816 | { |
2817 | tree binfo = NULL_TREE; |
2818 | object = build_scoped_ref (object, qualifying_scope, |
2819 | &binfo); |
2820 | } |
2821 | |
2822 | ret = build_class_member_access_expr (object, decl, |
2823 | /*access_path=*/NULL_TREE, |
2824 | /*preserve_reference=*/false, |
2825 | complain); |
2826 | } |
2827 | if (try_omp_private) |
2828 | { |
2829 | tree *v = omp_private_member_map->get (decl);
2830 | if (v) |
2831 | ret = convert_from_reference (*v); |
2832 | } |
2833 | return ret; |
2834 | } |
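
/* A small illustration of the checks above (not exhaustive):

     struct A
     {
       int i;
       static int f () { return i; }           // error: invalid use of member
       static int g () { return sizeof (i); }  // OK since DR 613/850
     };  */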
2835 | |
2836 | /* DECL was the declaration to which a qualified-id resolved. Issue |
2837 | an error message if it is not accessible. If OBJECT_TYPE is |
2838 | non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the |
2839 | type of `*x', or `x', respectively. If the DECL was named as |
2840 | `A::B' then NESTED_NAME_SPECIFIER is `A'. Return value is like |
2841 | perform_access_checks above. */ |
2842 | |
2843 | bool |
2844 | check_accessibility_of_qualified_id (tree decl, |
2845 | tree object_type, |
2846 | tree nested_name_specifier, |
2847 | tsubst_flags_t complain) |
2848 | { |
2849 | /* If we're not checking, return immediately. */ |
2850 | if (deferred_access_no_check) |
2851 | return true; |
2852 | |
2853 | /* Determine the SCOPE of DECL. */ |
2854 | tree scope = context_for_name_lookup (decl); |
2855 | /* If the SCOPE is not a type, then DECL is not a member. */ |
2856 | if (!TYPE_P (scope) |
2857 | /* If SCOPE is dependent then we can't perform this access check now, |
2858 | and since we'll perform this access check again after substitution |
2859 | there's no need to explicitly defer it. */ |
2860 | || dependent_type_p (scope)) |
2861 | return true; |
2862 | |
2863 | tree qualifying_type = NULL_TREE; |
2864 | /* Compute the scope through which DECL is being accessed. */ |
2865 | if (object_type |
2866 | /* OBJECT_TYPE might not be a class type; consider: |
2867 | |
2868 | class A { typedef int I; }; |
2869 | I *p; |
2870 | p->A::I::~I(); |
2871 | |
2872 | In this case, we will have "A::I" as the DECL, but "I" as the |
2873 | OBJECT_TYPE. */ |
2874 | && CLASS_TYPE_P (object_type) |
2875 | && DERIVED_FROM_P (scope, object_type)) |
2876 | { |
2877 | /* If we are processing a `->' or `.' expression, use the type of the |
2878 | left-hand side. */ |
2879 | if (tree open = currently_open_class (object_type)) |
2880 | qualifying_type = open; |
2881 | else |
2882 | qualifying_type = object_type; |
2883 | } |
2884 | else if (nested_name_specifier) |
2885 | { |
2886 | /* If the reference is to a non-static member of the |
2887 | current class, treat it as if it were referenced through |
2888 | `this'. */ |
2889 | if (DECL_NONSTATIC_MEMBER_P (decl) |
2890 | && current_class_ptr) |
2891 | if (tree current = current_nonlambda_class_type ()) |
2892 | { |
2893 | if (dependent_type_p (current)) |
2894 | /* In general we can't know whether this access goes through |
2895 | `this' until instantiation time. Punt now, or else we might |
2896 | create a deferred access check that's not relative to `this' |
2897 | when it ought to be. We'll check this access again after |
2898 | substitution, e.g. from tsubst_qualified_id. */ |
2899 | return true; |
2900 | |
2901 | if (DERIVED_FROM_P (scope, current)) |
2902 | qualifying_type = current; |
2903 | } |
2904 | /* Otherwise, use the type indicated by the |
2905 | nested-name-specifier. */ |
2906 | if (!qualifying_type) |
2907 | qualifying_type = nested_name_specifier; |
2908 | } |
2909 | else |
2910 | /* Otherwise, the name must be from the current class or one of |
2911 | its bases. */ |
2912 | qualifying_type = currently_open_derived_class (scope); |
2913 | |
2914 | if (qualifying_type |
2915 | /* It is possible for qualifying type to be a TEMPLATE_TYPE_PARM |
2916 | or similar in a default argument value. */ |
2917 | && CLASS_TYPE_P (qualifying_type)) |
2918 | return perform_or_defer_access_check (TYPE_BINFO (qualifying_type), decl, |
2919 | decl, complain);
2920 | |
2921 | return true; |
2922 | } |
2923 | |
2924 | /* EXPR is the result of a qualified-id. The QUALIFYING_CLASS was the |
2925 | class named to the left of the "::" operator. DONE is true if this |
2926 | expression is a complete postfix-expression; it is false if this |
2927 | expression is followed by '->', '[', '(', etc. ADDRESS_P is true |
2928 | iff this expression is the operand of '&'. TEMPLATE_P is true iff |
2929 | the qualified-id was of the form "A::template B". TEMPLATE_ARG_P |
2930 | is true iff this qualified name appears as a template argument. */ |
2931 | |
2932 | tree |
2933 | finish_qualified_id_expr (tree qualifying_class, |
2934 | tree expr, |
2935 | bool done, |
2936 | bool address_p, |
2937 | bool template_p, |
2938 | bool template_arg_p, |
2939 | tsubst_flags_t complain) |
2940 | { |
2941 | gcc_assert (TYPE_P (qualifying_class)); |
2942 | |
2943 | if (error_operand_p (expr))
2944 | return error_mark_node; |
2945 | |
2946 | if (DECL_P (expr) |
2947 | /* Functions are marked after overload resolution; avoid redundant |
2948 | warnings. */ |
2949 | && TREE_CODE (expr) != FUNCTION_DECL |
2950 | && !mark_used (expr, complain)) |
2951 | return error_mark_node; |
2952 | |
2953 | if (template_p) |
2954 | { |
2955 | if (TREE_CODE (expr) == UNBOUND_CLASS_TEMPLATE) |
2956 | { |
2957 | /* cp_parser_lookup_name thought we were looking for a type, |
2958 | but we're actually looking for a declaration. */ |
2959 | qualifying_class = TYPE_CONTEXT (expr); |
2960 | expr = TYPE_IDENTIFIER (expr); |
2961 | } |
2962 | else |
2963 | check_template_keyword (expr); |
2964 | } |
2965 | |
2966 | /* If EXPR occurs as the operand of '&', use special handling that |
2967 | permits a pointer-to-member. */ |
2968 | if (address_p && done |
2969 | && TREE_CODE (qualifying_class) != ENUMERAL_TYPE) |
2970 | { |
2971 | if (TREE_CODE (expr) == SCOPE_REF) |
2972 | expr = TREE_OPERAND (expr, 1); |
2973 | expr = build_offset_ref (qualifying_class, expr, |
2974 | /*address_p=*/true, complain); |
2975 | return expr; |
2976 | } |
2977 | |
2978 | /* No need to check access within an enum. */ |
2979 | if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE |
2980 | && TREE_CODE (expr) != IDENTIFIER_NODE) |
2981 | return expr; |
2982 | |
2983 | /* Within the scope of a class, turn references to non-static |
2984 | members into expression of the form "this->...". */ |
2985 | if (template_arg_p) |
2986 | /* But, within a template argument, we do not want to make the
2987 | transformation, as there is no "this" pointer. */
2988 | ; |
2989 | else if (TREE_CODE (expr) == FIELD_DECL) |
2990 | { |
2991 | push_deferring_access_checks (dk_no_check);
2992 | expr = finish_non_static_data_member (expr, NULL_TREE,
2993 | qualifying_class, complain);
2994 | pop_deferring_access_checks (); |
2995 | } |
2996 | else if (BASELINK_P (expr)) |
2997 | { |
2998 | /* See if any of the functions are non-static members. */ |
2999 | /* If so, the expression may be relative to 'this'. */ |
3000 | if (!shared_member_p (expr) |
3001 | && current_class_ptr |
3002 | && DERIVED_FROM_P (qualifying_class, |
3003 | current_nonlambda_class_type ())) |
3004 | expr = (build_class_member_access_expr |
3005 | (maybe_dummy_object (qualifying_class, NULL), |
3006 | expr, |
3007 | BASELINK_ACCESS_BINFO (expr), |
3008 | /*preserve_reference=*/false, |
3009 | complain)); |
3010 | else if (done) |
3011 | /* The expression is a qualified name whose address is not |
3012 | being taken. */ |
3013 | expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false, |
3014 | complain); |
3015 | } |
3016 | else if (!template_p |
3017 | && TREE_CODE (expr) == TEMPLATE_DECL |
3018 | && !DECL_FUNCTION_TEMPLATE_P (expr)) |
3019 | { |
3020 | if (complain & tf_error) |
3021 | error ("%qE missing template arguments", expr); |
3022 | return error_mark_node; |
3023 | } |
3024 | else |
3025 | { |
3026 | /* In a template, return a SCOPE_REF for most qualified-ids |
3027 | so that we can check access at instantiation time. But if |
3028 | we're looking at a member of the current instantiation, we |
3029 | know we have access and building up the SCOPE_REF confuses |
3030 | non-type template argument handling. */ |
3031 | if (processing_template_decl |
3032 | && (!currently_open_class (qualifying_class) |
3033 | || TREE_CODE (expr) == IDENTIFIER_NODE |
3034 | || TREE_CODE (expr) == TEMPLATE_ID_EXPR |
3035 | || TREE_CODE (expr) == BIT_NOT_EXPR)) |
3036 | expr = build_qualified_name (TREE_TYPE (expr), |
3037 | qualifying_class, expr, |
3038 | template_p); |
3039 | else if (tree wrap = maybe_get_tls_wrapper_call (expr)) |
3040 | expr = wrap; |
3041 | |
3042 | expr = convert_from_reference (expr); |
3043 | } |
3044 | |
3045 | return expr; |
3046 | } |
3047 | |
3048 | /* Begin a statement-expression. The value returned must be passed to |
3049 | finish_stmt_expr. */ |
3050 | |
3051 | tree |
3052 | begin_stmt_expr (void) |
3053 | { |
3054 | return push_stmt_list (); |
3055 | } |
3056 | |
3057 | /* Process the final expression of a statement expression. EXPR can be |
3058 | NULL, if the final expression is empty. Return a STATEMENT_LIST |
3059 | containing all the statements in the statement-expression, or |
3060 | ERROR_MARK_NODE if there was an error. */ |
3061 | |
3062 | tree |
3063 | finish_stmt_expr_expr (tree expr, tree stmt_expr) |
3064 | { |
3065 | if (error_operand_p (expr))
3066 | { |
3067 | /* The type of the statement-expression is the type of the last |
3068 | expression. */ |
3069 | TREE_TYPE (stmt_expr) = error_mark_node; |
3070 | return error_mark_node; |
3071 | } |
3072 | |
3073 | /* If the last statement does not have "void" type, then the value |
3074 | of the last statement is the value of the entire expression. */ |
3075 | if (expr) |
3076 | { |
3077 | tree type = TREE_TYPE (expr); |
3078 | |
3079 | if (type && type_unknown_p (type))
3080 | { |
3081 | error ("a statement expression is an insufficient context" |
3082 | " for overload resolution"); |
3083 | TREE_TYPE (stmt_expr) = error_mark_node; |
3084 | return error_mark_node; |
3085 | } |
3086 | else if (processing_template_decl) |
3087 | { |
3088 | /* Not finish_expr_stmt because we don't want convert_to_void. */ |
3089 | expr = build_stmt (input_location, EXPR_STMT, expr); |
3090 | expr = add_stmt (expr);
3091 | /* Mark the last statement so that we can recognize it as such at |
3092 | template-instantiation time. */ |
3093 | EXPR_STMT_STMT_EXPR_RESULT (expr) = 1; |
3094 | } |
3095 | else if (VOID_TYPE_P (type)) |
3096 | { |
3097 | /* Just treat this like an ordinary statement. */ |
3098 | expr = finish_expr_stmt (expr); |
3099 | } |
3100 | else |
3101 | { |
3102 | /* It actually has a value we need to deal with. First, force it |
3103 | to be an rvalue so that we won't need to build up a copy |
3104 | constructor call later when we try to assign it to something. */ |
3105 | expr = force_rvalue (expr, tf_warning_or_error); |
3106 | if (error_operand_p (expr))
3107 | return error_mark_node; |
3108 | |
3109 | /* Update for array-to-pointer decay. */ |
3110 | type = TREE_TYPE (expr); |
3111 | |
3112 | /* This TARGET_EXPR will initialize the outer one added by |
3113 | finish_stmt_expr. */ |
3114 | set_target_expr_eliding (expr); |
3115 | |
3116 | /* Wrap it in a CLEANUP_POINT_EXPR and add it to the list like a |
3117 | normal statement, but don't convert to void or actually add |
3118 | the EXPR_STMT. */ |
3119 | if (TREE_CODE (expr) != CLEANUP_POINT_EXPR) |
3120 | expr = maybe_cleanup_point_expr (expr); |
3121 | add_stmt (expr); |
3122 | } |
3123 | |
3124 | /* The type of the statement-expression is the type of the last |
3125 | expression. */ |
3126 | TREE_TYPE (stmt_expr) = type; |
3127 | } |
3128 | |
3129 | return stmt_expr; |
3130 | } |
3131 | |
3132 | /* Finish a statement-expression. EXPR should be the value returned |
3133 | by the previous begin_stmt_expr. Returns an expression |
3134 | representing the statement-expression. */ |
3135 | |
3136 | tree |
3137 | finish_stmt_expr (tree stmt_expr, bool has_no_scope) |
3138 | { |
3139 | tree type; |
3140 | tree result; |
3141 | |
3142 | if (error_operand_p (stmt_expr)) |
3143 | { |
3144 | pop_stmt_list (stmt_expr); |
3145 | return error_mark_node; |
3146 | } |
3147 | |
3148 | gcc_assert (TREE_CODE (stmt_expr) == STATEMENT_LIST); |
3149 | |
3150 | type = TREE_TYPE (stmt_expr); |
3151 | result = pop_stmt_list (stmt_expr); |
3152 | TREE_TYPE (result) = type; |
3153 | |
3154 | if (processing_template_decl) |
3155 | { |
3156 | result = build_min (STMT_EXPR, type, result); |
3157 | TREE_SIDE_EFFECTS (result) = 1; |
3158 | STMT_EXPR_NO_SCOPE (result) = has_no_scope; |
3159 | } |
3160 | else if (CLASS_TYPE_P (type)) |
3161 | { |
3162 | /* Wrap the statement-expression in a TARGET_EXPR so that the |
3163 | temporary object created by the final expression is destroyed at |
3164 | the end of the full-expression containing the |
3165 | statement-expression. */ |
3166 | result = force_target_expr (type, result, tf_warning_or_error); |
3167 | } |
3168 | |
3169 | return result; |
3170 | } |
3171 | |
3172 | /* Returns the expression which provides the value of STMT_EXPR. */ |
3173 | |
3174 | tree |
3175 | stmt_expr_value_expr (tree stmt_expr) |
3176 | { |
3177 | tree t = STMT_EXPR_STMT (stmt_expr); |
3178 | |
3179 | if (TREE_CODE (t) == BIND_EXPR) |
3180 | t = BIND_EXPR_BODY (t); |
3181 | |
3182 | if (TREE_CODE (t) == STATEMENT_LIST && STATEMENT_LIST_TAIL (t)) |
3183 | t = STATEMENT_LIST_TAIL (t)->stmt; |
3184 | |
3185 | if (TREE_CODE (t) == EXPR_STMT) |
3186 | t = EXPR_STMT_EXPR (t); |
3187 | |
3188 | return t; |
3189 | } |
3190 | |
3191 | /* Return TRUE iff EXPR_STMT is an empty list of |
3192 | expression statements. */ |
3193 | |
3194 | bool |
3195 | empty_expr_stmt_p (tree expr_stmt) |
3196 | { |
3197 | tree body = NULL_TREE; |
3198 | |
3199 | if (expr_stmt == void_node) |
3200 | return true; |
3201 | |
3202 | if (expr_stmt) |
3203 | { |
3204 | if (TREE_CODE (expr_stmt) == EXPR_STMT) |
3205 | body = EXPR_STMT_EXPR (expr_stmt); |
3206 | else if (TREE_CODE (expr_stmt) == STATEMENT_LIST) |
3207 | body = expr_stmt; |
3208 | } |
3209 | |
3210 | if (body) |
3211 | { |
3212 | if (TREE_CODE (body) == STATEMENT_LIST) |
3213 | return tsi_end_p (tsi_start (body)); |
3214 | else |
3215 | return empty_expr_stmt_p (body); |
3216 | } |
3217 | return false; |
3218 | } |
3219 | |
3220 | /* Perform Koenig lookup. FN_EXPR is the postfix-expression representing |
3221 | the function (or functions) to call; ARGS are the arguments to the |
3222 | call. Returns the functions to be considered by overload resolution. |
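| |
| An illustrative sketch of argument-dependent (Koenig) lookup, using |
| hypothetical names N, S, f and g: |
| |
| namespace N { struct S {}; void f (S); } |
| void g (N::S s) { f (s); } |
| |
| The unqualified call f (s) considers N::f because the argument's type |
| is a member of namespace N, even though N::f is not visible to |
| ordinary unqualified lookup in g. */ |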
3223 | |
3224 | cp_expr |
3225 | perform_koenig_lookup (cp_expr fn_expr, vec<tree, va_gc> *args, |
3226 | tsubst_flags_t complain) |
3227 | { |
3228 | tree identifier = NULL_TREE; |
3229 | tree functions = NULL_TREE; |
3230 | tree tmpl_args = NULL_TREE; |
3231 | bool template_id = false; |
3232 | location_t loc = fn_expr.get_location (); |
3233 | tree fn = fn_expr.get_value (); |
3234 | |
3235 | STRIP_ANY_LOCATION_WRAPPER (fn); |
3236 | |
3237 | if (TREE_CODE (fn) == TEMPLATE_ID_EXPR) |
3238 | { |
3239 | /* Use a separate flag to handle null args. */ |
3240 | template_id = true; |
3241 | tmpl_args = TREE_OPERAND (fn, 1); |
3242 | fn = TREE_OPERAND (fn, 0); |
3243 | } |
3244 | |
3245 | /* Find the name of the overloaded function. */ |
3246 | if (identifier_p (fn)) |
3247 | identifier = fn; |
3248 | else |
3249 | { |
3250 | functions = fn; |
3251 | identifier = OVL_NAME (functions); |
3252 | } |
3253 | |
3254 | /* A call to a namespace-scope function using an unqualified name. |
3255 | |
3256 | Do Koenig lookup -- unless any of the arguments are |
3257 | type-dependent. */ |
3258 | if (!any_type_dependent_arguments_p (args) |
3259 | && !any_dependent_template_arguments_p (tmpl_args)) |
3260 | { |
3261 | fn = lookup_arg_dependent (identifier, functions, args); |
3262 | if (!fn) |
3263 | { |
3264 | /* The unqualified name could not be resolved. */ |
3265 | if (complain & tf_error) |
3266 | fn = unqualified_fn_lookup_error (cp_expr (identifier, loc)); |
3267 | else |
3268 | fn = identifier; |
3269 | } |
3270 | } |
3271 | |
3272 | if (fn && template_id && fn != error_mark_node) |
3273 | fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args); |
3274 | |
3275 | return cp_expr (fn, loc); |
3276 | } |
3277 | |
3278 | /* Generate an expression for `FN (ARGS)'. This may change the |
3279 | contents of ARGS. |
3280 | |
3281 | If DISALLOW_VIRTUAL is true, the call to FN will not be generated |
3282 | as a virtual call, even if FN is virtual. (This flag is set when |
3283 | encountering an expression where the function name is explicitly |
3284 | qualified. For example a call to `X::f' never generates a virtual |
3285 | call.) |
3286 | |
3287 | Returns code for the call. |
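| |
| An illustrative example (with hypothetical types B and D) of a call |
| for which DISALLOW_VIRTUAL is set: |
| |
| struct B { virtual void f (); }; |
| struct D : B { void f () override; }; |
| void g (D *p) { p->B::f (); } |
| |
| The explicitly qualified call to B::f is dispatched non-virtually. */ |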
3288 | |
3289 | tree |
3290 | finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual, |
3291 | bool koenig_p, tsubst_flags_t complain) |
3292 | { |
3293 | tree result; |
3294 | tree orig_fn; |
3295 | vec<tree, va_gc> *orig_args = *args; |
3296 | |
3297 | if (fn == error_mark_node) |
3298 | return error_mark_node; |
3299 | |
3300 | gcc_assert (!TYPE_P (fn)); |
3301 | |
3302 | /* If FN may be a FUNCTION_DECL obfuscated by force_paren_expr, undo |
3303 | it so that we can tell this is a call to a known function. */ |
3304 | fn = maybe_undo_parenthesized_ref (fn); |
3305 | |
3306 | STRIP_ANY_LOCATION_WRAPPER (fn); |
3307 | |
3308 | orig_fn = fn; |
3309 | |
3310 | if (processing_template_decl) |
3311 | { |
3312 | /* If FN is a local extern declaration (or set thereof) in a template, |
3313 | look it up again at instantiation time. */ |
3314 | if (is_overloaded_fn (fn)) |
3315 | { |
3316 | tree ifn = get_first_fn (fn); |
3317 | if (TREE_CODE (ifn) == FUNCTION_DECL |
3318 | && dependent_local_decl_p (ifn)) |
3319 | orig_fn = DECL_NAME (ifn); |
3320 | } |
3321 | |
3322 | /* If the call expression is dependent, build a CALL_EXPR node |
3323 | with no type; type_dependent_expression_p recognizes |
3324 | expressions with no type as being dependent. */ |
3325 | if (type_dependent_expression_p (fn) |
3326 | || any_type_dependent_arguments_p (*args)) |
3327 | { |
3328 | if (koenig_p |
3329 | && TREE_CODE (orig_fn) == FUNCTION_DECL |
3330 | && !fndecl_built_in_p (orig_fn)) |
3331 | /* For an ADL-enabled call where unqualified lookup found a |
3332 | single non-template function, wrap it in an OVERLOAD so that |
3333 | later substitution doesn't overeagerly mark the function as |
3334 | used. */ |
3335 | orig_fn = ovl_make (orig_fn, NULL_TREE); |
3336 | result = build_min_nt_call_vec (orig_fn, *args); |
3337 | SET_EXPR_LOCATION (result, cp_expr_loc_or_input_loc (fn)); |
3338 | KOENIG_LOOKUP_P (result) = koenig_p; |
3339 | /* Disable the std::move warnings since this call was dependent |
3340 | (c++/89780, c++/107363). This also suppresses the |
3341 | -Wredundant-move warning. */ |
3342 | suppress_warning (result, OPT_Wpessimizing_move); |
3343 | |
3344 | if (cfun && cp_function_chain && !cp_unevaluated_operand) |
3345 | { |
3346 | bool abnormal = true; |
3347 | for (lkp_iterator iter (maybe_get_fns (fn)); iter; ++iter) |
3348 | { |
3349 | tree fndecl = STRIP_TEMPLATE (*iter); |
3350 | if (TREE_CODE (fndecl) != FUNCTION_DECL |
3351 | || !TREE_THIS_VOLATILE (fndecl)) |
3352 | { |
3353 | abnormal = false; |
3354 | break; |
3355 | } |
3356 | } |
3357 | /* FIXME: Stop warning about falling off end of non-void |
3358 | function. But this is wrong. Even if we only see |
3359 | no-return fns at this point, we could select a |
3360 | future-defined return fn during instantiation. Or |
3361 | vice-versa. */ |
3362 | if (abnormal) |
3363 | current_function_returns_abnormally = 1; |
3364 | } |
3365 | if (TREE_CODE (fn) == COMPONENT_REF) |
3366 | maybe_generic_this_capture (TREE_OPERAND (fn, 0), |
3367 | TREE_OPERAND (fn, 1)); |
3368 | return result; |
3369 | } |
3370 | orig_args = make_tree_vector_copy (*args); |
3371 | } |
3372 | |
3373 | if (TREE_CODE (fn) == COMPONENT_REF) |
3374 | { |
3375 | tree member = TREE_OPERAND (fn, 1); |
3376 | if (BASELINK_P (member)) |
3377 | { |
3378 | tree object = TREE_OPERAND (fn, 0); |
3379 | return build_new_method_call (object, member, |
3380 | args, NULL_TREE, |
3381 | (disallow_virtual |
3382 | ? LOOKUP_NORMAL | LOOKUP_NONVIRTUAL |
3383 | : LOOKUP_NORMAL), |
3384 | /*fn_p=*/NULL, |
3385 | complain); |
3386 | } |
3387 | } |
3388 | |
3389 | /* Per 13.3.1.1, '(&f)(...)' is the same as '(f)(...)'. */ |
3390 | if (TREE_CODE (fn) == ADDR_EXPR |
3391 | && TREE_CODE (TREE_OPERAND (fn, 0)) == OVERLOAD) |
3392 | fn = TREE_OPERAND (fn, 0); |
3393 | |
3394 | if (is_overloaded_fn (fn)) |
3395 | fn = baselink_for_fns (fn); |
3396 | |
3397 | result = NULL_TREE; |
3398 | if (BASELINK_P (fn)) |
3399 | { |
3400 | tree object; |
3401 | |
3402 | /* A call to a member function. From [over.call.func]: |
3403 | |
3404 | If the keyword this is in scope and refers to the class of |
3405 | that member function, or a derived class thereof, then the |
3406 | function call is transformed into a qualified function call |
3407 | using (*this) as the postfix-expression to the left of the |
3408 | . operator.... [Otherwise] a contrived object of type T |
3409 | becomes the implied object argument. |
3410 | |
3411 | In this situation: |
3412 | |
3413 | struct A { void f(); }; |
3414 | struct B : public A {}; |
3415 | struct C : public A { void g() { B::f(); }}; |
3416 | |
3417 | "the class of that member function" refers to `A'. But 11.2 |
3418 | [class.access.base] says that we need to convert 'this' to B* as |
3419 | part of the access, so we pass 'B' to maybe_dummy_object. */ |
3420 | |
3421 | if (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (get_first_fn (fn))) |
3422 | { |
3423 | /* A constructor call always uses a dummy object. (This constructor |
3424 | call which has the form A::A () is actually invalid and we are |
3425 | going to reject it later in build_new_method_call.) */ |
3426 | object = build_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn))); |
3427 | } |
3428 | else |
3429 | object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)), |
3430 | NULL); |
3431 | |
3432 | result = build_new_method_call (object, fn, args, NULL_TREE, |
3433 | (disallow_virtual |
3434 | ? LOOKUP_NORMAL|LOOKUP_NONVIRTUAL |
3435 | : LOOKUP_NORMAL), |
3436 | /*fn_p=*/NULL, |
3437 | complain); |
3438 | } |
3439 | else if (concept_check_p (fn)) |
3440 | { |
3441 | error_at (EXPR_LOC_OR_LOC (fn, input_location), |
3442 | "cannot call a concept as a function"); |
3443 | return error_mark_node; |
3444 | } |
3445 | else if (is_overloaded_fn (fn)) |
3446 | { |
3447 | /* If the function is an overloaded builtin, resolve it. */ |
3448 | if (TREE_CODE (fn) == FUNCTION_DECL |
3449 | && (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL |
3450 | || DECL_BUILT_IN_CLASS (fn) == BUILT_IN_MD)) |
3451 | result = resolve_overloaded_builtin (input_location, fn, *args, |
3452 | complain & tf_error); |
3453 | |
3454 | if (!result) |
3455 | { |
3456 | tree alloc_size_attr = NULL_TREE; |
3457 | if (warn_calloc_transposed_args |
3458 | && TREE_CODE (fn) == FUNCTION_DECL |
3459 | && (alloc_size_attr |
3460 | = lookup_attribute ("alloc_size", |
3461 | TYPE_ATTRIBUTES (TREE_TYPE (fn))))) |
3462 | if (TREE_VALUE (alloc_size_attr) == NULL_TREE |
3463 | || TREE_CHAIN (TREE_VALUE (alloc_size_attr)) == NULL_TREE) |
3464 | alloc_size_attr = NULL_TREE; |
3465 | if ((warn_sizeof_pointer_memaccess || alloc_size_attr) |
3466 | && (complain & tf_warning) |
3467 | && !vec_safe_is_empty (*args) |
3468 | && !processing_template_decl) |
3469 | { |
3470 | location_t sizeof_arg_loc[6]; |
3471 | tree sizeof_arg[6]; |
3472 | unsigned int i; |
3473 | for (i = 0; i < (alloc_size_attr ? 6 : 3); i++) |
3474 | { |
3475 | tree t; |
3476 | |
3477 | sizeof_arg_loc[i] = UNKNOWN_LOCATION; |
3478 | sizeof_arg[i] = NULL_TREE; |
3479 | if (i >= (*args)->length ()) |
3480 | continue; |
3481 | t = (**args)[i]; |
3482 | if (TREE_CODE (t) != SIZEOF_EXPR) |
3483 | continue; |
3484 | if (SIZEOF_EXPR_TYPE_P (t)) |
3485 | sizeof_arg[i] = TREE_TYPE (TREE_OPERAND (t, 0)); |
3486 | else |
3487 | sizeof_arg[i] = TREE_OPERAND (t, 0); |
3488 | sizeof_arg_loc[i] = EXPR_LOCATION (t); |
3489 | } |
3490 | if (warn_sizeof_pointer_memaccess) |
3491 | { |
3492 | auto same_p = same_type_ignoring_top_level_qualifiers_p; |
3493 | sizeof_pointer_memaccess_warning (sizeof_arg_loc, fn, *args, |
3494 | sizeof_arg, same_p); |
3495 | } |
3496 | if (alloc_size_attr) |
3497 | warn_for_calloc (sizeof_arg_loc, fn, *args, sizeof_arg, |
3498 | alloc_size_attr); |
3499 | } |
3500 | |
3501 | if ((complain & tf_warning) |
3502 | && TREE_CODE (fn) == FUNCTION_DECL |
3503 | && fndecl_built_in_p (fn, BUILT_IN_MEMSET) |
3504 | && vec_safe_length (*args) == 3 |
3505 | && !any_type_dependent_arguments_p (*args)) |
3506 | { |
3507 | tree arg0 = (*orig_args)[0]; |
3508 | tree arg1 = (*orig_args)[1]; |
3509 | tree arg2 = (*orig_args)[2]; |
3510 | int literal_mask = ((literal_integer_zerop (arg1) << 1) |
3511 | | (literal_integer_zerop (arg2) << 2)); |
3512 | warn_for_memset (input_location, arg0, arg2, literal_mask); |
3513 | } |
3514 | |
3515 | /* A call to a namespace-scope function. */ |
3516 | result = build_new_function_call (fn, args, complain); |
3517 | } |
3518 | } |
3519 | else if (TREE_CODE (fn) == PSEUDO_DTOR_EXPR) |
3520 | { |
3521 | if (!vec_safe_is_empty (*args)) |
3522 | error ("arguments to destructor are not allowed"); |
3523 | /* C++20/DR: If the postfix-expression names a pseudo-destructor (in |
3524 | which case the postfix-expression is a possibly-parenthesized class |
3525 | member access), the function call destroys the object of scalar type |
3526 | denoted by the object expression of the class member access. */ |
3527 | tree ob = TREE_OPERAND (fn, 0); |
3528 | if (obvalue_p (ob)) |
3529 | result = build_trivial_dtor_call (ob, true); |
3530 | else |
3531 | /* No location to clobber. */ |
3532 | result = convert_to_void (ob, ICV_STATEMENT, complain); |
3533 | } |
3534 | else if (CLASS_TYPE_P (TREE_TYPE (fn))) |
3535 | /* If the "function" is really an object of class type, it might |
3536 | have an overloaded `operator ()'. */ |
3537 | result = build_op_call (fn, args, complain); |
3538 | |
3539 | if (!result) |
3540 | /* A call where the function is unknown. */ |
3541 | result = cp_build_function_call_vec (fn, args, complain); |
3542 | |
3543 | if (processing_template_decl && result != error_mark_node) |
3544 | { |
3545 | if (INDIRECT_REF_P (result)) |
3546 | result = TREE_OPERAND (result, 0); |
3547 | |
3548 | /* Prune all but the selected function from the original overload |
3549 | set so that we can avoid some duplicate work at instantiation time. */ |
3550 | if (TREE_CODE (result) == CALL_EXPR |
3551 | && really_overloaded_fn (orig_fn)) |
3552 | { |
3553 | tree sel_fn = CALL_EXPR_FN (result); |
3554 | if (TREE_CODE (sel_fn) == COMPONENT_REF) |
3555 | { |
3556 | /* The non-dependent result of build_new_method_call. */ |
3557 | sel_fn = TREE_OPERAND (sel_fn, 1); |
3558 | gcc_assert (BASELINK_P (sel_fn)); |
3559 | } |
3560 | else if (TREE_CODE (sel_fn) == ADDR_EXPR) |
3561 | /* Our original callee wasn't wrapped in an ADDR_EXPR, |
3562 | so strip this ADDR_EXPR added by build_over_call. */ |
3563 | sel_fn = TREE_OPERAND (sel_fn, 0); |
3564 | orig_fn = sel_fn; |
3565 | } |
3566 | |
3567 | tree r = build_call_vec (TREE_TYPE (result), orig_fn, orig_args); |
3568 | SET_EXPR_LOCATION (r, input_location); |
3569 | KOENIG_LOOKUP_P (r) = koenig_p; |
3570 | TREE_NO_WARNING (r) = TREE_NO_WARNING (result); |
3571 | release_tree_vector (orig_args); |
3572 | result = convert_from_reference (r); |
3573 | } |
3574 | |
3575 | return result; |
3576 | } |
3577 | |
3578 | /* Finish a call to a postfix increment or decrement of EXPR. (Which |
3579 | is indicated by CODE, which should be POSTINCREMENT_EXPR or |
3580 | POSTDECREMENT_EXPR.) */ |
3581 | |
3582 | cp_expr |
3583 | finish_increment_expr (cp_expr expr, enum tree_code code) |
3584 | { |
3585 | /* input_location holds the location of the trailing operator token. |
3586 | Build a location of the form: |
3587 | expr++ |
3588 | ~~~~^~ |
3589 | with the caret at the operator token, ranging from the start |
3590 | of EXPR to the end of the operator token. */ |
3591 | location_t combined_loc = make_location (input_location, |
3592 | expr.get_start (), |
3593 | get_finish (input_location)); |
3594 | cp_expr result = build_x_unary_op (combined_loc, code, expr, |
3595 | NULL_TREE, tf_warning_or_error); |
3596 | /* TODO: build_x_unary_op doesn't honor the location, so set it here. */ |
3597 | result.set_location (combined_loc); |
3598 | return result; |
3599 | } |
3600 | |
3601 | /* Finish a use of `this'. Returns an expression for `this'. */ |
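| /* A few illustrative contexts (S, f, g and i are hypothetical names): |
| |
| struct S { |
| int i; |
| int f () { return this->i; } // OK: implicit object parameter |
| int g (this S self) { return self.i; } // `this' unavailable here |
| }; |
| |
| Inside a lambda, a use of `this' refers to the enclosing member |
| function's `this' via capture, handled by lambda_expr_this_capture |
| below. */ |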
3602 | |
3603 | tree |
3604 | finish_this_expr (void) |
3605 | { |
3606 | tree result = NULL_TREE; |
3607 | |
3608 | if (current_class_type && LAMBDA_TYPE_P (current_class_type)) |
3609 | result = (lambda_expr_this_capture |
3610 | (CLASSTYPE_LAMBDA_EXPR (current_class_type), /*add*/true)); |
3611 | else if (current_class_ptr) |
3612 | result = current_class_ptr; |
3613 | |
3614 | if (result) |
3615 | /* The keyword 'this' is a prvalue expression. */ |
3616 | return rvalue (result); |
3617 | |
3618 | tree fn = current_nonlambda_function (); |
3619 | if (fn && DECL_XOBJ_MEMBER_FUNCTION_P (fn)) |
3620 | { |
3621 | auto_diagnostic_group d; |
3622 | error ("%<this%> is unavailable for explicit object member " |
3623 | "functions"); |
3624 | tree xobj_parm = DECL_ARGUMENTS (fn); |
3625 | gcc_assert (xobj_parm); |
3626 | tree parm_name = DECL_NAME (xobj_parm); |
3627 | |
3628 | static tree remembered_fn = NULL_TREE; |
3629 | /* Only output this diagnostic once per function. */ |
3630 | if (remembered_fn == fn) |
3631 | /* Early escape. */; |
3632 | else if (parm_name) |
3633 | inform (DECL_SOURCE_LOCATION (xobj_parm), |
3634 | "use explicit object parameter %qs instead", |
3635 | IDENTIFIER_POINTER (parm_name)); |
3636 | else |
3637 | inform (DECL_SOURCE_LOCATION (xobj_parm), |
3638 | "name the explicit object parameter"); |
3639 | |
3640 | remembered_fn = fn; |
3641 | } |
3642 | else if (fn && DECL_STATIC_FUNCTION_P (fn)) |
3643 | error ("%<this%> is unavailable for static member functions"); |
3644 | else if (fn && processing_contract_condition && DECL_CONSTRUCTOR_P (fn)) |
3645 | error ("invalid use of %<this%> before it is valid"); |
3646 | else if (fn && processing_contract_condition && DECL_DESTRUCTOR_P (fn)) |
3647 | error ("invalid use of %<this%> after it is valid"); |
3648 | else if (fn) |
3649 | error ("invalid use of %<this%> in non-member function"); |
3650 | else |
3651 | error ("invalid use of %<this%> at top level"); |
3652 | return error_mark_node; |
3653 | } |
3654 | |
3655 | /* Finish a pseudo-destructor expression. If SCOPE is NULL, the |
3656 | expression was of the form `OBJECT.~DESTRUCTOR' where DESTRUCTOR is |
3657 | the TYPE for the type given. If SCOPE is non-NULL, the expression |
3658 | was of the form `OBJECT.SCOPE::~DESTRUCTOR'. |
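| |
| Illustrative examples of the two forms (I, f, g and p are |
| hypothetical names): |
| |
| typedef int I; |
| void f (int *p) { p->~I (); } // OBJECT.~DESTRUCTOR |
| void g (int *p) { p->I::~I (); } // OBJECT.SCOPE::~DESTRUCTOR |
| |
| Since int has no real destructor, such a call only evaluates the |
| object expression. */ |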
3659 | |
3660 | tree |
3661 | finish_pseudo_destructor_expr (tree object, tree scope, tree destructor, |
3662 | location_t loc, tsubst_flags_t complain) |
3663 | { |
3664 | if (object == error_mark_node || destructor == error_mark_node) |
3665 | return error_mark_node; |
3666 | |
3667 | gcc_assert (TYPE_P (destructor)); |
3668 | |
3669 | if (!processing_template_decl) |
3670 | { |
3671 | if (scope == error_mark_node) |
3672 | { |
3673 | if (complain & tf_error) |
3674 | error_at (loc, "invalid qualifying scope in pseudo-destructor name"); |
3675 | return error_mark_node; |
3676 | } |
3677 | if (is_auto (destructor)) |
3678 | destructor = TREE_TYPE (object); |
3679 | if (scope && TYPE_P (scope) && !check_dtor_name (scope, destructor)) |
3680 | { |
3681 | if (complain & tf_error) |
3682 | error_at (loc, |
3683 | "qualified type %qT does not match destructor name ~%qT", |
3684 | scope, destructor); |
3685 | return error_mark_node; |
3686 | } |
3687 | |
3688 | |
3689 | /* [expr.pseudo] says both: |
3690 | |
3691 | The type designated by the pseudo-destructor-name shall be |
3692 | the same as the object type. |
3693 | |
3694 | and: |
3695 | |
3696 | The cv-unqualified versions of the object type and of the |
3697 | type designated by the pseudo-destructor-name shall be the |
3698 | same type. |
3699 | |
3700 | We implement the more generous second sentence, since that is |
3701 | what most other compilers do. */ |
3702 | if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (object), |
3703 | destructor)) |
3704 | { |
3705 | if (complain & tf_error) |
3706 | error_at (loc, "%qE is not of type %qT", object, destructor); |
3707 | return error_mark_node; |
3708 | } |
3709 | } |
3710 | |
3711 | tree type = (type_dependent_expression_p (object) |
3712 | ? NULL_TREE : void_type_node); |
3713 | |
3714 | return build3_loc (loc, PSEUDO_DTOR_EXPR, type, object, |
3715 | scope, destructor); |
3716 | } |
3717 | |
3718 | /* Finish an expression of the form CODE EXPR. */ |
3719 | |
3720 | cp_expr |
3721 | finish_unary_op_expr (location_t op_loc, enum tree_code code, cp_expr expr, |
3722 | tsubst_flags_t complain) |
3723 | { |
3724 | /* Build a location of the form: |
3725 | ++expr |
3726 | ^~~~~~ |
3727 | with the caret at the operator token, ranging from the start |
3728 | of the operator token to the end of EXPR. */ |
3729 | location_t combined_loc = make_location (op_loc, |
3730 | op_loc, expr.get_finish ()); |
3731 | cp_expr result = build_x_unary_op (combined_loc, code, expr, |
3732 | NULL_TREE, complain); |
3733 | /* TODO: build_x_unary_op doesn't always honor the location. */ |
3734 | result.set_location (combined_loc); |
3735 | |
3736 | if (result == error_mark_node) |
3737 | return result; |
3738 | |
3739 | if (!(complain & tf_warning)) |
3740 | return result; |
3741 | |
3742 | tree result_ovl = result; |
3743 | tree expr_ovl = expr; |
3744 | |
3745 | if (!processing_template_decl) |
3746 | expr_ovl = cp_fully_fold (expr_ovl); |
3747 | |
3748 | if (!CONSTANT_CLASS_P (expr_ovl) |
3749 | || TREE_OVERFLOW_P (expr_ovl)) |
3750 | return result; |
3751 | |
3752 | if (!processing_template_decl) |
3753 | result_ovl = cp_fully_fold (result_ovl); |
3754 | |
3755 | if (CONSTANT_CLASS_P (result_ovl) && TREE_OVERFLOW_P (result_ovl)) |
3756 | overflow_warning (combined_loc, result_ovl); |
3757 | |
3758 | return result; |
3759 | } |
3760 | |
3761 | /* Return true if CONSTRUCTOR EXPR after pack expansion could have no |
3762 | elements. */ |
3763 | |
3764 | static bool |
3765 | maybe_zero_constructor_nelts (tree expr) |
3766 | { |
3767 | if (CONSTRUCTOR_NELTS (expr) == 0) |
3768 | return true; |
3769 | if (!processing_template_decl) |
3770 | return false; |
3771 | for (constructor_elt &elt : CONSTRUCTOR_ELTS (expr)) |
3772 | if (!PACK_EXPANSION_P (elt.value)) |
3773 | return false; |
3774 | return true; |
3775 | } |
3776 | |
3777 | /* Finish a compound-literal expression or C++11 functional cast with aggregate |
3778 | initializer. TYPE is the type to which the CONSTRUCTOR in COMPOUND_LITERAL |
3779 | is being cast. |
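| |
| Two illustrative uses (A, f, a and i are hypothetical names): |
| |
| struct A { int i, j; }; |
| void f () |
| { |
| A a = A{ 1, 2 }; // C++11 functional cast with aggregate initializer |
| int i = (int){ 42 }; // C99-style compound literal, a GNU extension in C++ |
| } |
| |
| Both forms are processed here. */ |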
3780 | |
3781 | tree |
3782 | finish_compound_literal (tree type, tree compound_literal, |
3783 | tsubst_flags_t complain, |
3784 | fcl_t fcl_context) |
3785 | { |
3786 | if (type == error_mark_node) |
3787 | return error_mark_node; |
3788 | |
3789 | if (TYPE_REF_P (type)) |
3790 | { |
3791 | compound_literal |
3792 | = finish_compound_literal (TREE_TYPE (type), compound_literal, |
3793 | complain, fcl_context); |
3794 | /* The prvalue is then used to direct-initialize the reference. */ |
3795 | tree r = (perform_implicit_conversion_flags |
3796 | (type, compound_literal, complain, LOOKUP_NORMAL)); |
3797 | return convert_from_reference (r); |
3798 | } |
3799 | |
3800 | if (!TYPE_OBJ_P (type)) |
3801 | { |
3802 | /* DR2351 */ |
3803 | if (VOID_TYPE_P (type) && CONSTRUCTOR_NELTS (compound_literal) == 0) |
3804 | { |
3805 | if (!processing_template_decl) |
3806 | return void_node; |
3807 | TREE_TYPE (compound_literal) = type; |
3808 | TREE_HAS_CONSTRUCTOR (compound_literal) = 1; |
3809 | CONSTRUCTOR_IS_DEPENDENT (compound_literal) = 0; |
3810 | return compound_literal; |
3811 | } |
3812 | else if (VOID_TYPE_P (type) |
3813 | && processing_template_decl |
3814 | && maybe_zero_constructor_nelts (compound_literal)) |
3815 | /* If there are only packs in compound_literal, it could |
3816 | be void{} after pack expansion. */; |
3817 | else |
3818 | { |
3819 | if (complain & tf_error) |
3820 | error ("compound literal of non-object type %qT", type); |
3821 | return error_mark_node; |
3822 | } |
3823 | } |
3824 | |
3825 | if (template_placeholder_p (type)) |
3826 | { |
3827 | type = do_auto_deduction (type, compound_literal, type, complain, |
3828 | adc_variable_type); |
3829 | if (type == error_mark_node) |
3830 | return error_mark_node; |
3831 | } |
3832 | /* C++23 auto{x}. */ |
3833 | else if (is_auto (type) |
3834 | && !AUTO_IS_DECLTYPE (type) |
3835 | && CONSTRUCTOR_NELTS (compound_literal) == 1) |
3836 | { |
3837 | if (is_constrained_auto (type)) |
3838 | { |
3839 | if (complain & tf_error) |
3840 | error ("%<auto{x}%> cannot be constrained"); |
3841 | return error_mark_node; |
3842 | } |
3843 | else if (cxx_dialect < cxx23) |
3844 | pedwarn (input_location, OPT_Wc__23_extensions, |
3845 | "%<auto{x}%> only available with " |
3846 | "%<-std=c++23%> or %<-std=gnu++23%>"); |
3847 | type = do_auto_deduction (type, compound_literal, type, complain, |
3848 | adc_variable_type); |
3849 | if (type == error_mark_node) |
3850 | return error_mark_node; |
3851 | } |
3852 | |
3853 | /* Used to hold a copy of the compound literal in a template. */ |
3854 | tree orig_cl = NULL_TREE; |
3855 | |
3856 | if (processing_template_decl) |
3857 | { |
3858 | const bool dependent_p |
3859 | = (instantiation_dependent_expression_p (compound_literal) |
3860 | || dependent_type_p (type)); |
3861 | if (dependent_p) |
3862 | /* We're about to return, no need to copy. */ |
3863 | orig_cl = compound_literal; |
3864 | else |
3865 | /* We're going to need a copy. */ |
3866 | orig_cl = unshare_constructor (compound_literal); |
3867 | TREE_TYPE (orig_cl) = type; |
3868 | /* Mark the expression as a compound literal. */ |
3869 | TREE_HAS_CONSTRUCTOR (orig_cl) = 1; |
3870 | /* And as instantiation-dependent. */ |
3871 | CONSTRUCTOR_IS_DEPENDENT (orig_cl) = dependent_p; |
3872 | if (fcl_context == fcl_c99) |
3873 | CONSTRUCTOR_C99_COMPOUND_LITERAL (orig_cl) = 1; |
3874 | /* If the compound literal is dependent, we're done for now. */ |
3875 | if (dependent_p) |
3876 | return orig_cl; |
3877 | /* Otherwise, do go on to e.g. check narrowing. */ |
3878 | } |
3879 | |
3880 | type = complete_type (type); |
3881 | |
3882 | if (TYPE_NON_AGGREGATE_CLASS (type)) |
3883 | { |
3884 | /* Trying to deal with a CONSTRUCTOR instead of a TREE_LIST |
3885 | everywhere that deals with function arguments would be a pain, so |
3886 | just wrap it in a TREE_LIST. The parser set a flag so we know |
3887 | that it came from T{} rather than T({}). */ |
3888 | CONSTRUCTOR_IS_DIRECT_INIT (compound_literal) = 1; |
3889 | compound_literal = build_tree_list (NULL_TREE, compound_literal); |
3890 | return build_functional_cast (input_location, type, |
3891 | compound_literal, complain); |
3892 | } |
3893 | |
3894 | if (TREE_CODE (type) == ARRAY_TYPE |
3895 | && check_array_initializer (NULL_TREE, type, compound_literal)) |
3896 | return error_mark_node; |
3897 | compound_literal = reshape_init (type, compound_literal, complain); |
3898 | if (SCALAR_TYPE_P (type) |
3899 | && !BRACE_ENCLOSED_INITIALIZER_P (compound_literal) |
3900 | && !check_narrowing (type, compound_literal, complain)) |
3901 | return error_mark_node; |
3902 | if (TREE_CODE (type) == ARRAY_TYPE |
3903 | && TYPE_DOMAIN (type) == NULL_TREE) |
3904 | { |
3905 | cp_complete_array_type_or_error (&type, compound_literal, |
3906 | false, complain); |
3907 | if (type == error_mark_node) |
3908 | return error_mark_node; |
3909 | } |
3910 | compound_literal = digest_init_flags (type, compound_literal, |
3911 | LOOKUP_NORMAL | LOOKUP_NO_NARROWING, |
3912 | complain); |
3913 | if (compound_literal == error_mark_node) |
3914 | return error_mark_node; |
3915 | |
3916 | /* If we're in a template, return the original compound literal. */ |
3917 | if (orig_cl) |
3918 | return orig_cl; |
3919 | |
3920 | if (TREE_CODE (compound_literal) == CONSTRUCTOR) |
3921 | { |
3922 | TREE_HAS_CONSTRUCTOR (compound_literal) = true; |
3923 | if (fcl_context == fcl_c99) |
3924 | CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1; |
3925 | } |
3926 | |
3927 | /* Put static/constant array temporaries in static variables. */ |
3928 | /* FIXME all C99 compound literals should be variables rather than C++ |
3929 | temporaries, unless they are used as an aggregate initializer. */ |
3930 | if ((!at_function_scope_p () || CP_TYPE_CONST_P (type)) |
3931 | && fcl_context == fcl_c99 |
3932 | && TREE_CODE (type) == ARRAY_TYPE |
3933 | && !TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type) |
3934 | && initializer_constant_valid_p (compound_literal, type)) |
3935 | { |
3936 | tree decl = create_temporary_var (type); |
3937 | DECL_CONTEXT (decl) = NULL_TREE; |
3938 | DECL_INITIAL (decl) = compound_literal; |
3939 | TREE_STATIC (decl) = 1; |
3940 | if (literal_type_p (type) && CP_TYPE_CONST_NON_VOLATILE_P (type)) |
3941 | { |
3942 | /* 5.19 says that a constant expression can include an |
3943 | lvalue-rvalue conversion applied to "a glvalue of literal type |
3944 | that refers to a non-volatile temporary object initialized |
3945 | with a constant expression". Rather than try to communicate |
3946 | that this VAR_DECL is a temporary, just mark it constexpr. */ |
3947 | DECL_DECLARED_CONSTEXPR_P (decl) = true; |
3948 | DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true; |
3949 | TREE_CONSTANT (decl) = true; |
3950 | } |
3951 | cp_apply_type_quals_to_decl (cp_type_quals (type), decl); |
3952 | decl = pushdecl_top_level (decl); |
3953 | DECL_NAME (decl) = make_anon_name (); |
3954 | SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl)); |
3955 | /* Make sure the destructor is callable. */ |
3956 | tree clean = cxx_maybe_build_cleanup (decl, complain); |
3957 | if (clean == error_mark_node) |
3958 | return error_mark_node; |
3959 | return decl; |
3960 | } |
3961 | |
3962 | /* Represent other compound literals with TARGET_EXPR so we produce |
3963 | a prvalue, and can elide copies. */ |
3964 | if (!VECTOR_TYPE_P (type) |
3965 | && (TREE_CODE (compound_literal) == CONSTRUCTOR |
3966 | || TREE_CODE (compound_literal) == VEC_INIT_EXPR)) |
3967 | { |
3968 | /* The CONSTRUCTOR is now an initializer, not a compound literal. */ |
3969 | if (TREE_CODE (compound_literal) == CONSTRUCTOR) |
3970 | TREE_HAS_CONSTRUCTOR (compound_literal) = false; |
3971 | compound_literal = get_target_expr (compound_literal, complain); |
3972 | } |
3973 | else |
3974 | /* For e.g. int{42} just make sure it's a prvalue. */ |
3975 | compound_literal = rvalue (compound_literal); |
3976 | |
3977 | return compound_literal; |
3978 | } |
3979 | |
3980 | /* Return the declaration for the function-name variable indicated by |
3981 | ID. |
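| |
| For instance (f is a hypothetical function), in |
| |
| void f () { const char *s = __FUNCTION__; } |
| |
| the use of __FUNCTION__ reaches this function, with ID identifying |
| which of __FUNCTION__, __PRETTY_FUNCTION__ or __func__ was used. */ |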
3982 | |
3983 | tree |
3984 | finish_fname (tree id) |
3985 | { |
3986 | tree decl; |
3987 | |
3988 | decl = fname_decl (input_location, C_RID_CODE (id), id); |
3989 | if (processing_template_decl && current_function_decl |
3990 | && decl != error_mark_node) |
3991 | decl = DECL_NAME (decl); |
3992 | return decl; |
3993 | } |
3994 | |
3995 | /* Finish a translation unit. */ |
3996 | |
3997 | void |
3998 | finish_translation_unit (void) |
3999 | { |
4000 | /* In case there were missing closebraces, |
4001 | get us back to the global binding level. */ |
4002 | pop_everything (); |
4003 | while (current_namespace != global_namespace) |
4004 | pop_namespace (); |
4005 | |
4006 | /* Do file scope __FUNCTION__ et al. */ |
4007 | finish_fname_decls (); |
4008 | |
4009 | if (vec_safe_length (scope_chain->omp_declare_target_attribute)) |
4010 | { |
4011 | cp_omp_declare_target_attr |
4012 | a = scope_chain->omp_declare_target_attribute->pop (); |
4013 | if (!errorcount) |
4014 | error ("%qs without corresponding %qs", |
4015 | a.device_type >= 0 ? "#pragma omp begin declare target" |
4016 | : "#pragma omp declare target", |
4017 | "#pragma omp end declare target"); |
4018 | vec_safe_truncate (scope_chain->omp_declare_target_attribute, 0); |
4019 | } |
4020 | if (vec_safe_length (scope_chain->omp_begin_assumes)) |
4021 | { |
4022 | if (!errorcount) |
4023 | error ("%qs without corresponding %qs", |
4024 | "#pragma omp begin assumes", "#pragma omp end assumes"); |
4025 | vec_safe_truncate (scope_chain->omp_begin_assumes, 0); |
4026 | } |
4027 | } |
4028 | |
4029 | /* Finish a template type parameter, specified as AGGR IDENTIFIER. |
4030 | Returns the parameter. */ |
4031 | |
4032 | tree |
4033 | finish_template_type_parm (tree aggr, tree identifier) |
4034 | { |
4035 | if (aggr != class_type_node) |
4036 | { |
4037 | permerror (input_location, "template type parameters must use the keyword %<class%> or %<typename%>"); |
4038 | aggr = class_type_node; |
4039 | } |
4040 | |
4041 | return build_tree_list (aggr, identifier); |
4042 | } |
4043 | |
4044 | /* Finish a template template parameter, specified as AGGR IDENTIFIER. |
4045 | Returns the parameter. |
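| |
| An illustrative declaration (Wrapper and C are hypothetical names): |
| |
| template <template <typename> class C> struct Wrapper { C<int> c; }; |
| |
| The parameter C is a template template parameter built up here. */ |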
4046 | |
4047 | tree |
4048 | finish_template_template_parm (tree aggr, tree identifier) |
4049 | { |
4050 | tree decl = build_decl (input_location, |
4051 | TYPE_DECL, identifier, NULL_TREE); |
4052 | |
4053 | tree tmpl = build_lang_decl (TEMPLATE_DECL, identifier, NULL_TREE); |
4054 | DECL_TEMPLATE_PARMS (tmpl) = current_template_parms; |
4055 | DECL_TEMPLATE_RESULT (tmpl) = decl; |
4056 | DECL_ARTIFICIAL (decl) = 1; |
4057 | |
4058 | /* Associate the constraints with the underlying declaration, |
4059 | not the template. */ |
4060 | tree constr = current_template_constraints (); |
4061 | set_constraints (decl, constr); |
4062 | |
4063 | end_template_decl (); |
4064 | |
4065 | gcc_assert (DECL_TEMPLATE_PARMS (tmpl)); |
4066 | |
4067 | check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl), |
4068 | /*is_primary=*/true, /*is_partial=*/false, |
4069 | /*is_friend=*/0); |
4070 | |
4071 | return finish_template_type_parm (aggr, tmpl); |
4072 | } |
4073 | |
4074 | /* ARGUMENT is the default-argument value for a template template |
4075 | parameter. If ARGUMENT is invalid, issue error messages and return |
4076 | the ERROR_MARK_NODE. Otherwise, ARGUMENT itself is returned. |
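| |
| An illustrative valid default (List, C and User are hypothetical |
| names): |
| |
| template <typename T> struct List {}; |
| template <template <typename> class C = List> struct User {}; |
| |
| A non-template argument such as a plain type would be rejected |
| below. */ |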
4077 | |
4078 | tree |
4079 | check_template_template_default_arg (tree argument) |
4080 | { |
4081 | if (TREE_CODE (argument) != TEMPLATE_DECL |
4082 | && TREE_CODE (argument) != TEMPLATE_TEMPLATE_PARM |
4083 | && TREE_CODE (argument) != UNBOUND_CLASS_TEMPLATE) |
4084 | { |
4085 | if (TREE_CODE (argument) == TYPE_DECL) |
4086 | { |
4087 | if (tree t = maybe_get_template_decl_from_type_decl (argument)) |
4088 | if (TREE_CODE (t) == TEMPLATE_DECL) |
4089 | return t; |
4090 | error ("invalid use of type %qT as a default value for a template " |
4091 | "template-parameter", TREE_TYPE (argument)); |
4092 | } |
4093 | else |
4094 | error ("invalid default argument for a template template parameter"); |
4095 | return error_mark_node; |
4096 | } |
4097 | |
4098 | return argument; |
4099 | } |
4100 | |
4101 | /* Begin a class definition, as indicated by T. */ |
4102 | |
4103 | tree |
4104 | begin_class_definition (tree t) |
4105 | { |
4106 | if (error_operand_p (t) || error_operand_p (TYPE_MAIN_DECL (t))) |
4107 | return error_mark_node; |
4108 | |
4109 | if (processing_template_parmlist && !LAMBDA_TYPE_P (t)) |
4110 | { |
4111 | error ("definition of %q#T inside template parameter list", t); |
4112 | return error_mark_node; |
4113 | } |
4114 | |
4115 | /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733 |
4116 | are passed the same as decimal scalar types. */ |
4117 | if (TREE_CODE (t) == RECORD_TYPE |
4118 | && !processing_template_decl) |
4119 | { |
4120 | tree ns = TYPE_CONTEXT (t); |
4121 | if (ns && TREE_CODE (ns) == NAMESPACE_DECL |
4122 | && DECL_CONTEXT (ns) == std_node |
4123 | && DECL_NAME (ns) |
4124 | && id_equal (DECL_NAME (ns), "decimal")) |
4125 | { |
4126 | const char *n = TYPE_NAME_STRING (t); |
4127 | if ((strcmp (n, "decimal32") == 0) |
4128 | || (strcmp (n, "decimal64") == 0) |
4129 | || (strcmp (n, "decimal128") == 0)) |
4130 | TYPE_TRANSPARENT_AGGR (t) = 1; |
4131 | } |
4132 | } |
4133 | |
4134 | /* A non-implicit typename comes from code like: |
4135 | |
4136 | template <typename T> struct A { |
4137 | template <typename U> struct A<T>::B ... |
4138 | |
4139 | This is erroneous. */ |
4140 | else if (TREE_CODE (t) == TYPENAME_TYPE) |
4141 | { |
4142 | error ("invalid definition of qualified type %qT", t); |
4143 | t = error_mark_node; |
4144 | } |
4145 | |
4146 | if (t == error_mark_node || ! MAYBE_CLASS_TYPE_P (t)) |
4147 | { |
4148 | t = make_class_type (RECORD_TYPE); |
4149 | pushtag (make_anon_name (), t); |
4150 | } |
4151 | |
4152 | if (TYPE_BEING_DEFINED (t)) |
4153 | { |
4154 | t = make_class_type (TREE_CODE (t)); |
4155 | pushtag (TYPE_IDENTIFIER (t), t); |
4156 | } |
4157 | |
4158 | if (modules_p ()) |
4159 | { |
4160 | if (!module_may_redeclare (TYPE_NAME (t))) |
4161 | return error_mark_node; |
4162 | set_instantiating_module (TYPE_NAME (t)); |
4163 | set_defining_module (TYPE_NAME (t)); |
4164 | } |
4165 | |
4166 | maybe_process_partial_specialization (t); |
4167 | pushclass (t); |
4168 | TYPE_BEING_DEFINED (t) = 1; |
4169 | class_binding_level->defining_class_p = 1; |
4170 | |
4171 | if (flag_pack_struct) |
4172 | { |
4173 | tree v; |
4174 | TYPE_PACKED (t) = 1; |
4175 | /* Even though the type is being defined for the first time |
4176 | here, there might have been a forward declaration, so there |
4177 | might be cv-qualified variants of T. */ |
4178 | for (v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v)) |
4179 | TYPE_PACKED (v) = 1; |
4180 | } |
4181 | /* Reset the interface data, at the earliest possible |
4182 | moment, as it might have been set via a class foo; |
4183 | before. */ |
4184 | if (! TYPE_UNNAMED_P (t)) |
4185 | { |
4186 | struct c_fileinfo *finfo = \ |
4187 | get_fileinfo (LOCATION_FILE (input_location)); |
4188 | CLASSTYPE_INTERFACE_ONLY (t) = finfo->interface_only; |
4189 | SET_CLASSTYPE_INTERFACE_UNKNOWN_X |
4190 | (t, finfo->interface_unknown); |
4191 | } |
4192 | reset_specialization (); |
4193 | |
4194 | /* Make a declaration for this class in its own scope. */ |
4195 | build_self_reference (); |
4196 | |
4197 | return t; |
4198 | } |
4199 | |
4200 | /* Finish the member declaration given by DECL. */ |
4201 | |
4202 | void |
4203 | finish_member_declaration (tree decl) |
4204 | { |
4205 | if (decl == error_mark_node || decl == NULL_TREE) |
4206 | return; |
4207 | |
4208 | if (decl == void_type_node) |
4209 | /* The COMPONENT was a friend, not a member, and so there's |
4210 | nothing for us to do. */ |
4211 | return; |
4212 | |
4213 | /* We should see only one DECL at a time. */ |
4214 | gcc_assert (DECL_CHAIN (decl) == NULL_TREE); |
4215 | |
4216 | /* Don't add decls after definition. */ |
4217 | gcc_assert (TYPE_BEING_DEFINED (current_class_type) |
4218 | /* We can add lambda types when late parsing default |
4219 | arguments. */ |
4220 | || LAMBDA_TYPE_P (TREE_TYPE (decl))); |
4221 | |
4222 | /* Set up access control for DECL. */ |
4223 | TREE_PRIVATE (decl) |
4224 | = (current_access_specifier == access_private_node); |
4225 | TREE_PROTECTED (decl) |
4226 | = (current_access_specifier == access_protected_node); |
4227 | if (TREE_CODE (decl) == TEMPLATE_DECL) |
4228 | { |
4229 | TREE_PRIVATE (DECL_TEMPLATE_RESULT (decl)) = TREE_PRIVATE (decl); |
4230 | TREE_PROTECTED (DECL_TEMPLATE_RESULT (decl)) = TREE_PROTECTED (decl); |
4231 | } |
4232 | |
4233 | /* Mark the DECL as a member of the current class, unless it's |
4234 | a member of an enumeration. */ |
4235 | if (TREE_CODE (decl) != CONST_DECL) |
4236 | DECL_CONTEXT (decl) = current_class_type; |
4237 | |
4238 | /* Remember the single FIELD_DECL an anonymous aggregate type is used for. */ |
4239 | if (TREE_CODE (decl) == FIELD_DECL |
4240 | && ANON_AGGR_TYPE_P (TREE_TYPE (decl))) |
4241 | { |
4242 | gcc_assert (!ANON_AGGR_TYPE_FIELD (TYPE_MAIN_VARIANT (TREE_TYPE (decl)))); |
4243 | ANON_AGGR_TYPE_FIELD (TYPE_MAIN_VARIANT (TREE_TYPE (decl))) = decl; |
4244 | } |
4245 | |
4246 | if (TREE_CODE (decl) == USING_DECL) |
4247 | /* Avoid debug info for class-scope USING_DECLS for now, we'll |
4248 | call cp_emit_debug_info_for_using later. */ |
4249 | DECL_IGNORED_P (decl) = 1; |
4250 | |
4251 | /* Check for bare parameter packs in the non-static data member |
4252 | declaration. */ |
4253 | if (TREE_CODE (decl) == FIELD_DECL) |
4254 | { |
4255 | if (check_for_bare_parameter_packs (TREE_TYPE (decl))) |
4256 | TREE_TYPE (decl) = error_mark_node; |
4257 | if (check_for_bare_parameter_packs (DECL_ATTRIBUTES (decl))) |
4258 | DECL_ATTRIBUTES (decl) = NULL_TREE; |
4259 | } |
4260 | |
4261 | /* [dcl.link] |
4262 | |
4263 | A C language linkage is ignored for the names of class members |
4264 | and the member function type of class member functions. */ |
4265 | if (DECL_LANG_SPECIFIC (decl)) |
4266 | SET_DECL_LANGUAGE (decl, lang_cplusplus); |
4267 | |
4268 | bool add = false; |
4269 | |
4270 | /* Functions and non-functions are added differently. */ |
4271 | if (DECL_DECLARES_FUNCTION_P (decl)) |
4272 | add = add_method (current_class_type, decl, false); |
4273 | /* Enter the DECL into the scope of the class, if the class |
4274 | isn't a closure (whose fields are supposed to be unnamed). */ |
4275 | else if (CLASSTYPE_LAMBDA_EXPR (current_class_type) |
4276 | || maybe_push_used_methods (decl) |
4277 | || pushdecl_class_level (decl)) |
4278 | add = true; |
4279 | |
4280 | if (add) |
4281 | { |
4282 | /* All TYPE_DECLs go at the end of TYPE_FIELDS. Ordinary fields |
4283 | go at the beginning. The reason is that |
4284 | legacy_nonfn_member_lookup searches the list in order, and we |
4285 | want a field name to override a type name so that the "struct |
4286 | stat hack" will work. In particular: |
4287 | |
4288 | struct S { enum E { }; static const int E = 5; int ary[S::E]; } s; |
4289 | |
4290 | is valid. */ |
4291 | |
4292 | if (TREE_CODE (decl) == TYPE_DECL) |
4293 | TYPE_FIELDS (current_class_type) |
4294 | = chainon (TYPE_FIELDS (current_class_type), decl); |
4295 | else |
4296 | { |
4297 | DECL_CHAIN (decl) = TYPE_FIELDS (current_class_type); |
4298 | TYPE_FIELDS (current_class_type) = decl; |
4299 | } |
4300 | |
4301 | maybe_add_class_template_decl_list (current_class_type, decl, |
4302 | /*friend_p=*/0); |
4303 | } |
4304 | } |
4305 | |
4306 | /* Finish processing a complete template declaration. The PARMS are |
4307 | the template parameters. */ |
4308 | |
4309 | void |
4310 | finish_template_decl (tree parms) |
4311 | { |
4312 | if (parms) |
4313 | end_template_decl (); |
4314 | else |
4315 | end_specialization (); |
4316 | } |
4317 | |
4318 | // Returns the template type of the class scope being entered. If we're |
4319 | // entering a constrained class scope, TYPE is the class template |
4320 | // scope being entered and we may need to match the intended type with |
4321 | // a constrained specialization. For example: |
4322 | // |
4323 | // template<Object T> |
4324 | // struct S { void f(); }; #1 |
4325 | // |
4326 | // template<Object T> |
4327 | // void S<T>::f() { } #2 |
4328 | // |
4329 | // We check, in #2, that S<T> refers precisely to the type declared by |
4330 | // #1 (i.e., that the constraints match). Note that the following should |
4331 | // be an error since there is no specialization of S<T> that is |
4332 | // unconstrained, but this is not diagnosed here. |
4333 | // |
4334 | // template<typename T> |
4335 | // void S<T>::f() { } |
4336 | // |
4337 | // We cannot diagnose this problem here since this function also matches |
4338 | // qualified template names that are not part of a definition. For example: |
4339 | // |
4340 | // template<Integral T, Floating_point U> |
4341 | // typename pair<T, U>::first_type void f(T, U); |
4342 | // |
4343 | // Here, it is unlikely that there is a partial specialization of |
4344 | // pair constrained for Integral and Floating_point arguments. |
4345 | // |
4346 | // The general rule is: if a constrained specialization with matching |
4347 | // constraints is found, return that type. Also note that if TYPE is not a |
4348 | // class-type (e.g. a typename type), then no fixup is needed. |
4349 | |
4350 | static tree |
4351 | fixup_template_type (tree type) |
4352 | { |
4353 | // Find the template parameter list at a depth appropriate to |
4354 | // the scope we're trying to enter. |
4355 | tree parms = current_template_parms; |
4356 | int depth = template_class_depth (type); |
4357 | for (int n = current_template_depth; n > depth && parms; --n) |
4358 | parms = TREE_CHAIN (parms); |
4359 | if (!parms) |
4360 | return type; |
4361 | tree cur_reqs = TEMPLATE_PARMS_CONSTRAINTS (parms); |
4362 | tree cur_constr = build_constraints (cur_reqs, NULL_TREE); |
4363 | |
4364 | // Search for a specialization whose type and constraints match. |
4365 | tree tmpl = CLASSTYPE_TI_TEMPLATE (type); |
4366 | tree specs = DECL_TEMPLATE_SPECIALIZATIONS (tmpl); |
4367 | while (specs) |
4368 | { |
4369 | tree spec_constr = get_constraints (TREE_VALUE (specs)); |
4370 | |
4371 | // If the type and constraints match a specialization, then we |
4372 | // are entering that type. |
4373 | if (same_type_p (type, TREE_TYPE (specs)) |
4374 | && equivalent_constraints (cur_constr, spec_constr)) |
4375 | return TREE_TYPE (specs); |
4376 | specs = TREE_CHAIN (specs); |
4377 | } |
4378 | |
4379 | // If no specialization matches, then we must return the type |
4380 | // previously found. |
4381 | return type; |
4382 | } |
4383 | |
4384 | /* Finish processing a template-id (which names a type) of the form |
4385 | NAME < ARGS >. Return the TYPE_DECL for the type named by the |
4386 | template-id. If ENTERING_SCOPE is nonzero we are about to enter |
4387 | the scope of template-id indicated. */ |
4388 | |
4389 | tree |
4390 | finish_template_type (tree name, tree args, int entering_scope) |
4391 | { |
4392 | tree type; |
4393 | |
4394 | type = lookup_template_class (name, args, |
4395 | NULL_TREE, NULL_TREE, |
4396 | tf_warning_or_error | tf_user); |
4397 | if (entering_scope) |
4398 | type = adjust_type_for_entering_scope (type); |
4399 | |
4400 | /* If we might be entering the scope of a partial specialization, |
4401 | find the one with the right constraints. */ |
4402 | if (flag_concepts |
4403 | && entering_scope |
4404 | && CLASS_TYPE_P (type) |
4405 | && CLASSTYPE_TEMPLATE_INFO (type) |
4406 | && dependent_type_p (type) |
4407 | && PRIMARY_TEMPLATE_P (CLASSTYPE_TI_TEMPLATE (type))) |
4408 | type = fixup_template_type (type); |
4409 | |
4410 | if (type == error_mark_node) |
4411 | return type; |
4412 | else if (CLASS_TYPE_P (type) && !alias_type_or_template_p (type)) |
4413 | return TYPE_STUB_DECL (type); |
4414 | else |
4415 | return TYPE_NAME (type); |
4416 | } |
4417 | |
4418 | /* Finish processing a BASE_CLASS with the indicated ACCESS_SPECIFIER. |
4419 | Return a TREE_LIST containing the ACCESS_SPECIFIER and the |
4420 | BASE_CLASS, or NULL_TREE if an error occurred. The |
4421 | ACCESS_SPECIFIER is one of |
4422 | access_{default,public,protected,private}_node. For a virtual base |
4423 | we set TREE_TYPE. |
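| |
| An illustrative base-clause (B and D are hypothetical types): |
| |
| struct B {}; |
| struct D : public virtual B {}; |
| |
| Here BASE is B, ACCESS is access_public_node and VIRTUAL_P is true, |
| so TREE_TYPE is set on the result. */ |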
4424 | |
4425 | tree |
4426 | finish_base_specifier (tree base, tree access, bool virtual_p) |
4427 | { |
4428 | tree result; |
4429 | |
4430 | if (base == error_mark_node) |
4431 | { |
4432 | error ("invalid base-class specification"); |
4433 | result = NULL_TREE; |
4434 | } |
4435 | else if (! MAYBE_CLASS_TYPE_P (base)) |
4436 | { |
4437 | error ("%qT is not a class type", base); |
4438 | result = NULL_TREE; |
4439 | } |
4440 | else |
4441 | { |
4442 | if (cp_type_quals (base) != 0) |
4443 | { |
4444 | /* DR 484: Can a base-specifier name a cv-qualified |
4445 | class type? */ |
4446 | base = TYPE_MAIN_VARIANT (base); |
4447 | } |
4448 | result = build_tree_list (access, base); |
4449 | if (virtual_p) |
4450 | TREE_TYPE (result) = integer_type_node; |
4451 | } |
4452 | |
4453 | return result; |
4454 | } |
4455 | |
4456 | /* If FNS is a member function, a set of member functions, or a |
4457 | template-id referring to one or more member functions, return a |
4458 | BASELINK for FNS, incorporating the current access context. |
4459 | Otherwise, return FNS unchanged. */ |
4460 | |
4461 | tree |
4462 | baselink_for_fns (tree fns) |
4463 | { |
4464 | tree scope; |
4465 | tree cl; |
4466 | |
4467 | if (BASELINK_P (fns) |
4468 | || error_operand_p (fns)) |
4469 | return fns; |
4470 | |
4471 | scope = ovl_scope (fns); |
4472 | if (!CLASS_TYPE_P (scope)) |
4473 | return fns; |
4474 | |
4475 | cl = currently_open_derived_class (scope); |
4476 | if (!cl) |
4477 | cl = scope; |
4478 | tree access_path = TYPE_BINFO (cl); |
4479 | tree conv_path = (cl == scope ? access_path |
4480 | : lookup_base (cl, scope, ba_any, NULL, tf_none)); |
4481 | return build_baselink (conv_path, access_path, fns, /*optype=*/NULL_TREE); |
4482 | } |
4483 | |
4484 | /* Returns true iff DECL is a variable from a function outside |
4485 | the current one. */ |
4486 | |
4487 | static bool |
4488 | outer_var_p (tree decl) |
4489 | { |
4490 | /* These should have been stripped or otherwise handled by the caller. */ |
4491 | gcc_checking_assert (!REFERENCE_REF_P (decl)); |
4492 | |
4493 | return ((VAR_P (decl) || TREE_CODE (decl) == PARM_DECL) |
4494 | && DECL_FUNCTION_SCOPE_P (decl) |
4495 | /* Don't get confused by temporaries. */ |
4496 | && DECL_NAME (decl) |
4497 | && (DECL_CONTEXT (decl) != current_function_decl |
4498 | || parsing_nsdmi ())); |
4499 | } |
4500 | |
4501 | /* As above, but also checks that DECL is automatic. */ |
4502 | |
4503 | bool |
4504 | outer_automatic_var_p (tree decl) |
4505 | { |
4506 | return (outer_var_p (decl) |
4507 | && !TREE_STATIC (decl)); |
4508 | } |
4509 | |
4510 | /* DECL satisfies outer_automatic_var_p. Possibly complain about it or |
4511 | rewrite it for lambda capture. |
4512 | |
4513 | If ODR_USE is true, we're being called from mark_use, and we complain about |
4514 | use of constant variables. If ODR_USE is false, we're being called for the |
4515 | id-expression, and we do lambda capture. |
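| |
| A small illustrative sketch (f, i, c, l1 and l2 are hypothetical |
| names): |
| |
| void f () |
| { |
| int i = 0; |
| const int c = 42; |
| auto l1 = [=] { return i; }; // id-expression: default-capture i |
| auto l2 = [] { return c; }; // no odr-use: c yields a constant |
| } |
| |
| The first use triggers lambda capture here; the second is allowed |
| without any capture because the constant is usable without odr-use. */ |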
4516 | |
4517 | tree |
4518 | process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use) |
4519 | { |
4520 | if (cp_unevaluated_operand) |
4521 | { |
4522 | tree type = TREE_TYPE (decl); |
4523 | if (!dependent_type_p (type) |
4524 | && variably_modified_type_p (type, NULL_TREE)) |
4525 | /* VLAs are used even in unevaluated context. */; |
4526 | else |
4527 | /* It's not a use (3.2) if we're in an unevaluated context. */ |
4528 | return decl; |
4529 | } |
4530 | if (decl == error_mark_node) |
4531 | return decl; |
4532 | |
4533 | tree context = DECL_CONTEXT (decl); |
4534 | tree containing_function = current_function_decl; |
4535 | tree lambda_stack = NULL_TREE; |
4536 | tree lambda_expr = NULL_TREE; |
4537 | tree initializer = convert_from_reference (decl); |
4538 | tree var = strip_normal_capture_proxy (decl); |
4539 | |
4540 | /* Mark it as used now even if the use is ill-formed. */ |
4541 | if (!mark_used (decl, complain)) |
4542 | return error_mark_node; |
4543 | |
4544 | if (parsing_nsdmi ()) |
4545 | containing_function = NULL_TREE; |
4546 | |
4547 | if (containing_function && LAMBDA_FUNCTION_P (containing_function)) |
4548 | { |
4549 | /* Check whether we've already built a proxy. */ |
4550 | tree d = retrieve_local_specialization (var); |
4551 | |
4552 | if (d && d != decl && is_capture_proxy (d)) |
4553 | { |
4554 | if (DECL_CONTEXT (d) == containing_function) |
4555 | /* We already have an inner proxy. */ |
4556 | return d; |
4557 | else |
4558 | /* We need to capture an outer proxy. */ |
4559 | return process_outer_var_ref (d, complain, odr_use); |
4560 | } |
4561 | } |
4562 | |
4563 | /* If we are in a lambda function, we can move out until we hit |
4564 | 1. the context, |
4565 | 2. a non-lambda function, or |
4566 | 3. a non-default capturing lambda function. */ |
4567 | while (context != containing_function |
4568 | /* containing_function can be null with invalid generic lambdas. */ |
4569 | && containing_function |
4570 | && LAMBDA_FUNCTION_P (containing_function)) |
4571 | { |
4572 | tree closure = DECL_CONTEXT (containing_function); |
4573 | lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure); |
4574 | |
4575 | if (TYPE_CLASS_SCOPE_P (closure)) |
4576 | /* A lambda in an NSDMI (c++/64496). */ |
4577 | break; |
4578 | |
4579 | if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE) |
4580 | break; |
4581 | |
4582 | lambda_stack = tree_cons (NULL_TREE, lambda_expr, lambda_stack); |
4583 | |
4584 | containing_function = decl_function_context (containing_function); |
4585 | } |
4586 | |
4587 | /* In a lambda within a template, wait until instantiation time to implicitly |
4588 | capture a parameter pack. We want to wait because we don't know if we're |
4589 | capturing the whole pack or a single element, and it's OK to wait because |
4590 | find_parameter_packs_r walks into the lambda body. */ |
4591 | if (context == containing_function |
4592 | && DECL_PACK_P (decl)) |
4593 | return decl; |
4594 | |
4595 | if (lambda_expr && VAR_P (decl) && DECL_ANON_UNION_VAR_P (decl)) |
4596 | { |
4597 | if (complain & tf_error) |
4598 | error ("cannot capture member %qD of anonymous union", decl); |
4599 | return error_mark_node; |
4600 | } |
4601 | /* Do lambda capture when processing the id-expression, not when |
4602 | odr-using a variable. */ |
4603 | if (!odr_use && context == containing_function) |
4604 | decl = add_default_capture (lambda_stack, |
4605 | /*id=*/DECL_NAME (decl), initializer); |
4606 | /* Only an odr-use of an outer automatic variable causes an |
4607 | error, and a constant variable can decay to a prvalue |
4608 | constant without odr-use. So don't complain yet. */ |
4609 | else if (!odr_use && decl_constant_var_p (var)) |
4610 | return var; |
4611 | else if (lambda_expr) |
4612 | { |
4613 | if (complain & tf_error) |
4614 | { |
4615 | auto_diagnostic_group d; |
4616 | error ("%qD is not captured", decl); |
4617 | tree closure = LAMBDA_EXPR_CLOSURE (lambda_expr); |
4618 | if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE) |
4619 | inform (location_of (closure), |
4620 | "the lambda has no capture-default"); |
4621 | else if (TYPE_CLASS_SCOPE_P (closure)) |
4622 | inform (UNKNOWN_LOCATION, "lambda in local class %q+T cannot " |
4623 | "capture variables from the enclosing context", |
4624 | TYPE_CONTEXT (closure)); |
4625 | inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl); |
4626 | } |
4627 | return error_mark_node; |
4628 | } |
4629 | else if (processing_contract_condition && (TREE_CODE (decl) == PARM_DECL)) |
4630 | /* Use of a parameter in a contract condition is fine. */ |
4631 | return decl; |
4632 | else |
4633 | { |
4634 | if (complain & tf_error) |
4635 | { |
4636 | auto_diagnostic_group d; |
4637 | error (VAR_P (decl) |
4638 | ? G_("use of local variable with automatic storage from " |
4639 | "containing function") |
4640 | : G_("use of parameter from containing function")); |
4641 | inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl); |
4642 | } |
4643 | return error_mark_node; |
4644 | } |
4645 | return decl; |
4646 | } |
4647 | |
4648 | /* ID_EXPRESSION is a representation of a parsed, but unprocessed, |
4649 | id-expression. (See cp_parser_id_expression for details.) SCOPE, |
4650 | if non-NULL, is the type or namespace used to explicitly qualify |
4651 | ID_EXPRESSION. DECL is the entity to which that name has been |
4652 | resolved. |
4653 | |
4654 | *CONSTANT_EXPRESSION_P is true if we are presently parsing a |
4655 | constant-expression. In that case, *NON_CONSTANT_EXPRESSION_P will |
4656 | be set to true if this expression isn't permitted in a |
4657 | constant-expression, but it is otherwise not set by this function. |
4658 | *ALLOW_NON_CONSTANT_EXPRESSION_P is true if we are parsing a |
4659 | constant-expression, but a non-constant expression is also |
4660 | permissible. |
4661 | |
4662 | DONE is true if this expression is a complete postfix-expression; |
4663 | it is false if this expression is followed by '->', '[', '(', etc. |
4664 | ADDRESS_P is true iff this expression is the operand of '&'. |
4665 | TEMPLATE_P is true iff the qualified-id was of the form |
4666 | "A::template B". TEMPLATE_ARG_P is true iff this qualified name |
4667 | appears as a template argument. |
4668 | |
4669 | If an error occurs, and it is the kind of error that might cause |
4670 | the parser to abort a tentative parse, *ERROR_MSG is filled in. It |
4671 | is the caller's responsibility to issue the message. *ERROR_MSG |
4672 | will be a string with static storage duration, so the caller need |
4673 | not "free" it. |
4674 | |
4675 | Return an expression for the entity, after issuing appropriate |
4676 | diagnostics. This function is also responsible for transforming a |
4677 | reference to a non-static member into a COMPONENT_REF that makes |
4678 | the use of "this" explicit. |
4679 | |
4680 | Upon return, *IDK will be filled in appropriately. */ |
4681 | static cp_expr |
4682 | finish_id_expression_1 (tree id_expression, |
4683 | tree decl, |
4684 | tree scope, |
4685 | cp_id_kind *idk, |
4686 | bool integral_constant_expression_p, |
4687 | bool allow_non_integral_constant_expression_p, |
4688 | bool *non_integral_constant_expression_p, |
4689 | bool template_p, |
4690 | bool done, |
4691 | bool address_p, |
4692 | bool template_arg_p, |
4693 | const char **error_msg, |
4694 | location_t location) |
4695 | { |
4696 | decl = strip_using_decl (decl); |
4697 | |
4698 | /* Initialize the output parameters. */ |
4699 | *idk = CP_ID_KIND_NONE; |
4700 | *error_msg = NULL; |
4701 | |
4702 | if (id_expression == error_mark_node) |
4703 | return error_mark_node; |
4704 | /* If we have a template-id, then no further lookup is |
4705 | required. If the template-id was for a template-class, we |
4706 | will sometimes have a TYPE_DECL at this point. */ |
4707 | else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR |
4708 | || TREE_CODE (decl) == TYPE_DECL) |
4709 | ; |
4710 | /* Look up the name. */ |
4711 | else |
4712 | { |
4713 | if (decl == error_mark_node) |
4714 | { |
4715 | /* Name lookup failed. */ |
4716 | if (scope |
4717 | && (!TYPE_P (scope) |
4718 | || (!dependentish_scope_p (scope) |
4719 | && !(identifier_p (id_expression) |
4720 | && IDENTIFIER_CONV_OP_P (id_expression) |
4721 | && dependent_type_p (TREE_TYPE (id_expression)))))) |
4722 | { |
4723 | /* If the qualifying type is non-dependent (and the name |
4724 | does not name a conversion operator to a dependent |
4725 | type), issue an error. */ |
4726 | qualified_name_lookup_error (scope, id_expression, decl, location); |
4727 | return error_mark_node; |
4728 | } |
4729 | else if (!scope) |
4730 | { |
4731 | /* It may be resolved via Koenig lookup. */ |
4732 | *idk = CP_ID_KIND_UNQUALIFIED; |
4733 | return id_expression; |
4734 | } |
4735 | else |
4736 | decl = id_expression; |
4737 | } |
4738 | |
4739 | /* Remember that the name was used in the definition of |
4740 | the current class so that we can check later to see if |
4741 | the meaning would have been different after the class |
4742 | was entirely defined. */ |
4743 | if (!scope && decl != error_mark_node && identifier_p (id_expression)) |
4744 | maybe_note_name_used_in_class (id_expression, decl); |
4745 | |
4746 | /* A use in an unevaluated operand might not be instantiated appropriately |
4747 | if tsubst_copy builds a dummy parm, or if we never instantiate a |
4748 | generic lambda, so mark it now. */ |
4749 | if (processing_template_decl && cp_unevaluated_operand) |
4750 | mark_type_use (decl); |
4751 | |
4752 | /* Disallow uses of local variables from containing functions, except |
4753 | within lambda-expressions. */ |
4754 | if (outer_automatic_var_p (decl)) |
4755 | { |
4756 | decl = process_outer_var_ref (decl, tf_warning_or_error); |
4757 | if (decl == error_mark_node) |
4758 | return error_mark_node; |
4759 | } |
4760 | |
4761 | /* Also disallow uses of function parameters outside the function |
4762 | body, except inside an unevaluated context (i.e. decltype). */ |
4763 | if (TREE_CODE (decl) == PARM_DECL |
4764 | && DECL_CONTEXT (decl) == NULL_TREE |
4765 | && !CONSTRAINT_VAR_P (decl) |
4766 | && !cp_unevaluated_operand |
4767 | && !processing_contract_condition |
4768 | && !processing_omp_trait_property_expr) |
4769 | { |
4770 | *error_msg = G_("use of parameter outside function body"); |
4771 | return error_mark_node; |
4772 | } |
4773 | } |
4774 | |
4775 | /* If we didn't find anything, or what we found was a type, |
4776 | then this wasn't really an id-expression. */ |
4777 | if (TREE_CODE (decl) == TEMPLATE_DECL |
4778 | && !DECL_FUNCTION_TEMPLATE_P (decl)) |
4779 | { |
4780 | *error_msg = G_("missing template arguments"); |
4781 | return error_mark_node; |
4782 | } |
4783 | else if (TREE_CODE (decl) == TYPE_DECL |
4784 | || TREE_CODE (decl) == NAMESPACE_DECL) |
4785 | { |
4786 | *error_msg = G_("expected primary-expression"); |
4787 | return error_mark_node; |
4788 | } |
4789 | |
4790 | /* If the name resolved to a template parameter, there is no |
4791 | need to look it up again later. */ |
4792 | if ((TREE_CODE (decl) == CONST_DECL && DECL_TEMPLATE_PARM_P (decl)) |
4793 | || TREE_CODE (decl) == TEMPLATE_PARM_INDEX) |
4794 | { |
4795 | tree r; |
4796 | |
4797 | *idk = CP_ID_KIND_NONE; |
4798 | if (TREE_CODE (decl) == TEMPLATE_PARM_INDEX) |
4799 | decl = TEMPLATE_PARM_DECL (decl); |
4800 | r = DECL_INITIAL (decl); |
4801 | if (CLASS_TYPE_P (TREE_TYPE (r)) && !CP_TYPE_CONST_P (TREE_TYPE (r))) |
4802 | { |
4803 | /* If the entity is a template parameter object for a template |
4804 | parameter of type T, the type of the expression is const T. */ |
4805 | tree ctype = TREE_TYPE (r); |
4806 | ctype = cp_build_qualified_type (ctype, (cp_type_quals (ctype) |
4807 | | TYPE_QUAL_CONST)); |
4808 | r = build1 (VIEW_CONVERT_EXPR, ctype, r); |
4809 | } |
4810 | r = convert_from_reference (r); |
4811 | if (integral_constant_expression_p |
4812 | && !dependent_type_p (TREE_TYPE (decl)) |
4813 | && !(INTEGRAL_OR_ENUMERATION_TYPE_P (TREE_TYPE (r)))) |
4814 | { |
4815 | if (!allow_non_integral_constant_expression_p) |
4816 | error ("template parameter %qD of type %qT is not allowed in " |
4817 | "an integral constant expression because it is not of " |
4818 | "integral or enumeration type", decl, TREE_TYPE (decl)); |
4819 | *non_integral_constant_expression_p = true; |
4820 | } |
4821 | return r; |
4822 | } |
4823 | else if (TREE_CODE (decl) == UNBOUND_CLASS_TEMPLATE) |
4824 | { |
4825 | gcc_checking_assert (scope); |
4826 | *idk = CP_ID_KIND_QUALIFIED; |
4827 | cp_warn_deprecated_use_scopes (scope); |
4828 | decl = finish_qualified_id_expr (scope, decl, done, address_p, |
4829 | template_p, template_arg_p, |
4830 | tf_warning_or_error); |
4831 | } |
4832 | else |
4833 | { |
4834 | if (TREE_CODE (decl) == TEMPLATE_ID_EXPR |
4835 | && variable_template_p (TREE_OPERAND (decl, 0)) |
4836 | && !concept_check_p (decl)) |
4837 | /* Try resolving this variable TEMPLATE_ID_EXPR (which is always |
4838 | considered type-dependent) now, so that the dependence test that |
4839 | follows gives us the right answer: if it represents a non-dependent |
4840 | variable template-id then finish_template_variable will yield the |
4841 | corresponding non-dependent VAR_DECL. */ |
4842 | decl = finish_template_variable (decl); |
4843 | |
4844 | bool dependent_p = type_dependent_expression_p (decl); |
4845 | |
4846 | /* If the declaration was explicitly qualified, indicate |
4847 | that. The semantics of `A::f(3)' differ from those of |
4848 | `f(3)' if `f' is virtual. */ |
4849 | *idk = (scope |
4850 | ? CP_ID_KIND_QUALIFIED |
4851 | : (TREE_CODE (decl) == TEMPLATE_ID_EXPR |
4852 | ? CP_ID_KIND_TEMPLATE_ID |
4853 | : (dependent_p |
4854 | ? CP_ID_KIND_UNQUALIFIED_DEPENDENT |
4855 | : CP_ID_KIND_UNQUALIFIED))); |
4856 | |
4857 | if (dependent_p |
4858 | && !scope |
4859 | && DECL_P (decl) |
4860 | && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl))) |
4861 | /* Dependent type attributes on the decl mean that the TREE_TYPE is |
4862 | wrong, so just return the identifier. */ |
4863 | return id_expression; |
4864 | |
4865 | if (DECL_CLASS_TEMPLATE_P (decl)) |
4866 | { |
4867 | error ("use of class template %qT as expression", decl); |
4868 | return error_mark_node; |
4869 | } |
4870 | |
4871 | if (TREE_CODE (decl) == TREE_LIST) |
4872 | { |
4873 | /* Ambiguous reference to base members. */ |
4874 | auto_diagnostic_group d; |
4875 | error ("request for member %qD is ambiguous in " |
4876 | "multiple inheritance lattice", id_expression); |
4877 | print_candidates (decl); |
4878 | return error_mark_node; |
4879 | } |
4880 | |
4881 | /* Mark variable-like entities as used. Functions are similarly |
4882 | marked either below or after overload resolution. */ |
4883 | if ((VAR_P (decl) |
4884 | || TREE_CODE (decl) == PARM_DECL |
4885 | || TREE_CODE (decl) == CONST_DECL |
4886 | || TREE_CODE (decl) == RESULT_DECL) |
4887 | && !mark_used (decl)) |
4888 | return error_mark_node; |
4889 | |
4890 | /* Only certain kinds of names are allowed in constant |
4891 | expression. Template parameters have already |
4892 | been handled above. */ |
4893 | if (! error_operand_p (decl) |
4894 | && !dependent_p |
4895 | && integral_constant_expression_p |
4896 | && !decl_constant_var_p (decl) |
4897 | && TREE_CODE (decl) != CONST_DECL |
4898 | && !builtin_valid_in_constant_expr_p (decl) |
4899 | && !concept_check_p (decl)) |
4900 | { |
4901 | if (!allow_non_integral_constant_expression_p) |
4902 | { |
4903 | error ("%qD cannot appear in a constant-expression", decl); |
4904 | return error_mark_node; |
4905 | } |
4906 | *non_integral_constant_expression_p = true; |
4907 | } |
4908 | |
4909 | if (tree wrap = maybe_get_tls_wrapper_call (decl)) |
4910 | /* Replace an evaluated use of the thread_local variable with |
4911 | a call to its wrapper. */ |
4912 | decl = wrap; |
4913 | else if (concept_check_p (decl)) |
4914 | { |
4915 | /* Nothing more to do. All of the analysis for concept checks |
4916 | is done by build_concept_id, called from the parser. */ |
4917 | } |
4918 | else if (scope) |
4919 | { |
4920 | if (TREE_CODE (decl) == SCOPE_REF) |
4921 | { |
4922 | gcc_assert (same_type_p (scope, TREE_OPERAND (decl, 0))); |
4923 | decl = TREE_OPERAND (decl, 1); |
4924 | } |
4925 | |
4926 | decl = (adjust_result_of_qualified_name_lookup |
4927 | (decl, scope, current_nonlambda_class_type())); |
4928 | |
4929 | cp_warn_deprecated_use_scopes (scope); |
4930 | |
4931 | if (TYPE_P (scope)) |
4932 | decl = finish_qualified_id_expr (scope, |
4933 | decl, |
4934 | done, |
4935 | address_p, |
4936 | template_p, |
4937 | template_arg_p, |
4938 | tf_warning_or_error); |
4939 | else |
4940 | decl = convert_from_reference (decl); |
4941 | } |
4942 | else if (TREE_CODE (decl) == FIELD_DECL) |
4943 | { |
4944 | /* Since SCOPE is NULL here, this is an unqualified name. |
4945 | Access checking has been performed during name lookup |
4946 | already. Turn off checking to avoid duplicate errors. */ |
4947 | push_deferring_access_checks (dk_no_check); |
4948 | decl = finish_non_static_data_member (decl, NULL_TREE, |
4949 | /*qualifying_scope=*/NULL_TREE); |
4950 | pop_deferring_access_checks (); |
4951 | } |
4952 | else if (is_overloaded_fn (decl)) |
4953 | { |
4954 | /* We only need to look at the first function, |
4955 | because all the fns share the attribute we're |
4956 | concerned with (all member fns or all non-members). */ |
4957 | tree first_fn = get_first_fn (decl); |
4958 | first_fn = STRIP_TEMPLATE (first_fn); |
4959 | |
4960 | if (!template_arg_p |
4961 | && (TREE_CODE (first_fn) == USING_DECL |
4962 | || (TREE_CODE (first_fn) == FUNCTION_DECL |
4963 | && DECL_FUNCTION_MEMBER_P (first_fn) |
4964 | && !shared_member_p (decl)))) |
4965 | { |
4966 | /* A set of member functions. */ |
4967 | decl = maybe_dummy_object (DECL_CONTEXT (first_fn), 0); |
4968 | return finish_class_member_access_expr (decl, id_expression, |
4969 | /*template_p=*/false, |
4970 | tf_warning_or_error); |
4971 | } |
4972 | |
4973 | decl = baselink_for_fns (decl); |
4974 | } |
4975 | else |
4976 | { |
4977 | if (DECL_P (decl) && DECL_NONLOCAL (decl) |
4978 | && DECL_CLASS_SCOPE_P (decl)) |
4979 | { |
4980 | tree context = context_for_name_lookup (decl); |
4981 | if (context != current_class_type) |
4982 | { |
4983 | tree path = currently_open_derived_class (context); |
4984 | if (!path) |
4985 | /* PATH can be null for using an enum of an unrelated |
4986 | class; we checked its access in lookup_using_decl. |
4987 | |
4988 | ??? Should this case make a clone instead, like |
4989 | handle_using_decl? */ |
4990 | gcc_assert (TREE_CODE (decl) == CONST_DECL); |
4991 | else |
4992 | perform_or_defer_access_check (TYPE_BINFO (path), |
4993 | decl, decl, |
4994 | tf_warning_or_error); |
4995 | } |
4996 | } |
4997 | |
4998 | decl = convert_from_reference (decl); |
4999 | } |
5000 | } |
5001 | |
5002 | return cp_expr (decl, location); |
5003 | } |
5004 | |
5005 | /* As per finish_id_expression_1, but adding a wrapper node |
5006 | around the result if needed to express LOCATION. */ |
5007 | |
5008 | cp_expr |
5009 | finish_id_expression (tree id_expression, |
5010 | tree decl, |
5011 | tree scope, |
5012 | cp_id_kind *idk, |
5013 | bool integral_constant_expression_p, |
5014 | bool allow_non_integral_constant_expression_p, |
5015 | bool *non_integral_constant_expression_p, |
5016 | bool template_p, |
5017 | bool done, |
5018 | bool address_p, |
5019 | bool template_arg_p, |
5020 | const char **error_msg, |
5021 | location_t location) |
5022 | { |
5023 | cp_expr result |
5024 | = finish_id_expression_1 (id_expression, decl, scope, idk, |
5025 | integral_constant_expression_p, |
5026 | allow_non_integral_constant_expression_p, |
5027 | non_integral_constant_expression_p, |
5028 | template_p, done, address_p, template_arg_p, |
5029 | error_msg, location); |
5030 | return result.maybe_add_location_wrapper (); |
5031 | } |
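/* As an illustration (example user code only): in

     struct S { int m; int get () { return m; } };

   the unqualified id-expression `m' inside S::get resolves to a
   FIELD_DECL, and the FIELD_DECL branch above rewrites it as the
   COMPONENT_REF `(*this).m', making the use of `this' explicit.  */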
5032 | |
5033 | /* Implement the __typeof keyword: Return the type of EXPR, suitable for |
5034 | use as a type-specifier. */ |
5035 | |
5036 | tree |
5037 | finish_typeof (tree expr) |
5038 | { |
5039 | tree type; |
5040 | |
5041 | if (type_dependent_expression_p (expr)) |
5042 | { |
5043 | type = cxx_make_type (TYPEOF_TYPE); |
5044 | TYPEOF_TYPE_EXPR (type) = expr; |
5045 | SET_TYPE_STRUCTURAL_EQUALITY (type); |
5046 | |
5047 | return type; |
5048 | } |
5049 | |
5050 | expr = mark_type_use (expr); |
5051 | |
5052 | type = unlowered_expr_type (expr); |
5053 | |
5054 | if (!type || type == unknown_type_node) |
5055 | { |
5056 | error ("type of %qE is unknown", expr); |
5057 | return error_mark_node; |
5058 | } |
5059 | |
5060 | return type; |
5061 | } |
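/* Illustrative uses (example user code only):

     int i = 0;
     __typeof (i + 1) j = 0;          // `j' has type `int'

     template <typename T>
     void f (T t)
     {
       __typeof (t + 1) x = t;        // type-dependent operand
     }

   In the second case the operand is type-dependent, so a TYPEOF_TYPE
   placeholder is returned and resolved at instantiation time.  */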
5062 | |
5063 | /* Implement the __underlying_type keyword: Return the underlying |
5064 | type of TYPE, suitable for use as a type-specifier. */ |
5065 | |
5066 | tree |
5067 | finish_underlying_type (tree type) |
5068 | { |
5069 | if (!complete_type_or_else (type, NULL_TREE)) |
5070 | return error_mark_node; |
5071 | |
5072 | if (TREE_CODE (type) != ENUMERAL_TYPE) |
5073 | { |
5074 | error ("%qT is not an enumeration type", type); |
5075 | return error_mark_node; |
5076 | } |
5077 | |
5078 | tree underlying_type = ENUM_UNDERLYING_TYPE (type); |
5079 | |
5080 | /* Fixup necessary in this case because ENUM_UNDERLYING_TYPE |
5081 | includes TYPE_MIN_VALUE and TYPE_MAX_VALUE information. |
5082 | See finish_enum_value_list for details. */ |
5083 | if (!ENUM_FIXED_UNDERLYING_TYPE_P (type)) |
5084 | underlying_type |
5085 | = c_common_type_for_mode (TYPE_MODE (underlying_type), |
5086 | TYPE_UNSIGNED (underlying_type)); |
5087 | |
5088 | return underlying_type; |
5089 | } |
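/* For example (illustrative only):

     enum E { A = 1, B = 1000 };            // no fixed underlying type
     enum class F : unsigned char { X };    // fixed underlying type

   __underlying_type (F) is `unsigned char'; for E the result is the
   integral type matching the enumeration's mode and signedness, as
   computed by c_common_type_for_mode above.  */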
5090 | |
5091 | /* Implement the __type_pack_element keyword: Return the type |
5092 | at index IDX within TYPES. */ |
5093 | |
5094 | static tree |
5095 | finish_type_pack_element (tree idx, tree types, tsubst_flags_t complain) |
5096 | { |
5097 | idx = maybe_constant_value (idx, NULL_TREE, mce_true); |
5098 | if (!INTEGRAL_TYPE_P (TREE_TYPE (idx))) |
5099 | { |
5100 | if (complain & tf_error) |
5101 | error ("pack index has non-integral type %qT", TREE_TYPE (idx)); |
5102 | return error_mark_node; |
5103 | } |
5104 | if (TREE_CODE (idx) != INTEGER_CST) |
5105 | { |
5106 | if (complain & tf_error) |
5107 | { |
5108 | error ("pack index is not an integral constant"); |
5109 | cxx_constant_value (idx); |
5110 | } |
5111 | return error_mark_node; |
5112 | } |
5113 | if (tree_int_cst_sgn (idx) < 0) |
5114 | { |
5115 | if (complain & tf_error) |
5116 | error ("pack index %qE is negative", idx); |
5117 | return error_mark_node; |
5118 | } |
5119 | if (wi::to_widest (idx) >= TREE_VEC_LENGTH (types)) |
5120 | { |
5121 | if (complain & tf_error) |
5122 | error ("pack index %qE is out of range for pack of length %qd", |
5123 | idx, TREE_VEC_LENGTH (types)); |
5124 | return error_mark_node; |
5125 | } |
5126 | return TREE_VEC_ELT (types, tree_to_shwi (idx)); |
5127 | } |
5128 | |
5129 | /* In a pack-index T...[N], return the element at index IDX within TYPES. |
5130 | PARENTHESIZED_P is true iff the pack index was wrapped in (). */ |
5131 | |
5132 | tree |
5133 | pack_index_element (tree idx, tree types, bool parenthesized_p, |
5134 | tsubst_flags_t complain) |
5135 | { |
5136 | tree r = finish_type_pack_element (idx, types, complain); |
5137 | if (parenthesized_p) |
5138 | /* For the benefit of decltype(auto). */ |
5139 | r = force_paren_expr (r); |
5140 | return r; |
5141 | } |
5142 | |
5143 | /* Implement the __direct_bases keyword: Return the direct base classes |
5144 | of type. */ |
5145 | |
5146 | tree |
5147 | calculate_direct_bases (tree type, tsubst_flags_t complain) |
5148 | { |
5149 | if (!complete_type_or_maybe_complain (type, NULL_TREE, complain) |
5150 | || !NON_UNION_CLASS_TYPE_P (type)) |
5151 | return make_tree_vec (0); |
5152 | |
5153 | releasing_vec vector; |
5154 | vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type)); |
5155 | tree binfo; |
5156 | unsigned i; |
5157 | |
5158 | /* Virtual bases are initialized first */ |
5159 | for (i = 0; base_binfos->iterate (i, &binfo); i++) |
5160 | if (BINFO_VIRTUAL_P (binfo)) |
5161 | vec_safe_push (vector, binfo); |
5162 | |
5163 | /* Now non-virtuals */ |
5164 | for (i = 0; base_binfos->iterate (i, &binfo); i++) |
5165 | if (!BINFO_VIRTUAL_P (binfo)) |
5166 | vec_safe_push (vector, binfo); |
5167 | |
5168 | tree bases_vec = make_tree_vec (vector->length ()); |
5169 | |
5170 | for (i = 0; i < vector->length (); ++i) |
5171 | TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]); |
5172 | |
5173 | return bases_vec; |
5174 | } |
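/* Illustrative example (user code only):

     struct A {}; struct B {}; struct V {};
     struct D : A, B, virtual V {};

   __direct_bases (D) yields the pack (V, A, B): virtual bases come
   first, followed by the non-virtual direct bases in declaration
   order, matching the two loops above.  */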
5175 | |
5176 | /* Implement the __bases keyword: Return the base classes |
5177 | of type */ |
5178 | |
5179 | /* Find morally non-virtual base classes by walking binfo hierarchy */ |
5180 | /* Virtual base classes are handled separately in calculate_bases */ |
5181 | |
5182 | static tree |
5183 | dfs_calculate_bases_pre (tree binfo, void * /*data_*/) |
5184 | { |
5185 | /* Don't walk bases of virtual bases */ |
5186 | return BINFO_VIRTUAL_P (binfo) ? dfs_skip_bases : NULL_TREE; |
5187 | } |
5188 | |
5189 | static tree |
5190 | dfs_calculate_bases_post (tree binfo, void *data_) |
5191 | { |
5192 | vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_); |
5193 | if (!BINFO_VIRTUAL_P (binfo)) |
5194 | vec_safe_push (*data, BINFO_TYPE (binfo)); |
5195 | return NULL_TREE; |
5196 | } |
5197 | |
5198 | /* Calculates the morally non-virtual base classes of a class */ |
5199 | static vec<tree, va_gc> * |
5200 | calculate_bases_helper (tree type) |
5201 | { |
5202 | vec<tree, va_gc> *vector = make_tree_vector (); |
5203 | |
5204 | /* Now add non-virtual base classes in order of construction */ |
5205 | if (TYPE_BINFO (type)) |
5206 | dfs_walk_all (TYPE_BINFO (type), |
5207 | dfs_calculate_bases_pre, dfs_calculate_bases_post, &vector); |
5208 | return vector; |
5209 | } |
5210 | |
5211 | tree |
5212 | calculate_bases (tree type, tsubst_flags_t complain) |
5213 | { |
5214 | if (!complete_type_or_maybe_complain (type, NULL_TREE, complain) |
5215 | || !NON_UNION_CLASS_TYPE_P (type)) |
5216 | return make_tree_vec (0); |
5217 | |
5218 | releasing_vec vector; |
5219 | tree bases_vec = NULL_TREE; |
5220 | unsigned i; |
5221 | vec<tree, va_gc> *vbases; |
5222 | tree binfo; |
5223 | |
5224 | /* First go through virtual base classes */ |
5225 | for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0; |
5226 | vec_safe_iterate (vbases, i, &binfo); i++) |
5227 | { |
5228 | releasing_vec vbase_bases |
5229 | = calculate_bases_helper (BINFO_TYPE (binfo)); |
5230 | vec_safe_splice (vector, vbase_bases); |
5231 | } |
5232 | |
5233 | /* Now for the non-virtual bases */ |
5234 | releasing_vec nonvbases = calculate_bases_helper (type); |
5235 | vec_safe_splice (vector, nonvbases); |
5236 | |
5237 | /* Note that during error recovery vector->length can even be zero. */ |
5238 | if (vector->length () > 1) |
5239 | { |
5240 | /* Last element is entire class, so don't copy */ |
5241 | bases_vec = make_tree_vec (vector->length () - 1); |
5242 | |
5243 | for (i = 0; i < vector->length () - 1; ++i) |
5244 | TREE_VEC_ELT (bases_vec, i) = (*vector)[i]; |
5245 | } |
5246 | else |
5247 | bases_vec = make_tree_vec (0); |
5248 | |
5249 | return bases_vec; |
5250 | } |
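/* Illustrative example (user code only):

     struct A {}; struct B : A {}; struct C : B {};

   __bases (C) yields the pack (A, B): the post-order binfo walk
   collects (A, B, C) in order of construction, and the final element,
   the class itself, is dropped above.  */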
5251 | |
5252 | tree |
5253 | finish_bases (tree type, bool direct) |
5254 | { |
5255 | tree bases = NULL_TREE; |
5256 | |
5257 | if (!processing_template_decl) |
5258 | { |
5259 | /* Parameter packs can only be used in templates */ |
5260 | error ("parameter pack %<__bases%> only valid in template declaration"); |
5261 | return error_mark_node; |
5262 | } |
5263 | |
5264 | bases = cxx_make_type (BASES); |
5265 | BASES_TYPE (bases) = type; |
5266 | BASES_DIRECT (bases) = direct; |
5267 | SET_TYPE_STRUCTURAL_EQUALITY (bases); |
5268 | |
5269 | return bases; |
5270 | } |
5271 | |
5272 | /* Perform C++-specific checks for __builtin_offsetof before calling |
5273 | fold_offsetof. */ |
5274 | |
5275 | tree |
5276 | finish_offsetof (tree object_ptr, tree expr, location_t loc) |
5277 | { |
5278 | /* If we're processing a template, we can't finish the semantics yet. |
5279 | Otherwise we can fold the entire expression now. */ |
5280 | if (processing_template_decl) |
5281 | { |
5282 | expr = build2 (OFFSETOF_EXPR, size_type_node, expr, object_ptr); |
5283 | SET_EXPR_LOCATION (expr, loc); |
5284 | return expr; |
5285 | } |
5286 | |
5287 | if (expr == error_mark_node) |
5288 | return error_mark_node; |
5289 | |
5290 | if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR) |
5291 | { |
5292 | error ("cannot apply %<offsetof%> to destructor %<~%T%>", |
5293 | TREE_OPERAND (expr, 2)); |
5294 | return error_mark_node; |
5295 | } |
5296 | if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (expr)) |
5297 | || TREE_TYPE (expr) == unknown_type_node) |
5298 | { |
5299 | while (TREE_CODE (expr) == COMPONENT_REF |
5300 | || TREE_CODE (expr) == COMPOUND_EXPR) |
5301 | expr = TREE_OPERAND (expr, 1); |
5302 | |
5303 | if (DECL_P (expr)) |
5304 | { |
5305 | auto_diagnostic_group d; |
5306 | error ("cannot apply %<offsetof%> to member function %qD", expr); |
5307 | inform (DECL_SOURCE_LOCATION (expr), "declared here"); |
5308 | } |
5309 | else |
5310 | error ("cannot apply %<offsetof%> to member function"); |
5311 | return error_mark_node; |
5312 | } |
5313 | if (TREE_CODE (expr) == CONST_DECL) |
5314 | { |
5315 | error ("cannot apply %<offsetof%> to an enumerator %qD", expr); |
5316 | return error_mark_node; |
5317 | } |
5318 | if (REFERENCE_REF_P (expr)) |
5319 | expr = TREE_OPERAND (expr, 0); |
5320 | if (!complete_type_or_else (TREE_TYPE (TREE_TYPE (object_ptr)), object_ptr)) |
5321 | return error_mark_node; |
5322 | if (warn_invalid_offsetof |
5323 | && CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr))) |
5324 | && CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr))) |
5325 | && cp_unevaluated_operand == 0) |
5326 | warning_at (loc, OPT_Winvalid_offsetof, "%<offsetof%> within " |
5327 | "non-standard-layout type %qT is conditionally-supported", |
5328 | TREE_TYPE (TREE_TYPE (object_ptr))); |
5329 | return fold_offsetof (expr); |
5330 | } |
5331 | |
5332 | /* Replace the AGGR_INIT_EXPR at *TP with an equivalent CALL_EXPR. This |
5333 | function is broken out from the above for the benefit of the tree-ssa |
5334 | project. */ |
5335 | |
5336 | void |
5337 | simplify_aggr_init_expr (tree *tp) |
5338 | { |
5339 | tree aggr_init_expr = *tp; |
5340 | |
5341 | /* Form an appropriate CALL_EXPR. */ |
5342 | tree fn = AGGR_INIT_EXPR_FN (aggr_init_expr); |
5343 | tree slot = AGGR_INIT_EXPR_SLOT (aggr_init_expr); |
5344 | tree type = TREE_TYPE (slot); |
5345 | |
5346 | tree call_expr; |
5347 | enum style_t { ctor, arg, pcc } style; |
5348 | |
5349 | if (AGGR_INIT_VIA_CTOR_P (aggr_init_expr)) |
5350 | style = ctor; |
5351 | #ifdef PCC_STATIC_STRUCT_RETURN |
5352 | else if (1) |
5353 | style = pcc; |
5354 | #endif |
5355 | else |
5356 | { |
5357 | gcc_assert (TREE_ADDRESSABLE (type)); |
5358 | style = arg; |
5359 | } |
5360 | |
5361 | call_expr = build_call_array_loc (input_location, |
5362 | TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))), |
5363 | fn, |
5364 | aggr_init_expr_nargs (aggr_init_expr), |
5365 | AGGR_INIT_EXPR_ARGP (aggr_init_expr)); |
5366 | TREE_NOTHROW (call_expr) = TREE_NOTHROW (aggr_init_expr); |
5367 | CALL_FROM_THUNK_P (call_expr) = AGGR_INIT_FROM_THUNK_P (aggr_init_expr); |
5368 | CALL_EXPR_OPERATOR_SYNTAX (call_expr) |
5369 | = CALL_EXPR_OPERATOR_SYNTAX (aggr_init_expr); |
5370 | CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (aggr_init_expr); |
5371 | CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (aggr_init_expr); |
5372 | CALL_EXPR_MUST_TAIL_CALL (call_expr) = AGGR_INIT_EXPR_MUST_TAIL (aggr_init_expr); |
5373 | |
5374 | if (style == ctor) |
5375 | { |
5376 | /* Replace the first argument to the ctor with the address of the |
5377 | slot. */ |
5378 | cxx_mark_addressable (slot); |
5379 | CALL_EXPR_ARG (call_expr, 0) = |
5380 | build1 (ADDR_EXPR, build_pointer_type (type), slot); |
5381 | } |
5382 | else if (style == arg) |
5383 | { |
5384 | /* Just mark it addressable here, and leave the rest to |
5385 | expand_call{,_inline}. */ |
5386 | cxx_mark_addressable (slot); |
5387 | CALL_EXPR_RETURN_SLOT_OPT (call_expr) = true; |
5388 | call_expr = cp_build_init_expr (slot, call_expr); |
5389 | } |
5390 | else if (style == pcc) |
5391 | { |
5392 | /* If we're using the non-reentrant PCC calling convention, then we |
5393 | need to copy the returned value out of the static buffer into the |
5394 | SLOT. */ |
5395 | push_deferring_access_checks (dk_no_check); |
5396 | call_expr = build_aggr_init (slot, call_expr, |
5397 | DIRECT_BIND | LOOKUP_ONLYCONVERTING, |
5398 | tf_warning_or_error); |
5399 | pop_deferring_access_checks (); |
5400 | call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (slot), call_expr, slot); |
5401 | } |
5402 | |
5403 | if (AGGR_INIT_ZERO_FIRST (aggr_init_expr)) |
5404 | { |
5405 | tree init = build_zero_init (type, NULL_TREE, |
5406 | /*static_storage_p=*/false); |
5407 | init = cp_build_init_expr (slot, init); |
5408 | call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (call_expr), |
5409 | init, call_expr); |
5410 | } |
5411 | |
5412 | *tp = call_expr; |
5413 | } |
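/* Roughly speaking (an illustrative sketch, not an exact dump): an
   AGGR_INIT_EXPR that constructs `s' in place, such as

     AGGR_INIT_EXPR <S::S, slot s, 42>

   becomes a CALL_EXPR whose first argument is `&s' in the `ctor'
   style, while the `arg' style instead assigns the call result into
   the slot with the return-slot optimization enabled.  */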
5414 | |
5415 | /* Emit all thunks to FN that should be emitted when FN is emitted. */ |
5416 | |
5417 | void |
5418 | emit_associated_thunks (tree fn) |
5419 | { |
5420 | /* When we use vcall offsets, we emit thunks with the virtual |
5421 | functions to which they thunk. The whole point of vcall offsets |
5422 | is so that you can know statically the entire set of thunks that |
5423 | will ever be needed for a given virtual function, thereby |
5424 | enabling you to output all the thunks with the function itself. */ |
5425 | if (DECL_VIRTUAL_P (fn) |
5426 | /* Do not emit thunks for extern template instantiations. */ |
5427 | && ! DECL_REALLY_EXTERN (fn) |
5428 | /* Do not emit thunks for tentative decls; those will be processed |
5429 | again at_eof if really needed. */ |
5430 | && (DECL_INTERFACE_KNOWN (fn) || !DECL_DEFER_OUTPUT (fn))) |
5431 | { |
5432 | tree thunk; |
5433 | |
5434 | for (thunk = DECL_THUNKS (fn); thunk; thunk = DECL_CHAIN (thunk)) |
5435 | { |
5436 | if (!THUNK_ALIAS (thunk)) |
5437 | { |
5438 | use_thunk (thunk, /*emit_p=*/1); |
5439 | if (DECL_RESULT_THUNK_P (thunk)) |
5440 | { |
5441 | tree probe; |
5442 | |
5443 | for (probe = DECL_THUNKS (thunk); |
5444 | probe; probe = DECL_CHAIN (probe)) |
5445 | use_thunk (probe, /*emit_p=*/1); |
5446 | } |
5447 | } |
5448 | else |
5449 | gcc_assert (!DECL_THUNKS (thunk)); |
5450 | } |
5451 | } |
5452 | } |
5453 | |
5454 | /* Generate RTL for FN. */ |
5455 | |
5456 | bool |
5457 | expand_or_defer_fn_1 (tree fn) |
5458 | { |
5459 | /* When the parser calls us after finishing the body of a template |
5460 | function, we don't really want to expand the body. */ |
5461 | if (processing_template_decl) |
5462 | { |
5463 | /* Normally, collection only occurs in rest_of_compilation. So, |
5464 | if we don't collect here, we never collect junk generated |
5465 | during the processing of templates until we hit a |
5466 | non-template function. It's not safe to do this inside a |
5467 | nested class, though, as the parser may have local state that |
5468 | is not a GC root. */ |
5469 | if (!function_depth) |
5470 | ggc_collect (); |
5471 | return false; |
5472 | } |
5473 | |
5474 | gcc_assert (DECL_SAVED_TREE (fn)); |
5475 | |
5476 | /* We make a decision about linkage for these functions at the end |
5477 | of the compilation. Until that point, we do not want the back |
5478 | end to output them -- but we do want it to see the bodies of |
5479 | these functions so that it can inline them as appropriate. */ |
5480 | if (DECL_DECLARED_INLINE_P (fn) || DECL_IMPLICIT_INSTANTIATION (fn)) |
5481 | { |
5482 | if (DECL_INTERFACE_KNOWN (fn)) |
5483 | /* We've already made a decision as to how this function will |
5484 | be handled. */; |
5485 | else if (!at_eof |
5486 | || DECL_IMMEDIATE_FUNCTION_P (fn) |
5487 | || DECL_OMP_DECLARE_REDUCTION_P (fn)) |
5488 | tentative_decl_linkage (fn); |
5489 | else |
5490 | import_export_decl (fn); |
5491 | |
5492 | /* If the user wants us to keep all inline functions, then mark |
5493 | this function as needed so that finish_file will make sure to |
5494 | output it later. Similarly, all dllexport'd functions must |
5495 | be emitted; there may be callers in other DLLs. */ |
5496 | if (DECL_DECLARED_INLINE_P (fn) |
5497 | && !DECL_REALLY_EXTERN (fn) |
5498 | && !DECL_IMMEDIATE_FUNCTION_P (fn) |
5499 | && !DECL_OMP_DECLARE_REDUCTION_P (fn) |
5500 | && (flag_keep_inline_functions |
5501 | || (flag_keep_inline_dllexport |
5502 | && lookup_attribute ("dllexport", DECL_ATTRIBUTES (fn))))) |
5503 | { |
5504 | mark_needed (fn); |
5505 | DECL_EXTERNAL (fn) = 0; |
5506 | } |
5507 | } |
5508 | |
5509 | /* If this is a constructor or destructor body, we have to clone |
5510 | it. */ |
5511 | if (maybe_clone_body (fn)) |
5512 | { |
5513 | /* We don't want to process FN again, so pretend we've written |
5514 | it out, even though we haven't. */ |
5515 | TREE_ASM_WRITTEN (fn) = 1; |
5516 | /* If this is a constexpr function we still need the body to be |
5517 | able to evaluate it. Similarly, with modules we only stream |
5518 | the maybe-in-charge cdtor and regenerate the clones from it on |
5519 | demand, so we also need to keep the body. Otherwise we don't |
5520 | need it anymore. */ |
5521 | if (!DECL_DECLARED_CONSTEXPR_P (fn) |
5522 | && !(module_maybe_has_cmi_p () && vague_linkage_p (fn))) |
5523 | DECL_SAVED_TREE (fn) = void_node; |
5524 | return false; |
5525 | } |
5526 | |
5527 | /* There's no reason to do any of the work here if we're only doing |
5528 | semantic analysis; this code just generates RTL. */ |
5529 | if (flag_syntax_only) |
5530 | { |
5531 | /* Pretend that this function has been written out so that we don't try |
5532 | to expand it again. */ |
5533 | TREE_ASM_WRITTEN (fn) = 1; |
5534 | return false; |
5535 | } |
5536 | |
5537 | if (DECL_OMP_DECLARE_REDUCTION_P (fn)) |
5538 | return false; |
5539 | |
5540 | return true; |
5541 | } |
5542 | |
5543 | void |
5544 | expand_or_defer_fn (tree fn) |
5545 | { |
5546 | if (expand_or_defer_fn_1 (fn)) |
5547 | { |
5548 | function_depth++; |
5549 | |
5550 | /* Expand or defer, at the whim of the compilation unit manager. */ |
5551 | cgraph_node::finalize_function (fn, function_depth > 1); |
5552 | emit_associated_thunks (fn); |
5553 | |
5554 | function_depth--; |
5555 | |
5556 | if (DECL_IMMEDIATE_FUNCTION_P (fn)) |
5557 | { |
5558 | if (cgraph_node *node = cgraph_node::get (fn)) |
5559 | { |
5560 | node->body_removed = true; |
5561 | node->analyzed = false; |
5562 | node->definition = false; |
5563 | node->force_output = false; |
5564 | } |
5565 | } |
5566 | } |
5567 | } |
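/* For instance (illustrative only): an immediate function such as

     consteval int sq (int i) { return i * i; }

   must never be emitted as object code; any calls to it have already
   been evaluated at compile time, so its cgraph node is cleared above
   once the body has been processed.  */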
5568 | |
5569 | class nrv_data |
5570 | { |
5571 | public: |
5572 | nrv_data () : visited (37) {} |
5573 | |
5574 | tree var; |
5575 | tree result; |
5576 | hash_set<tree> visited; |
5577 | bool simple; |
5578 | bool in_nrv_cleanup; |
5579 | }; |
5580 | |
5581 | /* Helper function for walk_tree, used by finalize_nrv below. */ |
5582 | |
5583 | static tree |
5584 | finalize_nrv_r (tree* tp, int* walk_subtrees, void* data) |
5585 | { |
5586 | class nrv_data *dp = (class nrv_data *)data; |
5587 | |
5588 | /* No need to walk into types. There wouldn't be any need to walk into |
5589 | non-statements, except that we have to consider STMT_EXPRs. */ |
5590 | if (TYPE_P (*tp)) |
5591 | *walk_subtrees = 0; |
5592 | |
5593 | /* Replace all uses of the NRV with the RESULT_DECL. */ |
5594 | else if (*tp == dp->var) |
5595 | *tp = dp->result; |
5596 | |
5597 | /* Avoid walking into the same tree more than once. Unfortunately, we |
5598 | can't just use walk_tree_without_duplicates because it would only call |
5599 | us for the first occurrence of dp->var in the function body. */ |
5600 | else if (dp->visited.add (*tp)) |
5601 | *walk_subtrees = 0; |
5602 | |
5603 | /* If there's a label, we might need to destroy the NRV on goto (92407). */ |
5604 | else if (TREE_CODE (*tp) == LABEL_EXPR && !dp->in_nrv_cleanup) |
5605 | dp->simple = false; |
5606 | /* Change NRV returns to just refer to the RESULT_DECL; this is a nop, |
5607 | but differs from using NULL_TREE in that it indicates that we care |
5608 | about the value of the RESULT_DECL. But preserve anything appended |
5609 | by check_return_expr. */ |
5610 | else if (TREE_CODE (*tp) == RETURN_EXPR) |
5611 | { |
5612 | tree *p = &TREE_OPERAND (*tp, 0); |
5613 | while (TREE_CODE (*p) == COMPOUND_EXPR) |
5614 | p = &TREE_OPERAND (*p, 0); |
5615 | if (TREE_CODE (*p) == INIT_EXPR |
5616 | && INIT_EXPR_NRV_P (*p)) |
5617 | *p = dp->result; |
5618 | } |
5619 | /* Change all cleanups for the NRV to only run when not returning. */ |
5620 | else if (TREE_CODE (*tp) == CLEANUP_STMT |
5621 | && CLEANUP_DECL (*tp) == dp->var) |
5622 | { |
5623 | dp->in_nrv_cleanup = true; |
5624 | cp_walk_tree (&CLEANUP_BODY (*tp), finalize_nrv_r, data, 0); |
5625 | dp->in_nrv_cleanup = false; |
5626 | cp_walk_tree (&CLEANUP_EXPR (*tp), finalize_nrv_r, data, 0); |
5627 | *walk_subtrees = 0; |
5628 | |
5629 | if (dp->simple) |
5630 | /* For a simple NRV, just run it on the EH path. */ |
5631 | CLEANUP_EH_ONLY (*tp) = true; |
5632 | else |
5633 | { |
5634 | /* Not simple, we need to check current_retval_sentinel to decide |
5635 | whether to run it. If it's set, we're returning normally and |
5636 | don't want to destroy the NRV. If the sentinel is not set, we're |
5637 | leaving scope some other way, either by flowing off the end of its |
5638 | scope or throwing an exception. */ |
5639 | tree cond = build3 (COND_EXPR, void_type_node, |
5640 | current_retval_sentinel, |
5641 | void_node, CLEANUP_EXPR (*tp)); |
5642 | CLEANUP_EXPR (*tp) = cond; |
5643 | } |
5644 | |
5645 | /* If a cleanup might throw, we need to clear current_retval_sentinel on |
5646 | the exception path, both so the check above succeeds and so an outer |
5647 | cleanup added by maybe_splice_retval_cleanup doesn't run. */ |
5648 | if (cp_function_chain->throwing_cleanup) |
5649 | { |
5650 | tree clear = build2 (MODIFY_EXPR, boolean_type_node, |
5651 | current_retval_sentinel, |
5652 | boolean_false_node); |
5653 | if (dp->simple) |
5654 | { |
5655 | /* We're already only on the EH path, just prepend it. */ |
5656 | tree &exp = CLEANUP_EXPR (*tp); |
5657 | exp = build2 (COMPOUND_EXPR, void_type_node, clear, exp); |
5658 | } |
5659 | else |
5660 | { |
5661 | /* The cleanup runs on both normal and EH paths, we need another |
5662 | CLEANUP_STMT to clear the flag only on the EH path. */ |
5663 | tree &bod = CLEANUP_BODY (*tp); |
5664 | bod = build_stmt (EXPR_LOCATION (*tp), CLEANUP_STMT, |
5665 | bod, clear, current_retval_sentinel); |
5666 | CLEANUP_EH_ONLY (bod) = true; |
5667 | } |
5668 | } |
5669 | } |
5670 | /* Disable maybe_splice_retval_cleanup within the NRV cleanup scope, we don't |
5671 | want to destroy the retval before the variable goes out of scope. */ |
5672 | else if (TREE_CODE (*tp) == CLEANUP_STMT |
5673 | && dp->in_nrv_cleanup |
5674 | && CLEANUP_DECL (*tp) == dp->result) |
5675 | CLEANUP_EXPR (*tp) = void_node; |
5676 | /* Replace the DECL_EXPR for the NRV with an initialization of the |
5677 | RESULT_DECL, if needed. */ |
5678 | else if (TREE_CODE (*tp) == DECL_EXPR |
5679 | && DECL_EXPR_DECL (*tp) == dp->var) |
5680 | { |
5681 | tree init; |
5682 | if (DECL_INITIAL (dp->var) |
5683 | && DECL_INITIAL (dp->var) != error_mark_node) |
5684 | init = cp_build_init_expr (dp->result, |
5685 | DECL_INITIAL (dp->var)); |
5686 | else |
5687 | init = build_empty_stmt (EXPR_LOCATION (*tp)); |
5688 | DECL_INITIAL (dp->var) = NULL_TREE; |
5689 | SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp)); |
5690 | *tp = init; |
5691 | } |
5692 | |
5693 | /* Keep iterating. */ |
5694 | return NULL_TREE; |
5695 | } |
5696 | |
5697 | /* Called from finish_function to implement the named return value |
5698 | optimization by overriding all the RETURN_EXPRs and pertinent |
5699 | CLEANUP_STMTs and replacing all occurrences of VAR with RESULT, the |
5700 | RESULT_DECL for the function. */ |
5701 | |
5702 | void |
5703 | finalize_nrv (tree fndecl, tree var) |
5704 | { |
5705 | class nrv_data data; |
5706 | tree result = DECL_RESULT (fndecl); |
5707 | |
5708 | /* Copy name from VAR to RESULT. */ |
5709 | DECL_NAME (result) = DECL_NAME (var); |
5710 | /* Don't forget that we take its address. */ |
5711 | TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (var); |
5712 | /* Finally set DECL_VALUE_EXPR to avoid assigning |
5713 | a stack slot at -O0 for the original var and debug info |
5714 | uses RESULT location for VAR. */ |
5715 | SET_DECL_VALUE_EXPR (var, result); |
5716 | DECL_HAS_VALUE_EXPR_P (var) = 1; |
5717 | |
5718 | data.var = var; |
5719 | data.result = result; |
5720 | data.in_nrv_cleanup = false; |
5721 | |
5722 | /* This is simpler for variables declared in the outer scope of |
5723 | the function so we know that their lifetime always ends with a |
5724 | return; see g++.dg/opt/nrv6.C. */ |
5725 | tree outer = outer_curly_brace_block (fndecl); |
5726 | data.simple = chain_member (var, BLOCK_VARS (outer)); |
5727 | |
5728 | cp_walk_tree (&DECL_SAVED_TREE (fndecl), finalize_nrv_r, &data, 0); |
5729 | } |
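/* An illustrative case of the named return value optimization handled
   here (example code; `S' and `munge' are placeholders):

     S f ()
     {
       S s;
       munge (s);
       return s;
     }

   All uses of `s' are rewritten to the RESULT_DECL of `f', so the
   object is constructed directly in the return slot and the return
   statement reduces to a reference to that RESULT_DECL.  */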
5730 | |
5731 | /* Create CP_OMP_CLAUSE_INFO for clause C. Returns true if it is invalid. */ |
5732 | |
5733 | bool |
5734 | cxx_omp_create_clause_info (tree c, tree type, bool need_default_ctor, |
5735 | bool need_copy_ctor, bool need_copy_assignment, |
5736 | bool need_dtor) |
5737 | { |
5738 | int save_errorcount = errorcount; |
5739 | tree info, t; |
5740 | |
5741 | /* Always allocate 3 elements for simplicity. These are the |
5742 | function decls for the ctor, dtor, and assignment op. |
5743 | This layout is known to the three lang hooks, |
5744 | cxx_omp_clause_default_init, cxx_omp_clause_copy_init, |
5745 | and cxx_omp_clause_assign_op. */ |
5746 | info = make_tree_vec (3); |
5747 | CP_OMP_CLAUSE_INFO (c) = info; |
5748 | |
5749 | if (need_default_ctor || need_copy_ctor) |
5750 | { |
5751 | if (need_default_ctor) |
5752 | t = get_default_ctor (type); |
5753 | else |
5754 | t = get_copy_ctor (type, tf_warning_or_error); |
5755 | |
5756 | if (t && !trivial_fn_p (t)) |
5757 | TREE_VEC_ELT (info, 0) = t; |
5758 | } |
5759 | |
5760 | if (need_dtor && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)) |
5761 | TREE_VEC_ELT (info, 1) = get_dtor (type, tf_warning_or_error); |
5762 | |
5763 | if (need_copy_assignment) |
5764 | { |
5765 | t = get_copy_assign (type); |
5766 | |
5767 | if (t && !trivial_fn_p (t)) |
5768 | TREE_VEC_ELT (info, 2) = t; |
5769 | } |
5770 | |
5771 | return errorcount != save_errorcount; |
5772 | } |
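/* For example (illustrative user code):

     struct S { S (); S (const S &); ~S (); };
     S s;
     #pragma omp parallel private (s)
     ...

   privatizing `s' needs the default constructor and the destructor,
   which land in slots 0 and 1 of CP_OMP_CLAUSE_INFO; a firstprivate
   clause would record the copy constructor in slot 0 instead.  */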
5773 | |
5774 | /* If DECL is DECL_OMP_PRIVATIZED_MEMBER, return corresponding |
5775 | FIELD_DECL, otherwise return DECL itself. */ |
5776 | |
5777 | static tree |
5778 | omp_clause_decl_field (tree decl) |
5779 | { |
5780 | if (VAR_P (decl) |
5781 | && DECL_HAS_VALUE_EXPR_P (decl) |
5782 | && DECL_ARTIFICIAL (decl) |
5783 | && DECL_LANG_SPECIFIC (decl) |
5784 | && DECL_OMP_PRIVATIZED_MEMBER (decl)) |
5785 | { |
5786 | tree f = DECL_VALUE_EXPR (decl); |
5787 | if (INDIRECT_REF_P (f)) |
5788 | f = TREE_OPERAND (f, 0); |
5789 | if (TREE_CODE (f) == COMPONENT_REF) |
5790 | { |
5791 | f = TREE_OPERAND (f, 1); |
5792 | gcc_assert (TREE_CODE (f) == FIELD_DECL); |
5793 | return f; |
5794 | } |
5795 | } |
5796 | return NULL_TREE; |
5797 | } |
5798 | |
5799 | /* Adjust DECL if needed for printing using %qE. */ |
5800 | |
5801 | static tree |
5802 | omp_clause_printable_decl (tree decl) |
5803 | { |
5804 | tree t = omp_clause_decl_field (decl); |
5805 | if (t) |
5806 | return t; |
5807 | return decl; |
5808 | } |
5809 | |
5810 | /* For a FIELD_DECL F and corresponding DECL_OMP_PRIVATIZED_MEMBER |
5811 | VAR_DECL T that doesn't need a DECL_EXPR added, record it for |
5812 | privatization. */ |
5813 | |
5814 | static void |
5815 | omp_note_field_privatization (tree f, tree t) |
5816 | { |
5817 | if (!omp_private_member_map) |
5818 | omp_private_member_map = new hash_map<tree, tree>; |
5819 | tree &v = omp_private_member_map->get_or_insert (f); |
5820 | if (v == NULL_TREE) |
5821 | { |
5822 | v = t; |
5823 | omp_private_member_vec.safe_push (f); |
5824 | /* Signal that we don't want to create DECL_EXPR for this dummy var. */ |
5825 | omp_private_member_vec.safe_push (integer_zero_node); |
5826 | } |
5827 | } |
5828 | |
5829 | /* Privatize FIELD_DECL T, return corresponding DECL_OMP_PRIVATIZED_MEMBER |
5830 | dummy VAR_DECL. */ |
5831 | |
5832 | tree |
5833 | omp_privatize_field (tree t, bool shared) |
5834 | { |
5835 | tree m = finish_non_static_data_member (t, NULL_TREE, NULL_TREE); |
5836 | if (m == error_mark_node) |
5837 | return error_mark_node; |
5838 | if (!omp_private_member_map && !shared) |
5839 | omp_private_member_map = new hash_map<tree, tree>; |
5840 | if (TYPE_REF_P (TREE_TYPE (t))) |
5841 | { |
5842 | gcc_assert (INDIRECT_REF_P (m)); |
5843 | m = TREE_OPERAND (m, 0); |
5844 | } |
5845 | tree vb = NULL_TREE; |
5846 | tree &v = shared ? vb : omp_private_member_map->get_or_insert (t); |
5847 | if (v == NULL_TREE) |
5848 | { |
5849 | v = create_temporary_var (TREE_TYPE (m)); |
5850 | retrofit_lang_decl (v); |
5851 | DECL_OMP_PRIVATIZED_MEMBER (v) = 1; |
5852 | SET_DECL_VALUE_EXPR (v, m); |
5853 | DECL_HAS_VALUE_EXPR_P (v) = 1; |
5854 | if (!shared) |
5855 | omp_private_member_vec.safe_push (t); |
5856 | } |
5857 | return v; |
5858 | } |
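/* Illustrative example (user code only): in

     struct A
     {
       int x;
       void f ()
       {
     #pragma omp parallel for private (x)
         for (int i = 0; i < 8; i++)
           x = i;
       }
     };

   the clause names the non-static data member `x', so a dummy VAR_DECL
   whose DECL_VALUE_EXPR is `(*this).x' is created here and privatized
   in its place.  */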
5859 | |
5860 | /* C++ specialisation of the c_omp_address_inspector class. */ |
5861 | |
5862 | class cp_omp_address_inspector : public c_omp_address_inspector |
5863 | { |
5864 | public: |
5865 | cp_omp_address_inspector (location_t loc, tree t) |
5866 | : c_omp_address_inspector (loc, t) |
5867 | { |
5868 | } |
5869 | |
5870 | ~cp_omp_address_inspector () |
5871 | { |
5872 | } |
5873 | |
5874 | bool processing_template_decl_p () |
5875 | { |
5876 | return processing_template_decl; |
5877 | } |
5878 | |
5879 | void emit_unmappable_type_notes (tree t) |
5880 | { |
5881 | if (TREE_TYPE (t) != error_mark_node |
5882 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
5883 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
5884 | } |
5885 | |
5886 | tree convert_from_reference (tree x) |
5887 | { |
5888 | return ::convert_from_reference (x); |
5889 | } |
5890 | |
5891 | tree build_array_ref (location_t loc, tree arr, tree idx) |
5892 | { |
5893 | return ::build_array_ref (loc, arr, idx); |
5894 | } |
5895 | |
5896 | bool check_clause (tree clause) |
5897 | { |
5898 | if (TREE_CODE (orig) == COMPONENT_REF |
5899 | && invalid_nonstatic_memfn_p (EXPR_LOCATION (orig), orig, |
5900 | tf_warning_or_error)) |
5901 | return false; |
5902 | if (!c_omp_address_inspector::check_clause (clause)) |
5903 | return false; |
5904 | return true; |
5905 | } |
5906 | }; |
5907 | |
5908 | /* Helper function for handle_omp_array_sections. Called recursively |
5909 | to handle multiple array-section-subscripts. C is the clause, |
5910 | T current expression (initially OMP_CLAUSE_DECL), which is either |
5911 | a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound |
5912 | expression if specified, TREE_VALUE length expression if specified, |
5913 | TREE_CHAIN is what it has been specified after, or some decl. |
5914 | TYPES vector is populated with array section types, MAYBE_ZERO_LEN |
5915 | set to true if any of the array-section-subscript could have length |
5916 | of zero (explicit or implicit), FIRST_NON_ONE is the index of the |
5917 | first array-section-subscript which is known not to have length |
5918 | of one. Given say: |
5919 | map(a[:b][2:1][:c][:2][:d][e:f][2:5]) |
5920 | FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c] |
5921 | all are or may have length of 1, array-section-subscript [:2] is the |
5922 | first one known not to have length 1. For array-section-subscript |
5923 | <= FIRST_NON_ONE we diagnose non-contiguous arrays if low bound isn't |
5924 | 0 or length isn't the array domain max + 1, for > FIRST_NON_ONE we |
5925 | can if MAYBE_ZERO_LEN is false. MAYBE_ZERO_LEN will be true in the above |
5926 | case though, as some lengths could be zero. */ |
5927 | |
5928 | static tree |
5929 | handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types, |
5930 | bool &maybe_zero_len, unsigned int &first_non_one, |
5931 | enum c_omp_region_type ort) |
5932 | { |
5933 | tree ret, low_bound, length, type; |
5934 | bool openacc = (ort & C_ORT_ACC) != 0; |
5935 | if (TREE_CODE (t) != OMP_ARRAY_SECTION) |
5936 | { |
5937 | if (error_operand_p (t)) |
5938 | return error_mark_node; |
5939 | |
5940 | cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
5941 | tree t_refto = ai.maybe_unconvert_ref (t); |
5942 | |
5943 | if (!ai.check_clause (c)) |
5944 | return error_mark_node; |
5945 | else if (ai.component_access_p () |
5946 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
5947 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO |
5948 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)) |
5949 | t = ai.get_root_term (true); |
5950 | else |
5951 | t = ai.unconverted_ref_origin (); |
5952 | if (t == error_mark_node) |
5953 | return error_mark_node; |
5954 | ret = t_refto; |
5955 | if (TREE_CODE (t) == FIELD_DECL) |
5956 | ret = finish_non_static_data_member (t, NULL_TREE, NULL_TREE); |
5957 | else if (!VAR_P (t) |
5958 | && (openacc || !EXPR_P (t)) |
5959 | && TREE_CODE (t) != PARM_DECL) |
5960 | { |
5961 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
5962 | return NULL_TREE; |
5963 | if (DECL_P (t)) |
5964 | error_at (OMP_CLAUSE_LOCATION (c), |
5965 | "%qD is not a variable in %qs clause", t, |
5966 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
5967 | else |
5968 | error_at (OMP_CLAUSE_LOCATION (c), |
5969 | "%qE is not a variable in %qs clause", t, |
5970 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
5971 | return error_mark_node; |
5972 | } |
5973 | else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY |
5974 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND |
5975 | && VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) |
5976 | { |
5977 | error_at (OMP_CLAUSE_LOCATION (c), |
5978 | "%qD is threadprivate variable in %qs clause", t, |
5979 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
5980 | return error_mark_node; |
5981 | } |
5982 | if (type_dependent_expression_p (ret)) |
5983 | return NULL_TREE; |
5984 | ret = convert_from_reference (ret); |
5985 | return ret; |
5986 | } |
5987 | |
5988 | if ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP |
5989 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
5990 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
5991 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
5992 | && TREE_CODE (TREE_OPERAND (t, 0)) == FIELD_DECL) |
5993 | TREE_OPERAND (t, 0) = omp_privatize_field (TREE_OPERAND (t, 0), false); |
5994 | ret = handle_omp_array_sections_1 (c, TREE_OPERAND (t, 0), types, |
5995 | maybe_zero_len, first_non_one, ort); |
5996 | if (ret == error_mark_node || ret == NULL_TREE) |
5997 | return ret; |
5998 | |
5999 | type = TREE_TYPE (ret); |
6000 | low_bound = TREE_OPERAND (t, 1); |
6001 | length = TREE_OPERAND (t, 2); |
6002 | if ((low_bound && type_dependent_expression_p (low_bound)) |
6003 | || (length && type_dependent_expression_p (length))) |
6004 | return NULL_TREE; |
6005 | |
6006 | if (low_bound == error_mark_node || length == error_mark_node) |
6007 | return error_mark_node; |
6008 | |
6009 | if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound))) |
6010 | { |
6011 | error_at (OMP_CLAUSE_LOCATION (c), |
6012 | "low bound %qE of array section does not have integral type", |
6013 | low_bound); |
6014 | return error_mark_node; |
6015 | } |
6016 | if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length))) |
6017 | { |
6018 | error_at (OMP_CLAUSE_LOCATION (c), |
6019 | "length %qE of array section does not have integral type", |
6020 | length); |
6021 | return error_mark_node; |
6022 | } |
6023 | if (low_bound) |
6024 | low_bound = mark_rvalue_use (low_bound); |
6025 | if (length) |
6026 | length = mark_rvalue_use (length); |
6027 | /* We need to reduce to real constant-values for checks below. */ |
6028 | if (length) |
6029 | length = fold_simple (length); |
6030 | if (low_bound) |
6031 | low_bound = fold_simple (low_bound); |
6032 | if (low_bound |
6033 | && TREE_CODE (low_bound) == INTEGER_CST |
6034 | && TYPE_PRECISION (TREE_TYPE (low_bound)) |
6035 | > TYPE_PRECISION (sizetype)) |
6036 | low_bound = fold_convert (sizetype, low_bound); |
6037 | if (length |
6038 | && TREE_CODE (length) == INTEGER_CST |
6039 | && TYPE_PRECISION (TREE_TYPE (length)) |
6040 | > TYPE_PRECISION (sizetype)) |
6041 | length = fold_convert (sizetype, length); |
6042 | if (low_bound == NULL_TREE) |
6043 | low_bound = integer_zero_node; |
6044 | |
6045 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
6046 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
6047 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)) |
6048 | { |
6049 | if (length != integer_one_node) |
6050 | { |
6051 | error_at (OMP_CLAUSE_LOCATION (c), |
6052 | "expected single pointer in %qs clause", |
6053 | user_omp_clause_code_name (c, openacc)); |
6054 | return error_mark_node; |
6055 | } |
6056 | } |
6057 | if (length != NULL_TREE) |
6058 | { |
6059 | if (!integer_nonzerop (length)) |
6060 | { |
6061 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY |
6062 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
6063 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
6064 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
6065 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
6066 | { |
6067 | if (integer_zerop (length)) |
6068 | { |
6069 | error_at (OMP_CLAUSE_LOCATION (c), |
6070 | "zero length array section in %qs clause", |
6071 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6072 | return error_mark_node; |
6073 | } |
6074 | } |
6075 | else |
6076 | maybe_zero_len = true; |
6077 | } |
6078 | if (first_non_one == types.length () |
6079 | && (TREE_CODE (length) != INTEGER_CST || integer_onep (length))) |
6080 | first_non_one++; |
6081 | } |
6082 | if (TREE_CODE (type) == ARRAY_TYPE) |
6083 | { |
6084 | if (length == NULL_TREE |
6085 | && (TYPE_DOMAIN (type) == NULL_TREE |
6086 | || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)) |
6087 | { |
6088 | error_at (OMP_CLAUSE_LOCATION (c), |
6089 | "for unknown bound array type length expression must " |
6090 | "be specified"); |
6091 | return error_mark_node; |
6092 | } |
6093 | if (TREE_CODE (low_bound) == INTEGER_CST |
6094 | && tree_int_cst_sgn (low_bound) == -1) |
6095 | { |
6096 | error_at (OMP_CLAUSE_LOCATION (c), |
6097 | "negative low bound in array section in %qs clause", |
6098 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6099 | return error_mark_node; |
6100 | } |
6101 | if (length != NULL_TREE |
6102 | && TREE_CODE (length) == INTEGER_CST |
6103 | && tree_int_cst_sgn (length) == -1) |
6104 | { |
6105 | error_at (OMP_CLAUSE_LOCATION (c), |
6106 | "negative length in array section in %qs clause", |
6107 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6108 | return error_mark_node; |
6109 | } |
6110 | if (TYPE_DOMAIN (type) |
6111 | && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) |
6112 | && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) |
6113 | == INTEGER_CST) |
6114 | { |
6115 | tree size |
6116 | = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type))); |
6117 | size = size_binop (PLUS_EXPR, size, size_one_node); |
6118 | if (TREE_CODE (low_bound) == INTEGER_CST) |
6119 | { |
6120 | if (tree_int_cst_lt (size, low_bound)) |
6121 | { |
6122 | error_at (OMP_CLAUSE_LOCATION (c), |
6123 | "low bound %qE above array section size " |
6124 | "in %qs clause", low_bound, |
6125 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6126 | return error_mark_node; |
6127 | } |
6128 | if (tree_int_cst_equal (size, low_bound)) |
6129 | { |
6130 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY |
6131 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
6132 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
6133 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
6134 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
6135 | { |
6136 | error_at (OMP_CLAUSE_LOCATION (c), |
6137 | "zero length array section in %qs clause", |
6138 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6139 | return error_mark_node; |
6140 | } |
6141 | maybe_zero_len = true; |
6142 | } |
6143 | else if (length == NULL_TREE |
6144 | && first_non_one == types.length () |
6145 | && tree_int_cst_equal |
6146 | (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), |
6147 | low_bound)) |
6148 | first_non_one++; |
6149 | } |
6150 | else if (length == NULL_TREE) |
6151 | { |
6152 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY |
6153 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND |
6154 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION |
6155 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION |
6156 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION) |
6157 | maybe_zero_len = true; |
6158 | if (first_non_one == types.length ()) |
6159 | first_non_one++; |
6160 | } |
6161 | if (length && TREE_CODE (length) == INTEGER_CST) |
6162 | { |
6163 | if (tree_int_cst_lt (size, length)) |
6164 | { |
6165 | error_at (OMP_CLAUSE_LOCATION (c), |
6166 | "length %qE above array section size " |
6167 | "in %qs clause", length, |
6168 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6169 | return error_mark_node; |
6170 | } |
6171 | if (TREE_CODE (low_bound) == INTEGER_CST) |
6172 | { |
6173 | tree lbpluslen |
6174 | = size_binop (PLUS_EXPR, |
6175 | fold_convert (sizetype, low_bound), |
6176 | fold_convert (sizetype, length)); |
6177 | if (TREE_CODE (lbpluslen) == INTEGER_CST |
6178 | && tree_int_cst_lt (size, lbpluslen)) |
6179 | { |
6180 | error_at (OMP_CLAUSE_LOCATION (c), |
6181 | "high bound %qE above array section size " |
6182 | "in %qs clause", lbpluslen, |
6183 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6184 | return error_mark_node; |
6185 | } |
6186 | } |
6187 | } |
6188 | } |
6189 | else if (length == NULL_TREE) |
6190 | { |
6191 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY |
6192 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND |
6193 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION |
6194 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION |
6195 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION) |
6196 | maybe_zero_len = true; |
6197 | if (first_non_one == types.length ()) |
6198 | first_non_one++; |
6199 | } |
6200 | |
6201 | /* For [lb:] we will need to evaluate lb more than once. */ |
6202 | if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND) |
6203 | { |
6204 | tree lb = cp_save_expr (low_bound); |
6205 | if (lb != low_bound) |
6206 | { |
6207 | TREE_OPERAND (t, 1) = lb; |
6208 | low_bound = lb; |
6209 | } |
6210 | } |
6211 | } |
6212 | else if (TYPE_PTR_P (type)) |
6213 | { |
6214 | if (length == NULL_TREE) |
6215 | { |
6216 | if (TREE_CODE (ret) == PARM_DECL && DECL_ARRAY_PARAMETER_P (ret)) |
6217 | error_at (OMP_CLAUSE_LOCATION (c), |
6218 | "for array function parameter length expression " |
6219 | "must be specified"); |
6220 | else |
6221 | error_at (OMP_CLAUSE_LOCATION (c), |
6222 | "for pointer type length expression must be specified"); |
6223 | return error_mark_node; |
6224 | } |
6225 | if (length != NULL_TREE |
6226 | && TREE_CODE (length) == INTEGER_CST |
6227 | && tree_int_cst_sgn (length) == -1) |
6228 | { |
6229 | error_at (OMP_CLAUSE_LOCATION (c), |
6230 | "negative length in array section in %qs clause", |
6231 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6232 | return error_mark_node; |
6233 | } |
6234 | /* If there is a pointer type anywhere but in the very first |
6235 | array-section-subscript, the array section could be non-contiguous. */ |
6236 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY |
6237 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND |
6238 | && TREE_CODE (TREE_OPERAND (t, 0)) == OMP_ARRAY_SECTION) |
6239 | { |
6240 | /* If any prior dimension has a non-one length, then deem this |
6241 | array section as non-contiguous. */ |
6242 | for (tree d = TREE_OPERAND (t, 0); TREE_CODE (d) == OMP_ARRAY_SECTION; |
6243 | d = TREE_OPERAND (d, 0)) |
6244 | { |
6245 | tree d_length = TREE_OPERAND (d, 2); |
6246 | if (d_length == NULL_TREE || !integer_onep (d_length)) |
6247 | { |
6248 | error_at (OMP_CLAUSE_LOCATION (c), |
6249 | "array section is not contiguous in %qs clause", |
6250 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6251 | return error_mark_node; |
6252 | } |
6253 | } |
6254 | } |
6255 | } |
6256 | else |
6257 | { |
6258 | error_at (OMP_CLAUSE_LOCATION (c), |
6259 | "%qE does not have pointer or array type", ret); |
6260 | return error_mark_node; |
6261 | } |
6262 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND) |
6263 | types.safe_push (TREE_TYPE (ret)); |
6264 | /* We will need to evaluate lb more than once. */ |
6265 | tree lb = cp_save_expr (low_bound); |
6266 | if (lb != low_bound) |
6267 | { |
6268 | TREE_OPERAND (t, 1) = lb; |
6269 | low_bound = lb; |
6270 | } |
6271 | /* Temporarily disable -fstrong-eval-order for array reductions. |
6272 | The SAVE_EXPR and COMPOUND_EXPR added if low_bound has side-effects |
6273 | is something the middle-end can't cope with and more importantly, |
6274 | it needs to be the actual base variable that is privatized, not some |
6275 | temporary assigned the previous value of it. That, together with OpenMP |
6276 | leaving it unspecified how many times the side-effects are evaluated, |
6277 | makes int *a, *b; ... reduction(+:a[a = b, 3:10]) really unspecified. */ |
6278 | warning_sentinel s (flag_strong_eval_order, |
6279 | OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
6280 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
6281 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION); |
6282 | ret = grok_array_decl (OMP_CLAUSE_LOCATION (c), ret, low_bound, NULL, |
6283 | tf_warning_or_error); |
6284 | return ret; |
6285 | } |
6286 | |
6287 | /* Handle array sections for clause C. */ |
6288 | |
6289 | static bool |
6290 | handle_omp_array_sections (tree &c, enum c_omp_region_type ort) |
6291 | { |
6292 | bool maybe_zero_len = false; |
6293 | unsigned int first_non_one = 0; |
6294 | auto_vec<tree, 10> types; |
6295 | tree *tp = &OMP_CLAUSE_DECL (c); |
6296 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
6297 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY) |
6298 | && TREE_CODE (*tp) == TREE_LIST |
6299 | && TREE_PURPOSE (*tp) |
6300 | && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC) |
6301 | tp = &TREE_VALUE (*tp); |
6302 | tree first = handle_omp_array_sections_1 (c, *tp, types, |
6303 | maybe_zero_len, first_non_one, |
6304 | ort); |
6305 | if (first == error_mark_node) |
6306 | return true; |
6307 | if (first == NULL_TREE) |
6308 | return false; |
6309 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
6310 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY) |
6311 | { |
6312 | tree t = *tp; |
6313 | tree tem = NULL_TREE; |
6314 | if (processing_template_decl) |
6315 | return false; |
6316 | /* Need to evaluate side effects in the length expressions |
6317 | if any. */ |
6318 | while (TREE_CODE (t) == TREE_LIST) |
6319 | { |
6320 | if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t))) |
6321 | { |
6322 | if (tem == NULL_TREE) |
6323 | tem = TREE_VALUE (t); |
6324 | else |
6325 | tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), |
6326 | TREE_VALUE (t), tem); |
6327 | } |
6328 | t = TREE_CHAIN (t); |
6329 | } |
6330 | if (tem) |
6331 | first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first); |
6332 | *tp = first; |
6333 | } |
6334 | else |
6335 | { |
6336 | unsigned int num = types.length (), i; |
6337 | tree t, side_effects = NULL_TREE, size = NULL_TREE; |
6338 | tree condition = NULL_TREE; |
6339 | |
6340 | if (int_size_in_bytes (TREE_TYPE (first)) <= 0) |
6341 | maybe_zero_len = true; |
6342 | if (processing_template_decl && maybe_zero_len) |
6343 | return false; |
6344 | |
6345 | for (i = num, t = OMP_CLAUSE_DECL (c); i > 0; |
6346 | t = TREE_OPERAND (t, 0)) |
6347 | { |
6348 | gcc_assert (TREE_CODE (t) == OMP_ARRAY_SECTION); |
6349 | |
6350 | tree low_bound = TREE_OPERAND (t, 1); |
6351 | tree length = TREE_OPERAND (t, 2); |
6352 | |
6353 | i--; |
6354 | if (low_bound |
6355 | && TREE_CODE (low_bound) == INTEGER_CST |
6356 | && TYPE_PRECISION (TREE_TYPE (low_bound)) |
6357 | > TYPE_PRECISION (sizetype)) |
6358 | low_bound = fold_convert (sizetype, low_bound); |
6359 | if (length |
6360 | && TREE_CODE (length) == INTEGER_CST |
6361 | && TYPE_PRECISION (TREE_TYPE (length)) |
6362 | > TYPE_PRECISION (sizetype)) |
6363 | length = fold_convert (sizetype, length); |
6364 | if (low_bound == NULL_TREE) |
6365 | low_bound = integer_zero_node; |
6366 | if (!maybe_zero_len && i > first_non_one) |
6367 | { |
6368 | if (integer_nonzerop (low_bound)) |
6369 | goto do_warn_noncontiguous; |
6370 | if (length != NULL_TREE |
6371 | && TREE_CODE (length) == INTEGER_CST |
6372 | && TYPE_DOMAIN (types[i]) |
6373 | && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])) |
6374 | && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))) |
6375 | == INTEGER_CST) |
6376 | { |
6377 | tree size; |
6378 | size = size_binop (PLUS_EXPR, |
6379 | TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])), |
6380 | size_one_node); |
6381 | if (!tree_int_cst_equal (length, size)) |
6382 | { |
6383 | do_warn_noncontiguous: |
6384 | error_at (OMP_CLAUSE_LOCATION (c), |
6385 | "array section is not contiguous in %qs " |
6386 | "clause", |
6387 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
6388 | return true; |
6389 | } |
6390 | } |
6391 | if (!processing_template_decl |
6392 | && length != NULL_TREE |
6393 | && TREE_SIDE_EFFECTS (length)) |
6394 | { |
6395 | if (side_effects == NULL_TREE) |
6396 | side_effects = length; |
6397 | else |
6398 | side_effects = build2 (COMPOUND_EXPR, |
6399 | TREE_TYPE (side_effects), |
6400 | length, side_effects); |
6401 | } |
6402 | } |
6403 | else if (processing_template_decl) |
6404 | continue; |
6405 | else |
6406 | { |
6407 | tree l; |
6408 | |
6409 | if (i > first_non_one |
6410 | && ((length && integer_nonzerop (length)) |
6411 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
6412 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
6413 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)) |
6414 | continue; |
6415 | if (length) |
6416 | l = fold_convert (sizetype, length); |
6417 | else |
6418 | { |
6419 | l = size_binop (PLUS_EXPR, |
6420 | TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])), |
6421 | size_one_node); |
6422 | l = size_binop (MINUS_EXPR, l, |
6423 | fold_convert (sizetype, low_bound)); |
6424 | } |
6425 | if (i > first_non_one) |
6426 | { |
6427 | l = fold_build2 (NE_EXPR, boolean_type_node, l, |
6428 | size_zero_node); |
6429 | if (condition == NULL_TREE) |
6430 | condition = l; |
6431 | else |
6432 | condition = fold_build2 (BIT_AND_EXPR, boolean_type_node, |
6433 | l, condition); |
6434 | } |
6435 | else if (size == NULL_TREE) |
6436 | { |
6437 | size = size_in_bytes (TREE_TYPE (types[i])); |
6438 | tree eltype = TREE_TYPE (types[num - 1]); |
6439 | while (TREE_CODE (eltype) == ARRAY_TYPE) |
6440 | eltype = TREE_TYPE (eltype); |
6441 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
6442 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
6443 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
6444 | size = size_binop (EXACT_DIV_EXPR, size, |
6445 | size_in_bytes (eltype)); |
6446 | size = size_binop (MULT_EXPR, size, l); |
6447 | if (condition) |
6448 | size = fold_build3 (COND_EXPR, sizetype, condition, |
6449 | size, size_zero_node); |
6450 | } |
6451 | else |
6452 | size = size_binop (MULT_EXPR, size, l); |
6453 | } |
6454 | } |
6455 | if (!processing_template_decl) |
6456 | { |
6457 | if (side_effects) |
6458 | size = build2 (COMPOUND_EXPR, sizetype, side_effects, size); |
6459 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
6460 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
6461 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
6462 | { |
6463 | size = size_binop (MINUS_EXPR, size, size_one_node); |
6464 | size = save_expr (size); |
6465 | tree index_type = build_index_type (size); |
6466 | tree eltype = TREE_TYPE (first); |
6467 | while (TREE_CODE (eltype) == ARRAY_TYPE) |
6468 | eltype = TREE_TYPE (eltype); |
6469 | tree type = build_array_type (eltype, index_type); |
6470 | tree ptype = build_pointer_type (eltype); |
6471 | if (TYPE_REF_P (TREE_TYPE (t)) |
6472 | && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t)))) |
6473 | t = convert_from_reference (t); |
6474 | else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
6475 | t = build_fold_addr_expr (t); |
6476 | tree t2 = build_fold_addr_expr (first); |
6477 | t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
6478 | ptrdiff_type_node, t2); |
6479 | t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR, |
6480 | ptrdiff_type_node, t2, |
6481 | fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
6482 | ptrdiff_type_node, t)); |
6483 | if (tree_fits_shwi_p (t2)) |
6484 | t = build2 (MEM_REF, type, t, |
6485 | build_int_cst (ptype, tree_to_shwi (t2))); |
6486 | else |
6487 | { |
6488 | t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c), |
6489 | sizetype, t2); |
6490 | t = build2_loc (OMP_CLAUSE_LOCATION (c), POINTER_PLUS_EXPR, |
6491 | TREE_TYPE (t), t, t2); |
6492 | t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0)); |
6493 | } |
6494 | OMP_CLAUSE_DECL (c) = t; |
6495 | return false; |
6496 | } |
6497 | OMP_CLAUSE_DECL (c) = first; |
6498 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR) |
6499 | return false; |
6500 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
6501 | || (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH |
6502 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH |
6503 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DETACH)) |
6504 | OMP_CLAUSE_SIZE (c) = size; |
6505 | if (TREE_CODE (t) == FIELD_DECL) |
6506 | t = finish_non_static_data_member (t, NULL_TREE, NULL_TREE); |
6507 | |
6508 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
6509 | return false; |
6510 | |
6511 | if (TREE_CODE (first) == INDIRECT_REF) |
6512 | { |
6513 | /* Detect and skip adding extra nodes for pointer-to-member |
6514 | mappings. These are unsupported for now. */ |
6515 | tree tmp = TREE_OPERAND (first, 0); |
6516 | |
6517 | if (TREE_CODE (tmp) == NON_LVALUE_EXPR) |
6518 | tmp = TREE_OPERAND (tmp, 0); |
6519 | |
6520 | if (TREE_CODE (tmp) == INDIRECT_REF) |
6521 | tmp = TREE_OPERAND (tmp, 0); |
6522 | |
6523 | if (TREE_CODE (tmp) == POINTER_PLUS_EXPR) |
6524 | { |
6525 | tree offset = TREE_OPERAND (tmp, 1); |
6526 | STRIP_NOPS (offset); |
6527 | if (TYPE_PTRMEM_P (TREE_TYPE (offset))) |
6528 | { |
6529 | sorry_at (OMP_CLAUSE_LOCATION (c), |
6530 | "pointer-to-member mapping %qE not supported", |
6531 | OMP_CLAUSE_DECL (c)); |
6532 | return true; |
6533 | } |
6534 | } |
6535 | } |
6536 | |
6537 | /* FIRST represents the first item of data that we are mapping. |
6538 | E.g. if we're mapping an array, FIRST might resemble |
6539 | "foo.bar.myarray[0]". */ |
6540 | |
6541 | auto_vec<omp_addr_token *, 10> addr_tokens; |
6542 | |
6543 | if (!omp_parse_expr (addr_tokens, first)) |
6544 | return true; |
6545 | |
6546 | cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
6547 | |
6548 | tree nc = ai.expand_map_clause (c, first, addr_tokens, ort); |
6549 | if (nc != error_mark_node) |
6550 | { |
6551 | using namespace omp_addr_tokenizer; |
6552 | |
6553 | if (ai.maybe_zero_length_array_section (c)) |
6554 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
6555 | |
6556 | /* !!! If we're accessing a base decl via chained access |
6557 | methods (e.g. multiple indirections), duplicate clause |
6558 | detection won't work properly. Skip it in that case. */ |
6559 | if ((addr_tokens[0]->type == STRUCTURE_BASE |
6560 | || addr_tokens[0]->type == ARRAY_BASE) |
6561 | && addr_tokens[0]->u.structure_base_kind == BASE_DECL |
6562 | && addr_tokens[1]->type == ACCESS_METHOD |
6563 | && omp_access_chain_p (addr_tokens, 1)) |
6564 | c = nc; |
6565 | |
6566 | return false; |
6567 | } |
6568 | } |
6569 | } |
6570 | return false; |
6571 | } |
6572 | |
6573 | /* Return identifier to look up for omp declare reduction. */ |
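| /* The identifier is the string "omp declare reduction " followed by the |
| operator or user-supplied id and, when TYPE is given, '~' plus its |
| mangled representation, as assembled below. */ |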
6574 | |
6575 | tree |
6576 | omp_reduction_id (enum tree_code reduction_code, tree reduction_id, tree type) |
6577 | { |
6578 | const char *p = NULL; |
6579 | const char *m = NULL; |
6580 | switch (reduction_code) |
6581 | { |
6582 | case PLUS_EXPR: |
6583 | case MULT_EXPR: |
6584 | case MINUS_EXPR: |
6585 | case BIT_AND_EXPR: |
6586 | case BIT_XOR_EXPR: |
6587 | case BIT_IOR_EXPR: |
6588 | case TRUTH_ANDIF_EXPR: |
6589 | case TRUTH_ORIF_EXPR: |
6590 | reduction_id = ovl_op_identifier (false, reduction_code); |
6591 | break; |
6592 | case MIN_EXPR: |
6593 | p = "min"; |
6594 | break; |
6595 | case MAX_EXPR: |
6596 | p = "max"; |
6597 | break; |
6598 | default: |
6599 | break; |
6600 | } |
6601 | |
6602 | if (p == NULL) |
6603 | { |
6604 | if (TREE_CODE (reduction_id) != IDENTIFIER_NODE) |
6605 | return error_mark_node; |
6606 | p = IDENTIFIER_POINTER (reduction_id); |
6607 | } |
6608 | |
6609 | if (type != NULL_TREE) |
6610 | m = mangle_type_string (TYPE_MAIN_VARIANT (type)); |
6611 | |
6612 | const char prefix[] = "omp declare reduction "; |
6613 | size_t lenp = sizeof (prefix); |
6614 | if (strncmp (p, prefix, lenp - 1) == 0) |
6615 | lenp = 1; |
6616 | size_t len = strlen (p); |
6617 | size_t lenm = m ? strlen (m) + 1 : 0; |
6618 | char *name = XALLOCAVEC (char, lenp + len + lenm); |
6619 | if (lenp > 1) |
6620 | memcpy (name, prefix, lenp - 1); |
6621 | memcpy (name + lenp - 1, p, len + 1); |
6622 | if (m) |
6623 | { |
6624 | name[lenp + len - 1] = '~'; |
6625 | memcpy (name + lenp + len, m, lenm); |
6626 | } |
6627 | return get_identifier (name); |
6628 | } |
6629 | |
6630 | /* Lookup OpenMP UDR ID for TYPE, return the corresponding artificial |
6631 | FUNCTION_DECL or NULL_TREE if not found. */ |
6632 | |
6633 | static tree |
6634 | omp_reduction_lookup (location_t loc, tree id, tree type, tree *baselinkp, |
6635 | vec<tree> *ambiguousp) |
6636 | { |
6637 | tree orig_id = id; |
6638 | tree baselink = NULL_TREE; |
6639 | if (identifier_p (id)) |
6640 | { |
6641 | cp_id_kind idk; |
6642 | bool nonint_cst_expression_p; |
6643 | const char *error_msg; |
6644 | id = omp_reduction_id (ERROR_MARK, id, type); |
6645 | tree decl = lookup_name (id); |
6646 | if (decl == NULL_TREE) |
6647 | decl = error_mark_node; |
6648 | id = finish_id_expression (id, decl, NULL_TREE, &idk, false, true, |
6649 | &nonint_cst_expression_p, false, true, false, |
6650 | false, &error_msg, loc); |
6651 | if (idk == CP_ID_KIND_UNQUALIFIED |
6652 | && identifier_p (id)) |
6653 | { |
6654 | vec<tree, va_gc> *args = NULL; |
6655 | vec_safe_push (args, build_reference_type (type)); |
6656 | id = perform_koenig_lookup (id, args, tf_none); |
6657 | } |
6658 | } |
6659 | else if (TREE_CODE (id) == SCOPE_REF) |
6660 | id = lookup_qualified_name (TREE_OPERAND (id, 0), |
6661 | omp_reduction_id (ERROR_MARK, |
6662 | TREE_OPERAND (id, 1), |
6663 | type), |
6664 | LOOK_want::NORMAL, false); |
6665 | tree fns = id; |
6666 | id = NULL_TREE; |
6667 | if (fns && is_overloaded_fn (fns)) |
6668 | { |
6669 | for (lkp_iterator iter (get_fns (fns)); iter; ++iter) |
6670 | { |
6671 | tree fndecl = *iter; |
6672 | if (TREE_CODE (fndecl) == FUNCTION_DECL) |
6673 | { |
6674 | tree argtype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl))); |
6675 | if (same_type_p (TREE_TYPE (argtype), type)) |
6676 | { |
6677 | id = fndecl; |
6678 | break; |
6679 | } |
6680 | } |
6681 | } |
6682 | |
6683 | if (id && BASELINK_P (fns)) |
6684 | { |
6685 | if (baselinkp) |
6686 | *baselinkp = fns; |
6687 | else |
6688 | baselink = fns; |
6689 | } |
6690 | } |
6691 | |
6692 | if (!id && CLASS_TYPE_P (type) && TYPE_BINFO (type)) |
6693 | { |
6694 | auto_vec<tree> ambiguous; |
6695 | tree binfo = TYPE_BINFO (type), base_binfo, ret = NULL_TREE; |
6696 | unsigned int ix; |
6697 | if (ambiguousp == NULL) |
6698 | ambiguousp = &ambiguous; |
6699 | for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++) |
6700 | { |
6701 | id = omp_reduction_lookup (loc, orig_id, BINFO_TYPE (base_binfo), |
6702 | baselinkp ? baselinkp : &baselink, |
6703 | ambiguousp); |
6704 | if (id == NULL_TREE) |
6705 | continue; |
6706 | if (!ambiguousp->is_empty ()) |
6707 | ambiguousp->safe_push (id); |
6708 | else if (ret != NULL_TREE) |
6709 | { |
6710 | ambiguousp->safe_push (ret); |
6711 | ambiguousp->safe_push (id); |
6712 | ret = NULL_TREE; |
6713 | } |
6714 | else |
6715 | ret = id; |
6716 | } |
6717 | if (ambiguousp != &ambiguous) |
6718 | return ret; |
6719 | if (!ambiguous.is_empty ()) |
6720 | { |
6721 | auto_diagnostic_group d; |
6722 | const char *str = _("candidates are:"); |
6723 | unsigned int idx; |
6724 | tree udr; |
6725 | error_at (loc, "user defined reduction lookup is ambiguous"); |
6726 | FOR_EACH_VEC_ELT (ambiguous, idx, udr) |
6727 | { |
6728 | inform (DECL_SOURCE_LOCATION (udr), "%s %#qD", str, udr); |
6729 | if (idx == 0) |
6730 | str = get_spaces (str); |
6731 | } |
6732 | ret = error_mark_node; |
6733 | baselink = NULL_TREE; |
6734 | } |
6735 | id = ret; |
6736 | } |
6737 | if (id && baselink) |
6738 | perform_or_defer_access_check (BASELINK_BINFO (baselink), |
6739 | id, id, tf_warning_or_error); |
6740 | return id; |
6741 | } |
6742 | |
6743 | /* Return identifier to look up for omp declare mapper. */ |
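| /* As for reductions, the identifier is "omp declare mapper " followed by |
| the mapper name (empty for the default mapper) and, when TYPE is given, |
| '~' plus its mangled representation. */ |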
6744 | |
6745 | tree |
6746 | omp_mapper_id (tree mapper_id, tree type) |
6747 | { |
6748 | const char *p = NULL; |
6749 | const char *m = NULL; |
6750 | |
6751 | if (mapper_id == NULL_TREE) |
6752 | p = ""; |
6753 | else if (TREE_CODE (mapper_id) == IDENTIFIER_NODE) |
6754 | p = IDENTIFIER_POINTER (mapper_id); |
6755 | else |
6756 | return error_mark_node; |
6757 | |
6758 | if (type != NULL_TREE) |
6759 | m = mangle_type_string (TYPE_MAIN_VARIANT (type)); |
6760 | |
6761 | const char prefix[] = "omp declare mapper "; |
6762 | size_t lenp = sizeof (prefix); |
6763 | if (strncmp (p, prefix, lenp - 1) == 0) |
6764 | lenp = 1; |
6765 | size_t len = strlen (p); |
6766 | size_t lenm = m ? strlen (m) + 1 : 0; |
6767 | char *name = XALLOCAVEC (char, lenp + len + lenm); |
6768 | memcpy (name, prefix, lenp - 1); |
6769 | memcpy (name + lenp - 1, p, len + 1); |
6770 | if (m) |
6771 | { |
6772 | name[lenp + len - 1] = '~'; |
6773 | memcpy (name + lenp + len, m, lenm); |
6774 | } |
6775 | return get_identifier (name); |
6776 | } |
6777 | |
6778 | tree |
6779 | cxx_omp_mapper_lookup (tree id, tree type) |
6780 | { |
6781 | if (!RECORD_OR_UNION_TYPE_P (type)) |
6782 | return NULL_TREE; |
6783 | id = omp_mapper_id (id, type); |
6784 | return lookup_name (id); |
6785 | } |
6786 | |
6787 | tree |
6788 | cxx_omp_extract_mapper_directive (tree vardecl) |
6789 | { |
6790 | gcc_assert (TREE_CODE (vardecl) == VAR_DECL); |
6791 | |
6792 | /* Instantiate the decl if we haven't already. */ |
6793 | mark_used (vardecl); |
6794 | tree body = DECL_INITIAL (vardecl); |
6795 | |
6796 | if (TREE_CODE (body) == STATEMENT_LIST) |
6797 | { |
6798 | tree_stmt_iterator tsi = tsi_start (body); |
6799 | gcc_assert (TREE_CODE (tsi_stmt (tsi)) == DECL_EXPR); |
6800 | tsi_next (&tsi); |
6801 | body = tsi_stmt (tsi); |
6802 | } |
6803 | |
6804 | gcc_assert (TREE_CODE (body) == OMP_DECLARE_MAPPER); |
6805 | |
6806 | return body; |
6807 | } |
6808 | |
6809 | /* For now we can handle singleton OMP_ARRAY_SECTIONs with custom mappers, but |
6810 | nothing more complicated. */ |
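| /* E.g. a section such as s[3:1] is rewritten as the array reference s[3]; |
| sections of any other (or unknown) length are returned unchanged. */ |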
6811 | |
6812 | tree |
6813 | cxx_omp_map_array_section (location_t loc, tree t) |
6814 | { |
6815 | tree low = TREE_OPERAND (t, 1); |
6816 | tree len = TREE_OPERAND (t, 2); |
6817 | |
6818 | if (len && integer_onep (len)) |
6819 | { |
6820 | t = TREE_OPERAND (t, 0); |
6821 | |
6822 | if (!low) |
6823 | low = integer_zero_node; |
6824 | |
6825 | if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE) |
6826 | t = convert_from_reference (t); |
6827 | |
6828 | t = build_array_ref (loc, t, low); |
6829 | } |
6830 | |
6831 | return t; |
6832 | } |
6833 | |
6834 | /* Helper function for cp_parser_omp_declare_reduction_exprs |
6835 | and tsubst_omp_udr. |
6836 | Remove CLEANUP_STMT for data (omp_priv variable). |
6837 | Also append INIT_EXPR for DECL_INITIAL of omp_priv after its |
6838 | DECL_EXPR. */ |
6839 | |
6840 | tree |
6841 | cp_remove_omp_priv_cleanup_stmt (tree *tp, int *walk_subtrees, void *data) |
6842 | { |
6843 | if (TYPE_P (*tp)) |
6844 | *walk_subtrees = 0; |
6845 | else if (TREE_CODE (*tp) == CLEANUP_STMT && CLEANUP_DECL (*tp) == (tree) data) |
6846 | *tp = CLEANUP_BODY (*tp); |
6847 | else if (TREE_CODE (*tp) == DECL_EXPR) |
6848 | { |
6849 | tree decl = DECL_EXPR_DECL (*tp); |
6850 | if (!processing_template_decl |
6851 | && decl == (tree) data |
6852 | && DECL_INITIAL (decl) |
6853 | && DECL_INITIAL (decl) != error_mark_node) |
6854 | { |
6855 | tree list = NULL_TREE; |
6856 | append_to_statement_list_force (*tp, &list); |
6857 | tree init_expr = build2 (INIT_EXPR, void_type_node, |
6858 | decl, DECL_INITIAL (decl)); |
6859 | DECL_INITIAL (decl) = NULL_TREE; |
6860 | append_to_statement_list_force (init_expr, &list); |
6861 | *tp = list; |
6862 | } |
6863 | } |
6864 | return NULL_TREE; |
6865 | } |
6866 | |
6867 | /* Data passed from cp_check_omp_declare_reduction to |
6868 | cp_check_omp_declare_reduction_r. */ |
6869 | |
6870 | struct cp_check_omp_declare_reduction_data |
6871 | { |
6872 | location_t loc; |
6873 | tree stmts[7]; |
6874 | bool combiner_p; |
6875 | }; |
6876 | |
6877 | /* Helper function for cp_check_omp_declare_reduction, called via |
6878 | cp_walk_tree. */ |
6879 | |
6880 | static tree |
6881 | cp_check_omp_declare_reduction_r (tree *tp, int *, void *data) |
6882 | { |
6883 | struct cp_check_omp_declare_reduction_data *udr_data |
6884 | = (struct cp_check_omp_declare_reduction_data *) data; |
6885 | if (SSA_VAR_P (*tp) |
6886 | && !DECL_ARTIFICIAL (*tp) |
6887 | && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 0 : 3]) |
6888 | && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 1 : 4])) |
6889 | { |
6890 | location_t loc = udr_data->loc; |
6891 | if (udr_data->combiner_p) |
6892 | error_at (loc, "%<#pragma omp declare reduction%> combiner refers to " |
6893 | "variable %qD which is not %<omp_out%> nor %<omp_in%>", |
6894 | *tp); |
6895 | else |
6896 | error_at (loc, "%<#pragma omp declare reduction%> initializer refers " |
6897 | "to variable %qD which is not %<omp_priv%> nor " |
6898 | "%<omp_orig%>", |
6899 | *tp); |
6900 | return *tp; |
6901 | } |
6902 | return NULL_TREE; |
6903 | } |
6904 | |
6905 | /* Diagnose violation of OpenMP #pragma omp declare reduction restrictions. */ |
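| /* For example, redeclaring one of the predefined reductions (such as "+" |
| or "min") for an arithmetic type is rejected here, as are function, |
| array, reference and cv-qualified types. */ |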
6906 | |
6907 | bool |
6908 | cp_check_omp_declare_reduction (tree udr) |
6909 | { |
6910 | tree type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (udr))); |
6911 | gcc_assert (TYPE_REF_P (type)); |
6912 | type = TREE_TYPE (type); |
6913 | int i; |
6914 | location_t loc = DECL_SOURCE_LOCATION (udr); |
6915 | |
6916 | if (type == error_mark_node) |
6917 | return false; |
6918 | if (ARITHMETIC_TYPE_P (type)) |
6919 | { |
6920 | static enum tree_code predef_codes[] |
6921 | = { PLUS_EXPR, MULT_EXPR, MINUS_EXPR, BIT_AND_EXPR, BIT_XOR_EXPR, |
6922 | BIT_IOR_EXPR, TRUTH_ANDIF_EXPR, TRUTH_ORIF_EXPR }; |
6923 | for (i = 0; i < 8; i++) |
6924 | { |
6925 | tree id = omp_reduction_id (predef_codes[i], NULL_TREE, NULL_TREE); |
6926 | const char *n1 = IDENTIFIER_POINTER (DECL_NAME (udr)); |
6927 | const char *n2 = IDENTIFIER_POINTER (id); |
6928 | if (strncmp (n1, n2, IDENTIFIER_LENGTH (id)) == 0 |
6929 | && (n1[IDENTIFIER_LENGTH (id)] == '~' |
6930 | || n1[IDENTIFIER_LENGTH (id)] == '\0')) |
6931 | break; |
6932 | } |
6933 | |
6934 | if (i == 8 |
6935 | && TREE_CODE (type) != COMPLEX_EXPR) |
6936 | { |
6937 | const char prefix_minmax[] = "omp declare reduction m"; |
6938 | size_t prefix_size = sizeof (prefix_minmax) - 1; |
6939 | const char *n = IDENTIFIER_POINTER (DECL_NAME (udr)); |
6940 | if (strncmp (IDENTIFIER_POINTER (DECL_NAME (udr)), |
6941 | prefix_minmax, prefix_size) == 0 |
6942 | && ((n[prefix_size] == 'i' && n[prefix_size + 1] == 'n') |
6943 | || (n[prefix_size] == 'a' && n[prefix_size + 1] == 'x')) |
6944 | && (n[prefix_size + 2] == '~' || n[prefix_size + 2] == '\0')) |
6945 | i = 0; |
6946 | } |
6947 | if (i < 8) |
6948 | { |
6949 | error_at (loc, "predeclared arithmetic type %qT in " |
6950 | "%<#pragma omp declare reduction%>", type); |
6951 | return false; |
6952 | } |
6953 | } |
6954 | else if (FUNC_OR_METHOD_TYPE_P (type) |
6955 | || TREE_CODE (type) == ARRAY_TYPE) |
6956 | { |
6957 | error_at (loc, "function or array type %qT in " |
6958 | "%<#pragma omp declare reduction%>", type); |
6959 | return false; |
6960 | } |
6961 | else if (TYPE_REF_P (type)) |
6962 | { |
6963 | error_at (loc, "reference type %qT in %<#pragma omp declare reduction%>", |
6964 | type); |
6965 | return false; |
6966 | } |
6967 | else if (TYPE_QUALS_NO_ADDR_SPACE (type)) |
6968 | { |
6969 | error_at (loc, "%<const%>, %<volatile%> or %<__restrict%>-qualified " |
6970 | "type %qT in %<#pragma omp declare reduction%>", type); |
6971 | return false; |
6972 | } |
6973 | |
6974 | tree body = DECL_SAVED_TREE (udr); |
6975 | if (body == NULL_TREE || TREE_CODE (body) != STATEMENT_LIST) |
6976 | return true; |
6977 | |
6978 | tree_stmt_iterator tsi; |
6979 | struct cp_check_omp_declare_reduction_data data; |
6980 | memset (data.stmts, 0, sizeof data.stmts); |
6981 | for (i = 0, tsi = tsi_start (body); |
6982 | i < 7 && !tsi_end_p (tsi); |
6983 | i++, tsi_next (&tsi)) |
6984 | data.stmts[i] = tsi_stmt (tsi); |
6985 | data.loc = loc; |
6986 | gcc_assert (tsi_end_p (tsi)); |
6987 | if (i >= 3) |
6988 | { |
6989 | gcc_assert (TREE_CODE (data.stmts[0]) == DECL_EXPR |
6990 | && TREE_CODE (data.stmts[1]) == DECL_EXPR); |
6991 | if (warning_suppressed_p (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */)) |
6992 | return true; |
6993 | data.combiner_p = true; |
6994 | if (cp_walk_tree (&data.stmts[2], cp_check_omp_declare_reduction_r, |
6995 | &data, NULL)) |
6996 | suppress_warning (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */); |
6997 | } |
6998 | if (i >= 6) |
6999 | { |
7000 | gcc_assert (TREE_CODE (data.stmts[3]) == DECL_EXPR |
7001 | && TREE_CODE (data.stmts[4]) == DECL_EXPR); |
7002 | data.combiner_p = false; |
7003 | if (cp_walk_tree (&data.stmts[5], cp_check_omp_declare_reduction_r, |
7004 | &data, NULL) |
7005 | || cp_walk_tree (&DECL_INITIAL (DECL_EXPR_DECL (data.stmts[3])), |
7006 | cp_check_omp_declare_reduction_r, &data, NULL)) |
7007 | suppress_warning (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */); |
7008 | if (i == 7) |
7009 | gcc_assert (TREE_CODE (data.stmts[6]) == DECL_EXPR); |
7010 | } |
7011 | return true; |
7012 | } |
7013 | |
7014 | /* Helper function of finish_omp_clauses. Clone STMT as if we were making |
7015 | an inline call. But, remap |
7016 | the OMP_DECL1 VAR_DECL (omp_out resp. omp_orig) to PLACEHOLDER |
7017 | and OMP_DECL2 VAR_DECL (omp_in resp. omp_priv) to DECL. */ |
7018 | |
7019 | static tree |
7020 | clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2, |
7021 | tree decl, tree placeholder) |
7022 | { |
7023 | copy_body_data id; |
7024 | hash_map<tree, tree> decl_map; |
7025 | |
7026 | decl_map.put (omp_decl1, placeholder); |
7027 | decl_map.put (omp_decl2, decl); |
7028 | memset (&id, 0, sizeof (id)); |
7029 | id.src_fn = DECL_CONTEXT (omp_decl1); |
7030 | id.dst_fn = current_function_decl; |
7031 | id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn); |
7032 | id.decl_map = &decl_map; |
7033 | |
7034 | id.copy_decl = copy_decl_no_change; |
7035 | id.transform_call_graph_edges = CB_CGE_DUPLICATE; |
7036 | id.transform_new_cfg = true; |
7037 | id.transform_return_to_modify = false; |
7038 | id.eh_lp_nr = 0; |
7039 | walk_tree (&stmt, copy_tree_body_r, &id, NULL); |
7040 | return stmt; |
7041 | } |
7042 | |
7043 | /* Helper function of finish_omp_clauses, called via cp_walk_tree. |
7044 | Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */ |
7045 | |
7046 | static tree |
7047 | find_omp_placeholder_r (tree *tp, int *, void *data) |
7048 | { |
7049 | if (*tp == (tree) data) |
7050 | return *tp; |
7051 | return NULL_TREE; |
7052 | } |
7053 | |
7054 | /* Helper function of finish_omp_clauses. Handle OMP_CLAUSE_REDUCTION C. |
7055 | Return true if there is some error and the clause should be removed. */ |
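| /* For predefined operators on arithmetic types the placeholder is simply |
| cleared; otherwise the matching "omp declare reduction" is looked up and |
| its combiner and initializer are cloned into OMP_CLAUSE_REDUCTION_MERGE |
| and OMP_CLAUSE_REDUCTION_INIT. */ |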
7056 | |
7057 | static bool |
7058 | finish_omp_reduction_clause (tree c, bool *need_default_ctor, bool *need_dtor) |
7059 | { |
7060 | tree t = OMP_CLAUSE_DECL (c); |
7061 | bool predefined = false; |
7062 | if (TREE_CODE (t) == TREE_LIST) |
7063 | { |
7064 | gcc_assert (processing_template_decl); |
7065 | return false; |
7066 | } |
7067 | tree type = TREE_TYPE (t); |
7068 | if (TREE_CODE (t) == MEM_REF) |
7069 | type = TREE_TYPE (type); |
7070 | if (TYPE_REF_P (type)) |
7071 | type = TREE_TYPE (type); |
7072 | if (TREE_CODE (type) == ARRAY_TYPE) |
7073 | { |
7074 | tree oatype = type; |
7075 | gcc_assert (TREE_CODE (t) != MEM_REF); |
7076 | while (TREE_CODE (type) == ARRAY_TYPE) |
7077 | type = TREE_TYPE (type); |
7078 | if (!processing_template_decl) |
7079 | { |
7080 | t = require_complete_type (t); |
7081 | if (t == error_mark_node |
7082 | || !complete_type_or_else (oatype, NULL_TREE)) |
7083 | return true; |
7084 | tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype), |
7085 | TYPE_SIZE_UNIT (type)); |
7086 | if (integer_zerop (size)) |
7087 | { |
7088 | error_at (OMP_CLAUSE_LOCATION (c), |
7089 | "%qE in %<reduction%> clause is a zero size array", |
7090 | omp_clause_printable_decl (t)); |
7091 | return true; |
7092 | } |
7093 | size = size_binop (MINUS_EXPR, size, size_one_node); |
7094 | size = save_expr (size); |
7095 | tree index_type = build_index_type (size); |
7096 | tree atype = build_array_type (type, index_type); |
7097 | tree ptype = build_pointer_type (type); |
7098 | if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) |
7099 | t = build_fold_addr_expr (t); |
7100 | t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0)); |
7101 | OMP_CLAUSE_DECL (c) = t; |
7102 | } |
7103 | } |
7104 | if (type == error_mark_node) |
7105 | return true; |
7106 | else if (ARITHMETIC_TYPE_P (type)) |
7107 | switch (OMP_CLAUSE_REDUCTION_CODE (c)) |
7108 | { |
7109 | case PLUS_EXPR: |
7110 | case MULT_EXPR: |
7111 | case MINUS_EXPR: |
7112 | case TRUTH_ANDIF_EXPR: |
7113 | case TRUTH_ORIF_EXPR: |
7114 | predefined = true; |
7115 | break; |
7116 | case MIN_EXPR: |
7117 | case MAX_EXPR: |
7118 | if (TREE_CODE (type) == COMPLEX_TYPE) |
7119 | break; |
7120 | predefined = true; |
7121 | break; |
7122 | case BIT_AND_EXPR: |
7123 | case BIT_IOR_EXPR: |
7124 | case BIT_XOR_EXPR: |
7125 | if (FLOAT_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE) |
7126 | break; |
7127 | predefined = true; |
7128 | break; |
7129 | default: |
7130 | break; |
7131 | } |
7132 | else if (TYPE_READONLY (type)) |
7133 | { |
7134 | error_at (OMP_CLAUSE_LOCATION (c), |
7135 | "%qE has const type for %<reduction%>", |
7136 | omp_clause_printable_decl (t)); |
7137 | return true; |
7138 | } |
7139 | else if (!processing_template_decl) |
7140 | { |
7141 | t = require_complete_type (t); |
7142 | if (t == error_mark_node) |
7143 | return true; |
7144 | OMP_CLAUSE_DECL (c) = t; |
7145 | } |
7146 | |
7147 | if (predefined) |
7148 | { |
7149 | OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE; |
7150 | return false; |
7151 | } |
7152 | else if (processing_template_decl) |
7153 | { |
7154 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node) |
7155 | return true; |
7156 | return false; |
7157 | } |
7158 | |
7159 | tree id = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); |
7160 | |
7161 | type = TYPE_MAIN_VARIANT (type); |
7162 | OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE; |
7163 | if (id == NULL_TREE) |
7164 | id = omp_reduction_id (OMP_CLAUSE_REDUCTION_CODE (c), |
7165 | NULL_TREE, NULL_TREE); |
7166 | id = omp_reduction_lookup (OMP_CLAUSE_LOCATION (c), id, type, NULL, NULL); |
7167 | if (id) |
7168 | { |
7169 | if (id == error_mark_node) |
7170 | return true; |
7171 | mark_used (id); |
7172 | tree body = DECL_SAVED_TREE (id); |
7173 | if (!body) |
7174 | return true; |
7175 | if (TREE_CODE (body) == STATEMENT_LIST) |
7176 | { |
7177 | tree_stmt_iterator tsi; |
7178 | tree placeholder = NULL_TREE, decl_placeholder = NULL_TREE; |
7179 | int i; |
7180 | tree stmts[7]; |
7181 | tree atype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (id))); |
7182 | atype = TREE_TYPE (atype); |
7183 | bool need_static_cast = !same_type_p (type, atype); |
7184 | memset (stmts, 0, sizeof stmts); |
7185 | for (i = 0, tsi = tsi_start (body); |
7186 | i < 7 && !tsi_end_p (tsi); |
7187 | i++, tsi_next (&tsi)) |
7188 | stmts[i] = tsi_stmt (tsi); |
7189 | gcc_assert (tsi_end_p (tsi)); |
7190 | |
7191 | if (i >= 3) |
7192 | { |
7193 | gcc_assert (TREE_CODE (stmts[0]) == DECL_EXPR |
7194 | && TREE_CODE (stmts[1]) == DECL_EXPR); |
7195 | placeholder = build_lang_decl (VAR_DECL, NULL_TREE, type); |
7196 | DECL_ARTIFICIAL (placeholder) = 1; |
7197 | DECL_IGNORED_P (placeholder) = 1; |
7198 | OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder; |
7199 | if (TREE_CODE (t) == MEM_REF) |
7200 | { |
7201 | decl_placeholder = build_lang_decl (VAR_DECL, NULL_TREE, |
7202 | type); |
7203 | DECL_ARTIFICIAL (decl_placeholder) = 1; |
7204 | DECL_IGNORED_P (decl_placeholder) = 1; |
7205 | OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder; |
7206 | } |
7207 | if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[0]))) |
7208 | cxx_mark_addressable (placeholder); |
7209 | if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[1])) |
7210 | && (decl_placeholder |
7211 | || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))) |
7212 | cxx_mark_addressable (decl_placeholder ? decl_placeholder |
7213 | : OMP_CLAUSE_DECL (c)); |
7214 | tree omp_out = placeholder; |
7215 | tree omp_in = decl_placeholder ? decl_placeholder |
7216 | : convert_from_reference (OMP_CLAUSE_DECL (c)); |
7217 | if (need_static_cast) |
7218 | { |
7219 | tree rtype = build_reference_type (atype); |
7220 | omp_out = build_static_cast (input_location, |
7221 | rtype, omp_out, |
7222 | tf_warning_or_error); |
7223 | omp_in = build_static_cast (input_location, |
7224 | rtype, omp_in, |
7225 | tf_warning_or_error); |
7226 | if (omp_out == error_mark_node || omp_in == error_mark_node) |
7227 | return true; |
7228 | omp_out = convert_from_reference (omp_out); |
7229 | omp_in = convert_from_reference (omp_in); |
7230 | } |
7231 | OMP_CLAUSE_REDUCTION_MERGE (c) |
7232 | = clone_omp_udr (stmts[2], DECL_EXPR_DECL (stmts[0]), |
7233 | DECL_EXPR_DECL (stmts[1]), omp_in, omp_out); |
7234 | } |
7235 | if (i >= 6) |
7236 | { |
7237 | gcc_assert (TREE_CODE (stmts[3]) == DECL_EXPR |
7238 | && TREE_CODE (stmts[4]) == DECL_EXPR); |
7239 | if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[3])) |
7240 | && (decl_placeholder |
7241 | || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))) |
7242 | cxx_mark_addressable (decl_placeholder ? decl_placeholder |
7243 | : OMP_CLAUSE_DECL (c)); |
7244 | if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[4]))) |
7245 | cxx_mark_addressable (placeholder); |
7246 | tree omp_priv = decl_placeholder ? decl_placeholder |
7247 | : convert_from_reference (OMP_CLAUSE_DECL (c)); |
7248 | tree omp_orig = placeholder; |
7249 | if (need_static_cast) |
7250 | { |
7251 | if (i == 7) |
7252 | { |
7253 | error_at (OMP_CLAUSE_LOCATION (c), |
7254 | "user defined reduction with constructor " |
7255 | "initializer for base class %qT", atype); |
7256 | return true; |
7257 | } |
7258 | tree rtype = build_reference_type (atype); |
7259 | omp_priv = build_static_cast (input_location, |
7260 | rtype, omp_priv, |
7261 | tf_warning_or_error); |
7262 | omp_orig = build_static_cast (input_location, |
7263 | rtype, omp_orig, |
7264 | tf_warning_or_error); |
7265 | if (omp_priv == error_mark_node |
7266 | || omp_orig == error_mark_node) |
7267 | return true; |
7268 | omp_priv = convert_from_reference (omp_priv); |
7269 | omp_orig = convert_from_reference (omp_orig); |
7270 | } |
7271 | if (i == 6) |
7272 | *need_default_ctor = true; |
7273 | OMP_CLAUSE_REDUCTION_INIT (c) |
7274 | = clone_omp_udr (stmts[5], DECL_EXPR_DECL (stmts[4]), |
7275 | DECL_EXPR_DECL (stmts[3]), |
7276 | omp_priv, omp_orig); |
7277 | if (cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c), |
7278 | find_omp_placeholder_r, placeholder, NULL)) |
7279 | OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1; |
7280 | } |
7281 | else if (i >= 3) |
7282 | { |
7283 | if (CLASS_TYPE_P (type) && !pod_type_p (type)) |
7284 | *need_default_ctor = true; |
7285 | else |
7286 | { |
7287 | tree init; |
7288 | tree v = decl_placeholder ? decl_placeholder |
7289 | : convert_from_reference (t); |
7290 | if (AGGREGATE_TYPE_P (TREE_TYPE (v))) |
7291 | init = build_constructor (TREE_TYPE (v), NULL); |
7292 | else |
7293 | init = fold_convert (TREE_TYPE (v), integer_zero_node); |
7294 | OMP_CLAUSE_REDUCTION_INIT (c) |
7295 | = cp_build_init_expr (v, init); |
7296 | } |
7297 | } |
7298 | } |
7299 | } |
7300 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) |
7301 | *need_dtor = true; |
7302 | else |
7303 | { |
7304 | error_at (OMP_CLAUSE_LOCATION (c), |
7305 | "user defined reduction not found for %qE", |
7306 | omp_clause_printable_decl (t)); |
7307 | return true; |
7308 | } |
7309 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF) |
7310 | gcc_assert (TYPE_SIZE_UNIT (type) |
7311 | && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST); |
7312 | return false; |
7313 | } |
7314 | |
7315 | /* Check an instance of an "omp declare mapper" function. */ |
7316 | |
7317 | bool |
7318 | cp_check_omp_declare_mapper (tree udm) |
7319 | { |
7320 | tree type = TREE_TYPE (udm); |
7321 | location_t loc = DECL_SOURCE_LOCATION (udm); |
7322 | |
7323 | if (type == error_mark_node) |
7324 | return false; |
7325 | |
7326 | if (!processing_template_decl && !RECORD_OR_UNION_TYPE_P (type)) |
7327 | { |
7328 | error_at (loc, "%qT is not a struct, union or class type in " |
7329 | "%<#pragma omp declare mapper%>", type); |
7330 | return false; |
7331 | } |
7332 | if (!processing_template_decl && CLASSTYPE_VBASECLASSES (type)) |
7333 | { |
7334 | error_at (loc, "%qT must not be a virtual base class in " |
7335 | "%<#pragma omp declare mapper%>", type); |
7336 | return false; |
7337 | } |
7338 | |
7339 | return true; |
7340 | } |
7341 | |
7342 | /* Called from finish_struct_1. linear(this) or linear(this:step) |
7343 | clauses might not be finalized yet because the class was still incomplete |
7344 | when parsing #pragma omp declare simd methods. Fix those up now. */ |
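| /* Because "this" has pointer type, the step recorded for it is multiplied |
| here by the size of the class, which is only known once the class is |
| complete; that is why the fixup is deferred to finish_struct_1. */ |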
7345 | |
7346 | void |
7347 | finish_omp_declare_simd_methods (tree t) |
7348 | { |
7349 | if (processing_template_decl) |
7350 | return; |
7351 | |
7352 | for (tree x = TYPE_FIELDS (t); x; x = DECL_CHAIN (x)) |
7353 | { |
7354 | if (TREE_CODE (x) == USING_DECL |
7355 | || !DECL_IOBJ_MEMBER_FUNCTION_P (x)) |
7356 | continue; |
7357 | tree ods = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (x)); |
7358 | if (!ods || !TREE_VALUE (ods)) |
7359 | continue; |
7360 | for (tree c = TREE_VALUE (TREE_VALUE (ods)); c; c = OMP_CLAUSE_CHAIN (c)) |
7361 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR |
7362 | && integer_zerop (OMP_CLAUSE_DECL (c)) |
7363 | && OMP_CLAUSE_LINEAR_STEP (c) |
7364 | && TYPE_PTR_P (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c)))) |
7365 | { |
7366 | tree s = OMP_CLAUSE_LINEAR_STEP (c); |
7367 | s = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, s); |
7368 | s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MULT_EXPR, |
7369 | sizetype, s, TYPE_SIZE_UNIT (t)); |
7370 | OMP_CLAUSE_LINEAR_STEP (c) = s; |
7371 | } |
7372 | } |
7373 | } |
7374 | |
7375 | /* Adjust sink depend/doacross clause to take into account pointer offsets. |
7376 | |
7377 | Return TRUE if there was a problem processing the offset, and the |
7378 | whole clause should be removed. */ |
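| /* E.g. for doacross(sink: p - 2) where p has pointer type, the constant 2 |
| is scaled by the size of the pointed-to type so that the offset is |
| recorded in bytes. */ |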
7379 | |
7380 | static bool |
7381 | cp_finish_omp_clause_doacross_sink (tree sink_clause) |
7382 | { |
7383 | tree t = OMP_CLAUSE_DECL (sink_clause); |
7384 | gcc_assert (TREE_CODE (t) == TREE_LIST); |
7385 | |
7386 | /* Make sure we don't adjust things twice for templates. */ |
7387 | if (processing_template_decl) |
7388 | return false; |
7389 | |
7390 | for (; t; t = TREE_CHAIN (t)) |
7391 | { |
7392 | tree decl = TREE_VALUE (t); |
7393 | if (TYPE_PTR_P (TREE_TYPE (decl))) |
7394 | { |
7395 | tree offset = TREE_PURPOSE (t); |
7396 | bool neg = wi::neg_p (wi::to_wide (offset)); |
7397 | offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset); |
7398 | decl = mark_rvalue_use (decl); |
7399 | decl = convert_from_reference (decl); |
7400 | tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (sink_clause), |
7401 | neg ? MINUS_EXPR : PLUS_EXPR, |
7402 | decl, offset); |
7403 | t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (sink_clause), |
7404 | MINUS_EXPR, sizetype, |
7405 | fold_convert (sizetype, t2), |
7406 | fold_convert (sizetype, decl)); |
7407 | if (t2 == error_mark_node) |
7408 | return true; |
7409 | TREE_PURPOSE (t) = t2; |
7410 | } |
7411 | } |
7412 | return false; |
7413 | } |
7414 | |
7415 | /* Finish OpenMP iterators ITER. Return true if they are erroneous |
7416 | and clauses containing them should be removed. */ |
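| /* Iterators appear in clauses such as depend(iterator(i = 0:n), in: a[i]); |
| each TREE_VEC in ITER holds the iterator variable together with its |
| begin, end and step expressions. */ |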
7417 | |
7418 | static bool |
7419 | cp_omp_finish_iterators (tree iter) |
7420 | { |
7421 | bool ret = false; |
7422 | for (tree it = iter; it; it = TREE_CHAIN (it)) |
7423 | { |
7424 | tree var = TREE_VEC_ELT (it, 0); |
7425 | tree begin = TREE_VEC_ELT (it, 1); |
7426 | tree end = TREE_VEC_ELT (it, 2); |
7427 | tree step = TREE_VEC_ELT (it, 3); |
7428 | tree orig_step; |
7429 | tree type = TREE_TYPE (var); |
7430 | location_t loc = DECL_SOURCE_LOCATION (var); |
7431 | if (type == error_mark_node) |
7432 | { |
7433 | ret = true; |
7434 | continue; |
7435 | } |
7436 | if (type_dependent_expression_p (var)) |
7437 | continue; |
7438 | if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) |
7439 | { |
7440 | error_at (loc, "iterator %qD has neither integral nor pointer type", |
7441 | var); |
7442 | ret = true; |
7443 | continue; |
7444 | } |
7445 | else if (TYPE_READONLY (type)) |
7446 | { |
7447 | error_at (loc, "iterator %qD has const qualified type", var); |
7448 | ret = true; |
7449 | continue; |
7450 | } |
7451 | if (type_dependent_expression_p (begin) |
7452 | || type_dependent_expression_p (end) |
7453 | || type_dependent_expression_p (step)) |
7454 | continue; |
7455 | else if (error_operand_p (step)) |
7456 | { |
7457 | ret = true; |
7458 | continue; |
7459 | } |
7460 | else if (!INTEGRAL_TYPE_P (TREE_TYPE (step))) |
7461 | { |
7462 | error_at (EXPR_LOC_OR_LOC (step, loc), |
7463 | "iterator step with non-integral type"); |
7464 | ret = true; |
7465 | continue; |
7466 | } |
7467 | |
7468 | begin = mark_rvalue_use (begin); |
7469 | end = mark_rvalue_use (end); |
7470 | step = mark_rvalue_use (step); |
7471 | begin = cp_build_c_cast (input_location, type, begin, |
7472 | tf_warning_or_error); |
7473 | end = cp_build_c_cast (input_location, type, end, |
7474 | tf_warning_or_error); |
7475 | orig_step = step; |
7476 | if (!processing_template_decl) |
7477 | step = orig_step = save_expr (step); |
7478 | tree stype = POINTER_TYPE_P (type) ? sizetype : type; |
7479 | step = cp_build_c_cast (input_location, stype, step, |
7480 | tf_warning_or_error); |
7481 | if (POINTER_TYPE_P (type) && !processing_template_decl) |
7482 | { |
7483 | begin = save_expr (begin); |
7484 | step = pointer_int_sum (loc, PLUS_EXPR, begin, step); |
7485 | step = fold_build2_loc (loc, MINUS_EXPR, sizetype, |
7486 | fold_convert (sizetype, step), |
7487 | fold_convert (sizetype, begin)); |
7488 | step = fold_convert (ssizetype, step); |
7489 | } |
7490 | if (!processing_template_decl) |
7491 | { |
7492 | begin = maybe_constant_value (begin); |
7493 | end = maybe_constant_value (end); |
7494 | step = maybe_constant_value (step); |
7495 | orig_step = maybe_constant_value (orig_step); |
7496 | } |
7497 | if (integer_zerop (step)) |
7498 | { |
7499 | error_at (loc, "iterator %qD has zero step", var); |
7500 | ret = true; |
7501 | continue; |
7502 | } |
7503 | |
7504 | if (begin == error_mark_node |
7505 | || end == error_mark_node |
7506 | || step == error_mark_node |
7507 | || orig_step == error_mark_node) |
7508 | { |
7509 | ret = true; |
7510 | continue; |
7511 | } |
7512 | |
7513 | if (!processing_template_decl) |
7514 | { |
7515 | begin = fold_build_cleanup_point_expr (TREE_TYPE (begin), begin); |
7516 | end = fold_build_cleanup_point_expr (TREE_TYPE (end), end); |
7517 | step = fold_build_cleanup_point_expr (TREE_TYPE (step), step); |
7518 | orig_step = fold_build_cleanup_point_expr (TREE_TYPE (orig_step), |
7519 | orig_step); |
7520 | } |
7521 | hash_set<tree> pset; |
7522 | tree it2; |
7523 | for (it2 = TREE_CHAIN (it); it2; it2 = TREE_CHAIN (it2)) |
7524 | { |
7525 | tree var2 = TREE_VEC_ELT (it2, 0); |
7526 | tree begin2 = TREE_VEC_ELT (it2, 1); |
7527 | tree end2 = TREE_VEC_ELT (it2, 2); |
7528 | tree step2 = TREE_VEC_ELT (it2, 3); |
7529 | location_t loc2 = DECL_SOURCE_LOCATION (var2); |
7530 | if (cp_walk_tree (&begin2, find_omp_placeholder_r, var, &pset)) |
7531 | { |
7532 | error_at (EXPR_LOC_OR_LOC (begin2, loc2), |
7533 | "begin expression refers to outer iterator %qD", var); |
7534 | break; |
7535 | } |
7536 | else if (cp_walk_tree (&end2, find_omp_placeholder_r, var, &pset)) |
7537 | { |
7538 | error_at (EXPR_LOC_OR_LOC (end2, loc2), |
7539 | "end expression refers to outer iterator %qD", var); |
7540 | break; |
7541 | } |
7542 | else if (cp_walk_tree (&step2, find_omp_placeholder_r, var, &pset)) |
7543 | { |
7544 | error_at (EXPR_LOC_OR_LOC (step2, loc2), |
7545 | "step expression refers to outer iterator %qD", var); |
7546 | break; |
7547 | } |
7548 | } |
7549 | if (it2) |
7550 | { |
7551 | ret = true; |
7552 | continue; |
7553 | } |
7554 | TREE_VEC_ELT (it, 1) = begin; |
7555 | TREE_VEC_ELT (it, 2) = end; |
7556 | if (processing_template_decl) |
7557 | TREE_VEC_ELT (it, 3) = orig_step; |
7558 | else |
7559 | { |
7560 | TREE_VEC_ELT (it, 3) = step; |
7561 | TREE_VEC_ELT (it, 4) = orig_step; |
7562 | } |
7563 | } |
7564 | return ret; |
7565 | } |
7566 | |
7567 | /* Ensure that pointers are used in OpenACC attach and detach clauses. |
7568 | Return true if an error has been detected. */ |
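| /* E.g. attach(p) is only valid when p (after stripping any array section |
| and reference) has pointer type. */ |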
7569 | |
7570 | static bool |
7571 | cp_oacc_check_attachments (tree c) |
7572 | { |
7573 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
7574 | return false; |
7575 | |
7576 | /* OpenACC attach / detach clauses must be pointers. */ |
7577 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
7578 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
7579 | { |
7580 | tree t = OMP_CLAUSE_DECL (c); |
7581 | tree type; |
7582 | |
7583 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7584 | t = TREE_OPERAND (t, 0); |
7585 | |
7586 | type = TREE_TYPE (t); |
7587 | |
7588 | if (TREE_CODE (type) == REFERENCE_TYPE) |
7589 | type = TREE_TYPE (type); |
7590 | |
7591 | if (TREE_CODE (type) != POINTER_TYPE) |
7592 | { |
7593 | error_at (OMP_CLAUSE_LOCATION (c), "expected pointer in %qs clause", |
7594 | user_omp_clause_code_name (c, true)); |
7595 | return true; |
7596 | } |
7597 | } |
7598 | |
7599 | return false; |
7600 | } |
7601 | |
7602 | /* Update OMP_CLAUSE_INIT_PREFER_TYPE in case template substitution |
7603 | happened. */ |
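| /* The TREE_PURPOSE string encodes the preferred foreign-runtime ids; any |
| id that was supplied as a constant integer expression (possibly only |
| known after template substitution) is folded and stored back into that |
| string here. */ |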
7604 | |
7605 | tree |
7606 | cp_finish_omp_init_prefer_type (tree pref_type) |
7607 | { |
7608 | if (processing_template_decl |
7609 | || pref_type == NULL_TREE |
7610 | || TREE_CODE (pref_type) != TREE_LIST) |
7611 | return pref_type; |
7612 | |
7613 | tree t = TREE_PURPOSE (pref_type); |
7614 | char *str = const_cast<char *> (TREE_STRING_POINTER (t)); |
7615 | tree fr_list = TREE_VALUE (pref_type); |
7616 | int len = TREE_VEC_LENGTH (fr_list); |
7617 | int cnt = 0; |
7618 | |
7619 | while (str[0] == (char) GOMP_INTEROP_IFR_SEPARATOR) |
7620 | { |
7621 | str++; |
7622 | if (str[0] == (char) GOMP_INTEROP_IFR_UNKNOWN) |
7623 | { |
7624 | /* Assume either no 'fr' or a single 'fr'. */ |
7625 | gcc_checking_assert (str[1] == (char) GOMP_INTEROP_IFR_SEPARATOR); |
7626 | location_t loc = UNKNOWN_LOCATION; |
7627 | tree value = TREE_VEC_ELT (fr_list, cnt); |
7628 | if (value != NULL_TREE && value != error_mark_node) |
7629 | { |
7630 | loc = EXPR_LOCATION (value); |
7631 | if (value && TREE_CODE (value) == NOP_EXPR) |
7632 | value = TREE_OPERAND (value, 0); |
7633 | value = cp_fully_fold (value); |
7634 | } |
7635 | if (value != NULL_TREE && value != error_mark_node) |
7636 | { |
7637 | if (TREE_CODE (value) != INTEGER_CST |
7638 | || !tree_fits_shwi_p (value)) |
7639 | error_at (loc, |
7640 | "expected string literal or " |
7641 | "constant integer expression instead of %qE", value); |
7642 | else |
7643 | { |
7644 | HOST_WIDE_INT n = tree_to_shwi (value); |
7645 | if (n < 1 || n > GOMP_INTEROP_IFR_LAST) |
7646 | { |
7647 | warning_at (loc, OPT_Wopenmp, |
7648 | "unknown foreign runtime identifier %qwd", n); |
7649 | n = GOMP_INTEROP_IFR_UNKNOWN; |
7650 | } |
7651 | str[0] = (char) n; |
7652 | } |
7653 | } |
7654 | str++; |
7655 | } |
7656 | else if (str[0] != (char) GOMP_INTEROP_IFR_SEPARATOR) |
7657 | { |
7658 | /* Assume either no 'fr' or a single 'fr'. */ |
7659 | gcc_checking_assert (str[1] == (char) GOMP_INTEROP_IFR_SEPARATOR); |
7660 | str++; |
7661 | } |
7662 | str++; |
7663 | while (str[0] != '\0') |
7664 | str += strlen (str) + 1; |
7665 | str++; |
7666 | cnt++; |
7667 | if (cnt >= len) |
7668 | break; |
7669 | } |
7670 | return t; |
7671 | } |
7672 | |
7673 | /* For all elements of CLAUSES, validate them vs OpenMP constraints. |
7674 | Remove any elements from the list that are invalid. */ |
7675 | |
7676 | tree |
7677 | finish_omp_clauses (tree clauses, enum c_omp_region_type ort) |
7678 | { |
7679 | bitmap_head generic_head, firstprivate_head, lastprivate_head; |
7680 | bitmap_head aligned_head, map_head, map_field_head, map_firstprivate_head; |
7681 | bitmap_head oacc_reduction_head, is_on_device_head; |
7682 | tree c, t, *pc; |
7683 | tree safelen = NULL_TREE; |
7684 | bool openacc = (ort & C_ORT_ACC) != 0; |
7685 | bool branch_seen = false; |
7686 | bool copyprivate_seen = false; |
7687 | bool ordered_seen = false; |
7688 | bool order_seen = false; |
7689 | bool schedule_seen = false; |
7690 | bool oacc_async = false; |
7691 | bool indir_component_ref_p = false; |
7692 | tree last_iterators = NULL_TREE; |
7693 | bool last_iterators_remove = false; |
7694 | /* 1 if normal/task reduction has been seen, -1 if inscan reduction |
7695 | has been seen, -2 if mixed inscan/normal reduction diagnosed. */ |
7696 | int reduction_seen = 0; |
7697 | bool allocate_seen = false; |
7698 | tree detach_seen = NULL_TREE; |
7699 | bool mergeable_seen = false; |
7700 | bool implicit_moved = false; |
7701 | bool target_in_reduction_seen = false; |
7702 | bool num_tasks_seen = false; |
7703 | bool partial_seen = false; |
7704 | bool init_seen = false; |
7705 | bool init_use_destroy_seen = false; |
7706 | tree init_no_targetsync_clause = NULL_TREE; |
7707 | tree depend_clause = NULL_TREE; |
7708 | |
7709 | bitmap_obstack_initialize (NULL); |
7710 | bitmap_initialize (&generic_head, &bitmap_default_obstack); |
7711 | bitmap_initialize (&firstprivate_head, &bitmap_default_obstack); |
7712 | bitmap_initialize (&lastprivate_head, &bitmap_default_obstack); |
7713 | bitmap_initialize (&aligned_head, &bitmap_default_obstack); |
7714 | /* If ort == C_ORT_OMP_DECLARE_SIMD used as uniform_head instead. */ |
7715 | bitmap_initialize (&map_head, &bitmap_default_obstack); |
7716 | bitmap_initialize (&map_field_head, &bitmap_default_obstack); |
7717 | bitmap_initialize (&map_firstprivate_head, &bitmap_default_obstack); |
7718 | /* If ort == C_ORT_OMP used as nontemporal_head or use_device_xxx_head |
7719 | instead and for ort == C_ORT_OMP_TARGET used as in_reduction_head. */ |
7720 | bitmap_initialize (&oacc_reduction_head, &bitmap_default_obstack); |
7721 | bitmap_initialize (&is_on_device_head, &bitmap_default_obstack); |
7722 | |
7723 | if (openacc) |
7724 | for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
7725 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC) |
7726 | { |
7727 | oacc_async = true; |
7728 | break; |
7729 | } |
7730 | |
7731 | tree *grp_start_p = NULL, grp_sentinel = NULL_TREE; |
7732 | |
7733 | for (pc = &clauses, c = clauses; c ; c = *pc) |
7734 | { |
7735 | bool remove = false; |
7736 | bool field_ok = false; |
7737 | |
7738 | /* We've reached the end of a list of expanded nodes. Reset the group |
7739 | start pointer. */ |
7740 | if (c == grp_sentinel) |
7741 | grp_start_p = NULL; |
7742 | |
7743 | switch (OMP_CLAUSE_CODE (c)) |
7744 | { |
7745 | case OMP_CLAUSE_SHARED: |
7746 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7747 | goto check_dup_generic; |
7748 | case OMP_CLAUSE_PRIVATE: |
7749 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7750 | goto check_dup_generic; |
7751 | case OMP_CLAUSE_REDUCTION: |
7752 | if (reduction_seen == 0) |
7753 | reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1; |
7754 | else if (reduction_seen != -2 |
7755 | && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c) |
7756 | ? -1 : 1)) |
7757 | { |
7758 | error_at (OMP_CLAUSE_LOCATION (c), |
7759 | "%<inscan%> and non-%<inscan%> %<reduction%> clauses " |
7760 | "on the same construct"); |
7761 | reduction_seen = -2; |
7762 | } |
7763 | /* FALLTHRU */ |
7764 | case OMP_CLAUSE_IN_REDUCTION: |
7765 | case OMP_CLAUSE_TASK_REDUCTION: |
7766 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7767 | t = OMP_CLAUSE_DECL (c); |
7768 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7769 | { |
7770 | if (handle_omp_array_sections (c, ort)) |
7771 | { |
7772 | remove = true; |
7773 | break; |
7774 | } |
7775 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
7776 | && OMP_CLAUSE_REDUCTION_INSCAN (c)) |
7777 | { |
7778 | error_at (OMP_CLAUSE_LOCATION (c), |
7779 | "%<inscan%> %<reduction%> clause with array " |
7780 | "section"); |
7781 | remove = true; |
7782 | break; |
7783 | } |
7784 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7785 | { |
7786 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
7787 | t = TREE_OPERAND (t, 0); |
7788 | } |
7789 | else |
7790 | { |
7791 | gcc_assert (TREE_CODE (t) == MEM_REF); |
7792 | t = TREE_OPERAND (t, 0); |
7793 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
7794 | t = TREE_OPERAND (t, 0); |
7795 | if (TREE_CODE (t) == ADDR_EXPR |
7796 | || INDIRECT_REF_P (t)) |
7797 | t = TREE_OPERAND (t, 0); |
7798 | } |
7799 | tree n = omp_clause_decl_field (t); |
7800 | if (n) |
7801 | t = n; |
7802 | goto check_dup_generic_t; |
7803 | } |
7804 | if (oacc_async) |
7805 | cxx_mark_addressable (t); |
7806 | goto check_dup_generic; |
7807 | case OMP_CLAUSE_COPYPRIVATE: |
7808 | copyprivate_seen = true; |
7809 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7810 | goto check_dup_generic; |
7811 | case OMP_CLAUSE_COPYIN: |
7812 | goto check_dup_generic; |
7813 | case OMP_CLAUSE_LINEAR: |
7814 | field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP); |
7815 | t = OMP_CLAUSE_DECL (c); |
7816 | if (ort != C_ORT_OMP_DECLARE_SIMD |
7817 | && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT) |
7818 | { |
7819 | if (OMP_CLAUSE_LINEAR_OLD_LINEAR_MODIFIER (c)) |
7820 | { |
7821 | error_at (OMP_CLAUSE_LOCATION (c), |
7822 | "modifier should not be specified in %<linear%> " |
7823 | "clause on %<simd%> or %<for%> constructs when " |
7824 | "not using OpenMP 5.2 modifiers"); |
7825 | OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; |
7826 | } |
7827 | else if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_VAL) |
7828 | { |
7829 | error_at (OMP_CLAUSE_LOCATION (c), |
7830 | "modifier other than %<val%> specified in " |
7831 | "%<linear%> clause on %<simd%> or %<for%> " |
7832 | "constructs when using OpenMP 5.2 modifiers"); |
7833 | OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT; |
7834 | } |
7835 | } |
7836 | if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
7837 | && !type_dependent_expression_p (t)) |
7838 | { |
7839 | tree type = TREE_TYPE (t); |
7840 | if ((OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF |
7841 | || OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_UVAL) |
7842 | && !TYPE_REF_P (type)) |
7843 | { |
7844 | error_at (OMP_CLAUSE_LOCATION (c), |
7845 | "linear clause with %qs modifier applied to " |
7846 | "non-reference variable with %qT type", |
7847 | OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF |
7848 | ? "ref": "uval", TREE_TYPE (t)); |
7849 | remove = true; |
7850 | break; |
7851 | } |
7852 | if (TYPE_REF_P (type)) |
7853 | type = TREE_TYPE (type); |
7854 | if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF) |
7855 | { |
7856 | if (!INTEGRAL_TYPE_P (type) |
7857 | && !TYPE_PTR_P (type)) |
7858 | { |
7859 | error_at (OMP_CLAUSE_LOCATION (c), |
7860 | "linear clause applied to non-integral " |
7861 | "non-pointer variable with %qT type", |
7862 | TREE_TYPE (t)); |
7863 | remove = true; |
7864 | break; |
7865 | } |
7866 | } |
7867 | } |
7868 | t = OMP_CLAUSE_LINEAR_STEP (c); |
7869 | if (t == NULL_TREE) |
7870 | t = integer_one_node; |
7871 | if (t == error_mark_node) |
7872 | { |
7873 | remove = true; |
7874 | break; |
7875 | } |
7876 | else if (!type_dependent_expression_p (t) |
7877 | && !INTEGRAL_TYPE_P (TREE_TYPE (t)) |
7878 | && (ort != C_ORT_OMP_DECLARE_SIMD |
7879 | || TREE_CODE (t) != PARM_DECL |
7880 | || !TYPE_REF_P (TREE_TYPE (t)) |
7881 | || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (t))))) |
7882 | { |
7883 | error_at (OMP_CLAUSE_LOCATION (c), |
7884 | "linear step expression must be integral"); |
7885 | remove = true; |
7886 | break; |
7887 | } |
7888 | else |
7889 | { |
7890 | t = mark_rvalue_use (t); |
7891 | if (ort == C_ORT_OMP_DECLARE_SIMD && TREE_CODE (t) == PARM_DECL) |
7892 | { |
7893 | OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1; |
7894 | goto check_dup_generic; |
7895 | } |
7896 | if (!processing_template_decl |
7897 | && (VAR_P (OMP_CLAUSE_DECL (c)) |
7898 | || TREE_CODE (OMP_CLAUSE_DECL (c)) == PARM_DECL)) |
7899 | { |
7900 | if (ort == C_ORT_OMP_DECLARE_SIMD) |
7901 | { |
7902 | t = maybe_constant_value (t); |
7903 | if (TREE_CODE (t) != INTEGER_CST) |
7904 | { |
7905 | error_at (OMP_CLAUSE_LOCATION (c), |
7906 | "%<linear%> clause step %qE is neither " |
7907 | "constant nor a parameter", t); |
7908 | remove = true; |
7909 | break; |
7910 | } |
7911 | } |
7912 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
7913 | tree type = TREE_TYPE (OMP_CLAUSE_DECL (c)); |
7914 | if (TYPE_REF_P (type)) |
7915 | type = TREE_TYPE (type); |
7916 | if (OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF) |
7917 | { |
7918 | type = build_pointer_type (type); |
7919 | tree d = fold_convert (type, OMP_CLAUSE_DECL (c)); |
7920 | t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
7921 | d, t); |
7922 | t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), |
7923 | MINUS_EXPR, sizetype, |
7924 | fold_convert (sizetype, t), |
7925 | fold_convert (sizetype, d)); |
7926 | if (t == error_mark_node) |
7927 | { |
7928 | remove = true; |
7929 | break; |
7930 | } |
7931 | } |
7932 | else if (TYPE_PTR_P (type) |
7933 | /* Can't multiply the step yet if *this |
7934 | still has an incomplete type. */ |
7935 | && (ort != C_ORT_OMP_DECLARE_SIMD |
7936 | || TREE_CODE (OMP_CLAUSE_DECL (c)) != PARM_DECL |
7937 | || !DECL_ARTIFICIAL (OMP_CLAUSE_DECL (c)) |
7938 | || DECL_NAME (OMP_CLAUSE_DECL (c)) |
7939 | != this_identifier |
7940 | || !TYPE_BEING_DEFINED (TREE_TYPE (type)))) |
7941 | { |
7942 | tree d = convert_from_reference (OMP_CLAUSE_DECL (c)); |
7943 | t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR, |
7944 | d, t); |
7945 | t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), |
7946 | MINUS_EXPR, sizetype, |
7947 | fold_convert (sizetype, t), |
7948 | fold_convert (sizetype, d)); |
7949 | if (t == error_mark_node) |
7950 | { |
7951 | remove = true; |
7952 | break; |
7953 | } |
7954 | } |
7955 | else |
7956 | t = fold_convert (type, t); |
7957 | } |
7958 | OMP_CLAUSE_LINEAR_STEP (c) = t; |
7959 | } |
7960 | goto check_dup_generic; |
7961 | check_dup_generic: |
7962 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
7963 | if (t) |
7964 | { |
7965 | if (!remove && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED) |
7966 | omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); |
7967 | } |
7968 | else |
7969 | t = OMP_CLAUSE_DECL (c); |
7970 | check_dup_generic_t: |
7971 | if (t == current_class_ptr |
7972 | && ((ort != C_ORT_OMP_DECLARE_SIMD && !openacc) |
7973 | || (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR |
7974 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_UNIFORM))) |
7975 | { |
7976 | error_at (OMP_CLAUSE_LOCATION (c), |
7977 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
7978 | " clauses"); |
7979 | remove = true; |
7980 | break; |
7981 | } |
7982 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL |
7983 | && (!field_ok || TREE_CODE (t) != FIELD_DECL)) |
7984 | { |
7985 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
7986 | break; |
7987 | if (DECL_P (t)) |
7988 | error_at (OMP_CLAUSE_LOCATION (c), |
7989 | "%qD is not a variable in clause %qs", t, |
7990 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7991 | else |
7992 | error_at (OMP_CLAUSE_LOCATION (c), |
7993 | "%qE is not a variable in clause %qs", t, |
7994 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
7995 | remove = true; |
7996 | } |
7997 | else if ((openacc |
7998 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) |
7999 | || (ort == C_ORT_OMP |
8000 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
8001 | || (OMP_CLAUSE_CODE (c) |
8002 | == OMP_CLAUSE_USE_DEVICE_ADDR))) |
8003 | || (ort == C_ORT_OMP_TARGET |
8004 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)) |
8005 | { |
8006 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
8007 | && (bitmap_bit_p (&generic_head, DECL_UID (t)) |
8008 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)))) |
8009 | { |
8010 | error_at (OMP_CLAUSE_LOCATION (c), |
8011 | "%qD appears more than once in data-sharing " |
8012 | "clauses", t); |
8013 | remove = true; |
8014 | break; |
8015 | } |
8016 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION) |
8017 | target_in_reduction_seen = true; |
8018 | if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
8019 | { |
8020 | error_at (OMP_CLAUSE_LOCATION (c), |
8021 | openacc |
8022 | ? "%qD appears more than once in reduction clauses" |
8023 | : "%qD appears more than once in data clauses", |
8024 | t); |
8025 | remove = true; |
8026 | } |
8027 | else |
8028 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
8029 | } |
8030 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
8031 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
8032 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t)) |
8033 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
8034 | { |
8035 | error_at (OMP_CLAUSE_LOCATION (c), |
8036 | "%qD appears more than once in data clauses", t); |
8037 | remove = true; |
8038 | } |
8039 | else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
8040 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR |
8041 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
8042 | && bitmap_bit_p (&map_head, DECL_UID (t))) |
8043 | { |
8044 | if (openacc) |
8045 | error_at (OMP_CLAUSE_LOCATION (c), |
8046 | "%qD appears more than once in data clauses", t); |
8047 | else |
8048 | error_at (OMP_CLAUSE_LOCATION (c), |
8049 | "%qD appears both in data and map clauses", t); |
8050 | remove = true; |
8051 | } |
8052 | else |
8053 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
8054 | if (!field_ok) |
8055 | break; |
8056 | handle_field_decl: |
8057 | if (!remove |
8058 | && TREE_CODE (t) == FIELD_DECL |
8059 | && t == OMP_CLAUSE_DECL (c)) |
8060 | { |
8061 | OMP_CLAUSE_DECL (c) |
8062 | = omp_privatize_field (t, (OMP_CLAUSE_CODE (c) |
8063 | == OMP_CLAUSE_SHARED)); |
8064 | if (OMP_CLAUSE_DECL (c) == error_mark_node) |
8065 | remove = true; |
8066 | } |
8067 | break; |
8068 | |
8069 | case OMP_CLAUSE_FIRSTPRIVATE: |
8070 | if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) && !implicit_moved) |
8071 | { |
8072 | move_implicit: |
8073 | implicit_moved = true; |
8074 | /* Move firstprivate and map clauses with |
8075 | OMP_CLAUSE_{FIRSTPRIVATE,MAP}_IMPLICIT set to the end of |
8076 | clauses chain. */ |
8077 | tree cl1 = NULL_TREE, cl2 = NULL_TREE; |
8078 | tree *pc1 = pc, *pc2 = &cl1, *pc3 = &cl2; |
8079 | while (*pc1) |
8080 | if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_FIRSTPRIVATE |
8081 | && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (*pc1)) |
8082 | { |
8083 | *pc3 = *pc1; |
8084 | pc3 = &OMP_CLAUSE_CHAIN (*pc3); |
8085 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
8086 | } |
8087 | else if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_MAP |
8088 | && OMP_CLAUSE_MAP_IMPLICIT (*pc1)) |
8089 | { |
8090 | *pc2 = *pc1; |
8091 | pc2 = &OMP_CLAUSE_CHAIN (*pc2); |
8092 | *pc1 = OMP_CLAUSE_CHAIN (*pc1); |
8093 | } |
8094 | else |
8095 | pc1 = &OMP_CLAUSE_CHAIN (*pc1); |
8096 | *pc3 = NULL; |
8097 | *pc2 = cl2; |
8098 | *pc1 = cl1; |
8099 | continue; |
8100 | } |
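| /* Moving the implicit clauses last means explicit clauses are recorded |
| in the bitmaps first, so implicit duplicates can simply be dropped.  */ |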
8101 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
8102 | if (t) |
8103 | omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); |
8104 | else |
8105 | t = OMP_CLAUSE_DECL (c); |
8106 | if (!openacc && t == current_class_ptr) |
8107 | { |
8108 | error_at (OMP_CLAUSE_LOCATION (c), |
8109 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
8110 | " clauses"); |
8111 | remove = true; |
8112 | break; |
8113 | } |
8114 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL |
8115 | && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP |
8116 | || TREE_CODE (t) != FIELD_DECL)) |
8117 | { |
8118 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8119 | break; |
8120 | if (DECL_P (t)) |
8121 | error_at (OMP_CLAUSE_LOCATION (c), |
8122 | "%qD is not a variable in clause %<firstprivate%>", |
8123 | t); |
8124 | else |
8125 | error_at (OMP_CLAUSE_LOCATION (c), |
8126 | "%qE is not a variable in clause %<firstprivate%>", |
8127 | t); |
8128 | remove = true; |
8129 | } |
8130 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
8131 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) |
8132 | && bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
8133 | remove = true; |
8134 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
8135 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
8136 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
8137 | { |
8138 | error_at (OMP_CLAUSE_LOCATION (c), |
8139 | "%qD appears more than once in data clauses", t); |
8140 | remove = true; |
8141 | } |
8142 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
8143 | || bitmap_bit_p (&map_field_head, DECL_UID (t))) |
8144 | { |
8145 | if (openacc) |
8146 | error_at (OMP_CLAUSE_LOCATION (c), |
8147 | "%qD appears more than once in data clauses", t); |
8148 | else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) |
8149 | && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c)) |
8150 | /* Silently drop the clause. */; |
8151 | else |
8152 | error_at (OMP_CLAUSE_LOCATION (c), |
8153 | "%qD appears both in data and map clauses", t); |
8154 | remove = true; |
8155 | } |
8156 | else |
8157 | bitmap_set_bit (&firstprivate_head, DECL_UID (t)); |
8158 | goto handle_field_decl; |
8159 | |
8160 | case OMP_CLAUSE_LASTPRIVATE: |
8161 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
8162 | if (t) |
8163 | omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); |
8164 | else |
8165 | t = OMP_CLAUSE_DECL (c); |
8166 | if (!openacc && t == current_class_ptr) |
8167 | { |
8168 | error_at (OMP_CLAUSE_LOCATION (c), |
8169 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
8170 | " clauses"); |
8171 | remove = true; |
8172 | break; |
8173 | } |
8174 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL |
8175 | && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP |
8176 | || TREE_CODE (t) != FIELD_DECL)) |
8177 | { |
8178 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8179 | break; |
8180 | if (DECL_P (t)) |
8181 | error_at (OMP_CLAUSE_LOCATION (c), |
8182 | "%qD is not a variable in clause %<lastprivate%>", |
8183 | t); |
8184 | else |
8185 | error_at (OMP_CLAUSE_LOCATION (c), |
8186 | "%qE is not a variable in clause %<lastprivate%>", |
8187 | t); |
8188 | remove = true; |
8189 | } |
8190 | else if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
8191 | || bitmap_bit_p (&lastprivate_head, DECL_UID (t))) |
8192 | { |
8193 | error_at (OMP_CLAUSE_LOCATION (c), |
8194 | "%qD appears more than once in data clauses", t); |
8195 | remove = true; |
8196 | } |
8197 | else |
8198 | bitmap_set_bit (&lastprivate_head, DECL_UID (t)); |
8199 | goto handle_field_decl; |
8200 | |
8201 | case OMP_CLAUSE_IF: |
8202 | case OMP_CLAUSE_SELF: |
8203 | t = OMP_CLAUSE_OPERAND (c, 0); |
8204 | t = maybe_convert_cond (t); |
8205 | if (t == error_mark_node) |
8206 | remove = true; |
8207 | else if (!processing_template_decl) |
8208 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8209 | OMP_CLAUSE_OPERAND (c, 0) = t; |
8210 | break; |
8211 | |
8212 | case OMP_CLAUSE_FINAL: |
8213 | t = OMP_CLAUSE_FINAL_EXPR (c); |
8214 | t = maybe_convert_cond (t); |
8215 | if (t == error_mark_node) |
8216 | remove = true; |
8217 | else if (!processing_template_decl) |
8218 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8219 | OMP_CLAUSE_FINAL_EXPR (c) = t; |
8220 | break; |
8221 | |
8222 | case OMP_CLAUSE_NOCONTEXT: |
8223 | t = OMP_CLAUSE_NOCONTEXT_EXPR (c); |
8224 | t = maybe_convert_cond (t); |
8225 | if (t == error_mark_node) |
8226 | remove = true; |
8227 | else if (!processing_template_decl) |
8228 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8229 | OMP_CLAUSE_NOCONTEXT_EXPR (c) = t; |
8230 | break; |
8231 | |
8232 | case OMP_CLAUSE_NOVARIANTS: |
8233 | t = OMP_CLAUSE_NOVARIANTS_EXPR (c); |
8234 | t = maybe_convert_cond (t); |
8235 | if (t == error_mark_node) |
8236 | remove = true; |
8237 | else if (!processing_template_decl) |
8238 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8239 | OMP_CLAUSE_NOVARIANTS_EXPR (c) = t; |
8240 | break; |
8241 | |
8242 | case OMP_CLAUSE_GANG: |
8243 | /* Operand 1 is the gang static: argument. */ |
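| /* E.g. the 4 in '#pragma acc loop gang(static:4)'.  */ |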
8244 | t = OMP_CLAUSE_OPERAND (c, 1); |
8245 | if (t != NULL_TREE) |
8246 | { |
8247 | if (t == error_mark_node) |
8248 | remove = true; |
8249 | else if (!type_dependent_expression_p (t) |
8250 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8251 | { |
8252 | error_at (OMP_CLAUSE_LOCATION (c), |
8253 | "%<gang%> static expression must be integral"); |
8254 | remove = true; |
8255 | } |
8256 | else |
8257 | { |
8258 | t = mark_rvalue_use (t); |
8259 | if (!processing_template_decl) |
8260 | { |
8261 | t = maybe_constant_value (t); |
8262 | if (TREE_CODE (t) == INTEGER_CST |
8263 | && tree_int_cst_sgn (t) != 1 |
8264 | && t != integer_minus_one_node) |
8265 | { |
8266 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
8267 | "%<gang%> static value must be " |
8268 | "positive"); |
8269 | t = integer_one_node; |
8270 | } |
8271 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8272 | } |
8273 | } |
8274 | OMP_CLAUSE_OPERAND (c, 1) = t; |
8275 | } |
8276 | /* Check operand 0, the num argument. */ |
8277 | /* FALLTHRU */ |
8278 | |
8279 | case OMP_CLAUSE_WORKER: |
8280 | case OMP_CLAUSE_VECTOR: |
8281 | if (OMP_CLAUSE_OPERAND (c, 0) == NULL_TREE) |
8282 | break; |
8283 | /* FALLTHRU */ |
8284 | |
8285 | case OMP_CLAUSE_NUM_TASKS: |
8286 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TASKS) |
8287 | num_tasks_seen = true; |
8288 | /* FALLTHRU */ |
8289 | |
8290 | case OMP_CLAUSE_NUM_TEAMS: |
8291 | case OMP_CLAUSE_NUM_THREADS: |
8292 | case OMP_CLAUSE_NUM_GANGS: |
8293 | case OMP_CLAUSE_NUM_WORKERS: |
8294 | case OMP_CLAUSE_VECTOR_LENGTH: |
8295 | t = OMP_CLAUSE_OPERAND (c, 0); |
8296 | if (t == error_mark_node) |
8297 | remove = true; |
8298 | else if (!type_dependent_expression_p (t) |
8299 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8300 | { |
8301 | switch (OMP_CLAUSE_CODE (c)) |
8302 | { |
8303 | case OMP_CLAUSE_GANG: |
8304 | error_at (OMP_CLAUSE_LOCATION (c), |
8305 | "%<gang%> num expression must be integral"); break; |
8306 | case OMP_CLAUSE_VECTOR: |
8307 | error_at (OMP_CLAUSE_LOCATION (c), |
8308 | "%<vector%> length expression must be integral"); |
8309 | break; |
8310 | case OMP_CLAUSE_WORKER: |
8311 | error_at (OMP_CLAUSE_LOCATION (c), |
8312 | "%<worker%> num expression must be integral"); |
8313 | break; |
8314 | default: |
8315 | error_at (OMP_CLAUSE_LOCATION (c), |
8316 | "%qs expression must be integral", |
8317 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8318 | } |
8319 | remove = true; |
8320 | } |
8321 | else |
8322 | { |
8323 | t = mark_rvalue_use (t); |
8324 | if (!processing_template_decl) |
8325 | { |
8326 | t = maybe_constant_value (t); |
8327 | if (TREE_CODE (t) == INTEGER_CST |
8328 | && tree_int_cst_sgn (t) != 1) |
8329 | { |
8330 | switch (OMP_CLAUSE_CODE (c)) |
8331 | { |
8332 | case OMP_CLAUSE_GANG: |
8333 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
8334 | "%<gang%> num value must be positive"); |
8335 | break; |
8336 | case OMP_CLAUSE_VECTOR: |
8337 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
8338 | "%<vector%> length value must be " |
8339 | "positive"); |
8340 | break; |
8341 | case OMP_CLAUSE_WORKER: |
8342 | warning_at (OMP_CLAUSE_LOCATION (c), 0, |
8343 | "%<worker%> num value must be " |
8344 | "positive"); |
8345 | break; |
8346 | default: |
8347 | warning_at (OMP_CLAUSE_LOCATION (c), |
8348 | (flag_openmp || flag_openmp_simd) |
8349 | ? OPT_Wopenmp : 0, |
8350 | "%qs value must be positive", |
8351 | omp_clause_code_name |
8352 | [OMP_CLAUSE_CODE (c)]); |
8353 | } |
8354 | t = integer_one_node; |
8355 | } |
8356 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8357 | } |
8358 | OMP_CLAUSE_OPERAND (c, 0) = t; |
8359 | } |
8360 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS |
8361 | && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) |
8362 | && !remove) |
8363 | { |
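| /* num_teams (lower : upper): validate the optional lower bound as well |
| and warn when a constant lower bound exceeds a constant upper bound.  */ |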
8364 | t = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c); |
8365 | if (t == error_mark_node) |
8366 | remove = true; |
8367 | else if (!type_dependent_expression_p (t) |
8368 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8369 | { |
8370 | error_at (OMP_CLAUSE_LOCATION (c), |
8371 | "%qs expression must be integral", |
8372 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8373 | remove = true; |
8374 | } |
8375 | else |
8376 | { |
8377 | t = mark_rvalue_use (t); |
8378 | if (!processing_template_decl) |
8379 | { |
8380 | t = maybe_constant_value (t); |
8381 | if (TREE_CODE (t) == INTEGER_CST |
8382 | && tree_int_cst_sgn (t) != 1) |
8383 | { |
8384 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8385 | "%qs value must be positive", |
8386 | omp_clause_code_name |
8387 | [OMP_CLAUSE_CODE (c)]); |
8388 | t = NULL_TREE; |
8389 | } |
8390 | else |
8391 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8392 | tree upper = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c); |
8393 | if (t |
8394 | && TREE_CODE (t) == INTEGER_CST |
8395 | && TREE_CODE (upper) == INTEGER_CST |
8396 | && tree_int_cst_lt (upper, t)) |
8397 | { |
8398 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8399 | "%<num_teams%> lower bound %qE bigger " |
8400 | "than upper bound %qE", t, upper); |
8401 | t = NULL_TREE; |
8402 | } |
8403 | } |
8404 | OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = t; |
8405 | } |
8406 | } |
8407 | break; |
8408 | |
8409 | case OMP_CLAUSE_SCHEDULE: |
8410 | t = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c); |
8411 | if (t == NULL) |
8412 | ; |
8413 | else if (t == error_mark_node) |
8414 | remove = true; |
8415 | else if (!type_dependent_expression_p (t) |
8416 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8417 | { |
8418 | error_at (OMP_CLAUSE_LOCATION (c), |
8419 | "schedule chunk size expression must be integral"); |
8420 | remove = true; |
8421 | } |
8422 | else |
8423 | { |
8424 | t = mark_rvalue_use (t); |
8425 | if (!processing_template_decl) |
8426 | { |
8427 | t = maybe_constant_value (t); |
8428 | if (TREE_CODE (t) == INTEGER_CST |
8429 | && tree_int_cst_sgn (t) != 1) |
8430 | { |
8431 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8432 | "chunk size value must be positive"); |
8433 | t = integer_one_node; |
8434 | } |
8435 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8436 | } |
8437 | OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t; |
8438 | } |
8439 | if (!remove) |
8440 | schedule_seen = true; |
8441 | break; |
8442 | |
8443 | case OMP_CLAUSE_SIMDLEN: |
8444 | case OMP_CLAUSE_SAFELEN: |
8445 | t = OMP_CLAUSE_OPERAND (c, 0); |
8446 | if (t == error_mark_node) |
8447 | remove = true; |
8448 | else if (!type_dependent_expression_p (t) |
8449 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8450 | { |
8451 | error_at (OMP_CLAUSE_LOCATION (c), |
8452 | "%qs length expression must be integral", |
8453 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8454 | remove = true; |
8455 | } |
8456 | else |
8457 | { |
8458 | t = mark_rvalue_use (t); |
8459 | if (!processing_template_decl) |
8460 | { |
8461 | t = maybe_constant_value (t); |
8462 | if (TREE_CODE (t) != INTEGER_CST |
8463 | || tree_int_cst_sgn (t) != 1) |
8464 | { |
8465 | error_at (OMP_CLAUSE_LOCATION (c), |
8466 | "%qs length expression must be positive " |
8467 | "constant integer expression", |
8468 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8469 | remove = true; |
8470 | } |
8471 | } |
8472 | OMP_CLAUSE_OPERAND (c, 0) = t; |
8473 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SAFELEN) |
8474 | safelen = c; |
8475 | } |
8476 | break; |
8477 | |
8478 | case OMP_CLAUSE_ASYNC: |
8479 | t = OMP_CLAUSE_ASYNC_EXPR (c); |
8480 | if (t == error_mark_node) |
8481 | remove = true; |
8482 | else if (!type_dependent_expression_p (t) |
8483 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8484 | { |
8485 | error_at (OMP_CLAUSE_LOCATION (c), |
8486 | "%<async%> expression must be integral"); |
8487 | remove = true; |
8488 | } |
8489 | else |
8490 | { |
8491 | t = mark_rvalue_use (t); |
8492 | if (!processing_template_decl) |
8493 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8494 | OMP_CLAUSE_ASYNC_EXPR (c) = t; |
8495 | } |
8496 | break; |
8497 | |
8498 | case OMP_CLAUSE_WAIT: |
8499 | t = OMP_CLAUSE_WAIT_EXPR (c); |
8500 | if (t == error_mark_node) |
8501 | remove = true; |
8502 | else if (!processing_template_decl) |
8503 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8504 | OMP_CLAUSE_WAIT_EXPR (c) = t; |
8505 | break; |
8506 | |
8507 | case OMP_CLAUSE_THREAD_LIMIT: |
8508 | t = OMP_CLAUSE_THREAD_LIMIT_EXPR (c); |
8509 | if (t == error_mark_node) |
8510 | remove = true; |
8511 | else if (!type_dependent_expression_p (t) |
8512 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8513 | { |
8514 | error_at (OMP_CLAUSE_LOCATION (c), |
8515 | "%<thread_limit%> expression must be integral"); |
8516 | remove = true; |
8517 | } |
8518 | else |
8519 | { |
8520 | t = mark_rvalue_use (t); |
8521 | if (!processing_template_decl) |
8522 | { |
8523 | t = maybe_constant_value (t); |
8524 | if (TREE_CODE (t) == INTEGER_CST |
8525 | && tree_int_cst_sgn (t) != 1) |
8526 | { |
8527 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8528 | "%<thread_limit%> value must be positive"); |
8529 | t = integer_one_node; |
8530 | } |
8531 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8532 | } |
8533 | OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = t; |
8534 | } |
8535 | break; |
8536 | |
8537 | case OMP_CLAUSE_DEVICE: |
8538 | t = OMP_CLAUSE_DEVICE_ID (c); |
8539 | if (t == error_mark_node) |
8540 | remove = true; |
8541 | else if (!type_dependent_expression_p (t) |
8542 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8543 | { |
8544 | error_at (OMP_CLAUSE_LOCATION (c), |
8545 | "%<device%> id must be integral"); |
8546 | remove = true; |
8547 | } |
8548 | else if (OMP_CLAUSE_DEVICE_ANCESTOR (c) |
8549 | && TREE_CODE (t) == INTEGER_CST |
8550 | && !integer_onep (t)) |
8551 | { |
8552 | error_at (OMP_CLAUSE_LOCATION (c), |
8553 | "the %<device%> clause expression must evaluate to " |
8554 | "%<1%>"); |
8555 | remove = true; |
8556 | } |
8557 | else |
8558 | { |
8559 | t = mark_rvalue_use (t); |
8560 | if (!processing_template_decl) |
8561 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8562 | OMP_CLAUSE_DEVICE_ID (c) = t; |
8563 | } |
8564 | break; |
8565 | |
8566 | case OMP_CLAUSE_DIST_SCHEDULE: |
8567 | t = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c); |
8568 | if (t == NULL) |
8569 | ; |
8570 | else if (t == error_mark_node) |
8571 | remove = true; |
8572 | else if (!type_dependent_expression_p (t) |
8573 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8574 | { |
8575 | error_at (OMP_CLAUSE_LOCATION (c), |
8576 | "%<dist_schedule%> chunk size expression must be " |
8577 | "integral"); |
8578 | remove = true; |
8579 | } |
8580 | else |
8581 | { |
8582 | t = mark_rvalue_use (t); |
8583 | if (!processing_template_decl) |
8584 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
8585 | OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = t; |
8586 | } |
8587 | break; |
8588 | |
8589 | case OMP_CLAUSE_ALIGNED: |
8590 | t = OMP_CLAUSE_DECL (c); |
8591 | if (t == current_class_ptr && ort != C_ORT_OMP_DECLARE_SIMD) |
8592 | { |
8593 | error_at (OMP_CLAUSE_LOCATION (c), |
8594 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
8595 | " clauses"); |
8596 | remove = true; |
8597 | break; |
8598 | } |
8599 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
8600 | { |
8601 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8602 | break; |
8603 | if (DECL_P (t)) |
8604 | error_at (OMP_CLAUSE_LOCATION (c), |
8605 | "%qD is not a variable in %<aligned%> clause", t); |
8606 | else |
8607 | error_at (OMP_CLAUSE_LOCATION (c), |
8608 | "%qE is not a variable in %<aligned%> clause", t); |
8609 | remove = true; |
8610 | } |
8611 | else if (!type_dependent_expression_p (t) |
8612 | && !TYPE_PTR_P (TREE_TYPE (t)) |
8613 | && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE |
8614 | && (!TYPE_REF_P (TREE_TYPE (t)) |
8615 | || (!INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t))) |
8616 | && (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) |
8617 | != ARRAY_TYPE)))) |
8618 | { |
8619 | error_at (OMP_CLAUSE_LOCATION (c), |
8620 | "%qE in %<aligned%> clause is neither a pointer nor " |
8621 | "an array nor a reference to pointer or array", t); |
8622 | remove = true; |
8623 | } |
8624 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
8625 | { |
8626 | error_at (OMP_CLAUSE_LOCATION (c), |
8627 | "%qD appears more than once in %<aligned%> clauses", |
8628 | t); |
8629 | remove = true; |
8630 | } |
8631 | else |
8632 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
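| /* Now validate the optional alignment, e.g. the 32 in 'aligned(p : 32)'; |
| it must be a positive constant integer expression.  */ |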
8633 | t = OMP_CLAUSE_ALIGNED_ALIGNMENT (c); |
8634 | if (t == error_mark_node) |
8635 | remove = true; |
8636 | else if (t == NULL_TREE) |
8637 | break; |
8638 | else if (!type_dependent_expression_p (t) |
8639 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
8640 | { |
8641 | error_at (OMP_CLAUSE_LOCATION (c), |
8642 | "%<aligned%> clause alignment expression must " |
8643 | "be integral"); |
8644 | remove = true; |
8645 | } |
8646 | else |
8647 | { |
8648 | t = mark_rvalue_use (t); |
8649 | if (!processing_template_decl) |
8650 | { |
8651 | t = maybe_constant_value (t); |
8652 | if (TREE_CODE (t) != INTEGER_CST |
8653 | || tree_int_cst_sgn (t) != 1) |
8654 | { |
8655 | error_at (OMP_CLAUSE_LOCATION (c), |
8656 | "%<aligned%> clause alignment expression must " |
8657 | "be positive constant integer expression"); |
8658 | remove = true; |
8659 | } |
8660 | } |
8661 | OMP_CLAUSE_ALIGNED_ALIGNMENT (c) = t; |
8662 | } |
8663 | break; |
8664 | |
8665 | case OMP_CLAUSE_NONTEMPORAL: |
8666 | t = OMP_CLAUSE_DECL (c); |
8667 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
8668 | { |
8669 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8670 | break; |
8671 | if (DECL_P (t)) |
8672 | error_at (OMP_CLAUSE_LOCATION (c), |
8673 | "%qD is not a variable in %<nontemporal%> clause", |
8674 | t); |
8675 | else |
8676 | error_at (OMP_CLAUSE_LOCATION (c), |
8677 | "%qE is not a variable in %<nontemporal%> clause", |
8678 | t); |
8679 | remove = true; |
8680 | } |
8681 | else if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
8682 | { |
8683 | error_at (OMP_CLAUSE_LOCATION (c), |
8684 | "%qD appears more than once in %<nontemporal%> " |
8685 | "clauses", t); |
8686 | remove = true; |
8687 | } |
8688 | else |
8689 | bitmap_set_bit (&oacc_reduction_head, DECL_UID (t)); |
8690 | break; |
8691 | |
8692 | case OMP_CLAUSE_ALLOCATE: |
8693 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
8694 | if (t) |
8695 | omp_note_field_privatization (t, OMP_CLAUSE_DECL (c)); |
8696 | else |
8697 | t = OMP_CLAUSE_DECL (c); |
8698 | if (t == current_class_ptr) |
8699 | { |
8700 | error_at (OMP_CLAUSE_LOCATION (c), |
8701 | "%<this%> not allowed in %<allocate%> clause"); |
8702 | remove = true; |
8703 | break; |
8704 | } |
8705 | if (!VAR_P (t) |
8706 | && TREE_CODE (t) != PARM_DECL |
8707 | && TREE_CODE (t) != FIELD_DECL) |
8708 | { |
8709 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8710 | break; |
8711 | if (DECL_P (t)) |
8712 | error_at (OMP_CLAUSE_LOCATION (c), |
8713 | "%qD is not a variable in %<allocate%> clause", t); |
8714 | else |
8715 | error_at (OMP_CLAUSE_LOCATION (c), |
8716 | "%qE is not a variable in %<allocate%> clause", t); |
8717 | remove = true; |
8718 | } |
8719 | else if (bitmap_bit_p (&aligned_head, DECL_UID (t))) |
8720 | { |
8721 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
8722 | "%qD appears more than once in %<allocate%> clauses", |
8723 | t); |
8724 | remove = true; |
8725 | } |
8726 | else |
8727 | { |
8728 | bitmap_set_bit (&aligned_head, DECL_UID (t)); |
8729 | allocate_seen = true; |
8730 | } |
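| /* Validate the optional modifiers, e.g. |
| 'allocate(allocator(omp_default_mem_alloc), align(64) : x)': the align |
| argument must be a constant power of two and the allocator expression |
| must have type omp_allocator_handle_t.  */ |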
8731 | tree allocator, align; |
8732 | align = OMP_CLAUSE_ALLOCATE_ALIGN (c); |
8733 | if (error_operand_p (align)) |
8734 | { |
8735 | remove = true; |
8736 | break; |
8737 | } |
8738 | if (align) |
8739 | { |
8740 | if (!type_dependent_expression_p (align) |
8741 | && !INTEGRAL_TYPE_P (TREE_TYPE (align))) |
8742 | { |
8743 | error_at (OMP_CLAUSE_LOCATION (c), |
8744 | "%<allocate%> clause %<align%> modifier " |
8745 | "argument needs to be positive constant " |
8746 | "power of two integer expression"); |
8747 | remove = true; |
8748 | } |
8749 | else |
8750 | { |
8751 | align = mark_rvalue_use (align); |
8752 | if (!processing_template_decl) |
8753 | { |
8754 | align = maybe_constant_value (align); |
8755 | if (TREE_CODE (align) != INTEGER_CST |
8756 | || !tree_fits_uhwi_p (align) |
8757 | || !integer_pow2p (align)) |
8758 | { |
8759 | error_at (OMP_CLAUSE_LOCATION (c), |
8760 | "%<allocate%> clause %<align%> modifier " |
8761 | "argument needs to be positive constant " |
8762 | "power of two integer expression"); |
8763 | remove = true; |
8764 | } |
8765 | } |
8766 | } |
8767 | OMP_CLAUSE_ALLOCATE_ALIGN (c) = align; |
8768 | } |
8769 | allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c); |
8770 | if (error_operand_p (allocator)) |
8771 | { |
8772 | remove = true; |
8773 | break; |
8774 | } |
8775 | if (allocator == NULL_TREE) |
8776 | goto handle_field_decl; |
8777 | tree allocatort; |
8778 | allocatort = TYPE_MAIN_VARIANT (TREE_TYPE (allocator)); |
8779 | if (!type_dependent_expression_p (allocator) |
8780 | && (TREE_CODE (allocatort) != ENUMERAL_TYPE |
8781 | || TYPE_NAME (allocatort) == NULL_TREE |
8782 | || TREE_CODE (TYPE_NAME (allocatort)) != TYPE_DECL |
8783 | || (DECL_NAME (TYPE_NAME (allocatort)) |
8784 | != get_identifier ("omp_allocator_handle_t")) |
8785 | || (TYPE_CONTEXT (allocatort) |
8786 | != DECL_CONTEXT (global_namespace)))) |
8787 | { |
8788 | error_at (OMP_CLAUSE_LOCATION (c), |
8789 | "%<allocate%> clause allocator expression has " |
8790 | "type %qT rather than %<omp_allocator_handle_t%>", |
8791 | TREE_TYPE (allocator)); |
8792 | remove = true; |
8793 | break; |
8794 | } |
8795 | else |
8796 | { |
8797 | allocator = mark_rvalue_use (allocator); |
8798 | if (!processing_template_decl) |
8799 | allocator = maybe_constant_value (allocator); |
8800 | OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) = allocator; |
8801 | } |
8802 | goto handle_field_decl; |
8803 | |
8804 | case OMP_CLAUSE_DOACROSS: |
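| /* Only a doacross(sink : vec) dependence vector in OMP_CLAUSE_DECL needs |
| finishing here; clauses without a decl are skipped.  */ |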
8805 | t = OMP_CLAUSE_DECL (c); |
8806 | if (t == NULL_TREE) |
8807 | break; |
8808 | if (OMP_CLAUSE_DOACROSS_KIND (c) == OMP_CLAUSE_DOACROSS_SINK) |
8809 | { |
8810 | if (cp_finish_omp_clause_doacross_sink (c)) |
8811 | remove = true; |
8812 | break; |
8813 | } |
8814 | gcc_unreachable (); |
8815 | case OMP_CLAUSE_DEPEND: |
8816 | depend_clause = c; |
8817 | /* FALLTHRU */ |
8818 | case OMP_CLAUSE_AFFINITY: |
8819 | t = OMP_CLAUSE_DECL (c); |
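| /* A depend/affinity clause may carry an iterator, e.g. |
| 'depend(iterator(i = 0:n), in : a[i])'; the iterator TREE_VEC in |
| TREE_PURPOSE is shared by consecutive clauses and is finished only once.  */ |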
8820 | if (TREE_CODE (t) == TREE_LIST |
8821 | && TREE_PURPOSE (t) |
8822 | && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC) |
8823 | { |
8824 | if (TREE_PURPOSE (t) != last_iterators) |
8825 | last_iterators_remove |
8826 | = cp_omp_finish_iterators (TREE_PURPOSE (t)); |
8827 | last_iterators = TREE_PURPOSE (t); |
8828 | t = TREE_VALUE (t); |
8829 | if (last_iterators_remove) |
8830 | t = error_mark_node; |
8831 | } |
8832 | else |
8833 | last_iterators = NULL_TREE; |
8834 | |
8835 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
8836 | { |
8837 | if (handle_omp_array_sections (c, ort)) |
8838 | remove = true; |
8839 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8840 | && (OMP_CLAUSE_DEPEND_KIND (c) |
8841 | == OMP_CLAUSE_DEPEND_DEPOBJ)) |
8842 | { |
8843 | error_at (OMP_CLAUSE_LOCATION (c), |
8844 | "%<depend%> clause with %<depobj%> dependence " |
8845 | "type on array section"); |
8846 | remove = true; |
8847 | } |
8848 | break; |
8849 | } |
8850 | if (t == error_mark_node) |
8851 | remove = true; |
8852 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8853 | && t == ridpointers[RID_OMP_ALL_MEMORY]) |
8854 | { |
8855 | if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_OUT |
8856 | && OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_INOUT) |
8857 | { |
8858 | error_at (OMP_CLAUSE_LOCATION (c), |
8859 | "%<omp_all_memory%> used with %<depend%> kind " |
8860 | "other than %<out%> or %<inout%>"); |
8861 | remove = true; |
8862 | } |
8863 | if (processing_template_decl) |
8864 | break; |
8865 | } |
8866 | else if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
8867 | break; |
8868 | else if (!lvalue_p (t)) |
8869 | { |
8870 | if (DECL_P (t)) |
8871 | error_at (OMP_CLAUSE_LOCATION (c), |
8872 | "%qD is not lvalue expression nor array section " |
8873 | "in %qs clause", t, |
8874 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8875 | else |
8876 | error_at (OMP_CLAUSE_LOCATION (c), |
8877 | "%qE is not lvalue expression nor array section " |
8878 | "in %qs clause", t, |
8879 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8880 | remove = true; |
8881 | } |
8882 | else if (TREE_CODE (t) == COMPONENT_REF |
8883 | && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL |
8884 | && DECL_BIT_FIELD (TREE_OPERAND (t, 1))) |
8885 | { |
8886 | error_at (OMP_CLAUSE_LOCATION (c), |
8887 | "bit-field %qE in %qs clause", t, |
8888 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
8889 | remove = true; |
8890 | } |
8891 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8892 | && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ) |
8893 | { |
8894 | if (!c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t)) |
8895 | ? TREE_TYPE (TREE_TYPE (t)) |
8896 | : TREE_TYPE (t))) |
8897 | { |
8898 | error_at (OMP_CLAUSE_LOCATION (c), |
8899 | "%qE does not have %<omp_depend_t%> type in " |
8900 | "%<depend%> clause with %<depobj%> dependence " |
8901 | "type", t); |
8902 | remove = true; |
8903 | } |
8904 | } |
8905 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND |
8906 | && c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t)) |
8907 | ? TREE_TYPE (TREE_TYPE (t)) |
8908 | : TREE_TYPE (t))) |
8909 | { |
8910 | error_at (OMP_CLAUSE_LOCATION (c), |
8911 | "%qE should not have %<omp_depend_t%> type in " |
8912 | "%<depend%> clause with dependence type other than " |
8913 | "%<depobj%>", t); |
8914 | remove = true; |
8915 | } |
8916 | if (!remove) |
8917 | { |
8918 | if (t == ridpointers[RID_OMP_ALL_MEMORY]) |
8919 | t = null_pointer_node; |
8920 | else |
8921 | { |
8922 | tree addr = cp_build_addr_expr (t, tf_warning_or_error); |
8923 | if (addr == error_mark_node) |
8924 | { |
8925 | remove = true; |
8926 | break; |
8927 | } |
8928 | t = cp_build_indirect_ref (OMP_CLAUSE_LOCATION (c), |
8929 | addr, RO_UNARY_STAR, |
8930 | tf_warning_or_error); |
8931 | if (t == error_mark_node) |
8932 | { |
8933 | remove = true; |
8934 | break; |
8935 | } |
8936 | } |
8937 | if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST |
8938 | && TREE_PURPOSE (OMP_CLAUSE_DECL (c)) |
8939 | && (TREE_CODE (TREE_PURPOSE (OMP_CLAUSE_DECL (c))) |
8940 | == TREE_VEC)) |
8941 | TREE_VALUE (OMP_CLAUSE_DECL (c)) = t; |
8942 | else |
8943 | OMP_CLAUSE_DECL (c) = t; |
8944 | } |
8945 | break; |
8946 | case OMP_CLAUSE_DETACH: |
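| /* detach (event-handle): at most one detach clause is allowed on a task |
| construct, and the handle must have type omp_event_handle_t.  */ |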
8947 | t = OMP_CLAUSE_DECL (c); |
8948 | if (detach_seen) |
8949 | { |
8950 | error_at (OMP_CLAUSE_LOCATION (c), |
8951 | "too many %qs clauses on a task construct", |
8952 | "detach"); |
8953 | remove = true; |
8954 | break; |
8955 | } |
8956 | else if (error_operand_p (t)) |
8957 | { |
8958 | remove = true; |
8959 | break; |
8960 | } |
8961 | else |
8962 | { |
8963 | tree type = TYPE_MAIN_VARIANT (TREE_TYPE (t)); |
8964 | if (!type_dependent_expression_p (t) |
8965 | && (!INTEGRAL_TYPE_P (type) |
8966 | || TREE_CODE (type) != ENUMERAL_TYPE |
8967 | || TYPE_NAME (type) == NULL_TREE |
8968 | || (DECL_NAME (TYPE_NAME (type)) |
8969 | != get_identifier ("omp_event_handle_t")))) |
8970 | { |
8971 | error_at (OMP_CLAUSE_LOCATION (c), |
8972 | "%<detach%> clause event handle " |
8973 | "has type %qT rather than " |
8974 | "%<omp_event_handle_t%>", |
8975 | type); |
8976 | remove = true; |
8977 | } |
8978 | detach_seen = c; |
8979 | cxx_mark_addressable (t); |
8980 | } |
8981 | break; |
8982 | |
8983 | case OMP_CLAUSE_MAP: |
8984 | if (OMP_CLAUSE_MAP_IMPLICIT (c) && !implicit_moved) |
8985 | goto move_implicit; |
8986 | if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_PUSH_MAPPER_NAME |
8987 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POP_MAPPER_NAME) |
8988 | { |
8989 | remove = true; |
8990 | break; |
8991 | } |
8992 | /* FALLTHRU */ |
8993 | case OMP_CLAUSE_TO: |
8994 | case OMP_CLAUSE_FROM: |
8995 | case OMP_CLAUSE__CACHE_: |
8996 | { |
8997 | using namespace omp_addr_tokenizer; |
8998 | auto_vec<omp_addr_token *, 10> addr_tokens; |
8999 | |
9000 | t = OMP_CLAUSE_DECL (c); |
9001 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
9002 | { |
9003 | grp_start_p = pc; |
9004 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
9005 | |
9006 | if (handle_omp_array_sections (c, ort)) |
9007 | remove = true; |
9008 | else |
9009 | { |
9010 | t = OMP_CLAUSE_DECL (c); |
9011 | if (TREE_CODE (t) != OMP_ARRAY_SECTION |
9012 | && !type_dependent_expression_p (t) |
9013 | && !omp_mappable_type (TREE_TYPE (t))) |
9014 | { |
9015 | auto_diagnostic_group d; |
9016 | error_at (OMP_CLAUSE_LOCATION (c), |
9017 | "array section does not have mappable type " |
9018 | "in %qs clause", |
9019 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9020 | if (TREE_TYPE (t) != error_mark_node |
9021 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
9022 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
9023 | remove = true; |
9024 | } |
9025 | while (TREE_CODE (t) == ARRAY_REF) |
9026 | t = TREE_OPERAND (t, 0); |
9027 | |
9028 | if (type_dependent_expression_p (t)) |
9029 | break; |
9030 | |
9031 | cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
9032 | |
9033 | if (!ai.map_supported_p () |
9034 | || !omp_parse_expr (addr_tokens, t)) |
9035 | { |
9036 | sorry_at (OMP_CLAUSE_LOCATION (c), |
9037 | "unsupported map expression %qE", |
9038 | OMP_CLAUSE_DECL (c)); |
9039 | remove = true; |
9040 | break; |
9041 | } |
9042 | |
9043 | /* This check is to determine if this will be the only map |
9044 | node created for this clause. Otherwise, we'll check |
9045 | the following FIRSTPRIVATE_POINTER, |
9046 | FIRSTPRIVATE_REFERENCE or ATTACH_DETACH node on the next |
9047 | iteration(s) of the loop. */ |
9048 | if (addr_tokens.length () >= 4 |
9049 | && addr_tokens[0]->type == STRUCTURE_BASE |
9050 | && addr_tokens[0]->u.structure_base_kind == BASE_DECL |
9051 | && addr_tokens[1]->type == ACCESS_METHOD |
9052 | && addr_tokens[2]->type == COMPONENT_SELECTOR |
9053 | && addr_tokens[3]->type == ACCESS_METHOD |
9054 | && (addr_tokens[3]->u.access_kind == ACCESS_DIRECT |
9055 | || (addr_tokens[3]->u.access_kind |
9056 | == ACCESS_INDEXED_ARRAY))) |
9057 | { |
9058 | tree rt = addr_tokens[1]->expr; |
9059 | |
9060 | gcc_assert (DECL_P (rt)); |
9061 | |
9062 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9063 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
9064 | && (bitmap_bit_p (&map_head, DECL_UID (rt)) |
9065 | || bitmap_bit_p (&map_field_head, DECL_UID (rt)) |
9066 | || bitmap_bit_p (&map_firstprivate_head, |
9067 | DECL_UID (rt)))) |
9068 | { |
9069 | remove = true; |
9070 | break; |
9071 | } |
9072 | if (bitmap_bit_p (&map_field_head, DECL_UID (rt))) |
9073 | break; |
9074 | if (bitmap_bit_p (&map_head, DECL_UID (rt))) |
9075 | { |
9076 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
9077 | error_at (OMP_CLAUSE_LOCATION (c), |
9078 | "%qD appears more than once in motion" |
9079 | " clauses", rt); |
9080 | else if (openacc) |
9081 | error_at (OMP_CLAUSE_LOCATION (c), |
9082 | "%qD appears more than once in data" |
9083 | " clauses", rt); |
9084 | else |
9085 | error_at (OMP_CLAUSE_LOCATION (c), |
9086 | "%qD appears more than once in map" |
9087 | " clauses", rt); |
9088 | remove = true; |
9089 | } |
9090 | else |
9091 | { |
9092 | bitmap_set_bit (&map_head, DECL_UID (rt)); |
9093 | bitmap_set_bit (&map_field_head, DECL_UID (rt)); |
9094 | } |
9095 | } |
9096 | } |
9097 | if (cp_oacc_check_attachments (c)) |
9098 | remove = true; |
9099 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9100 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
9101 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
9102 | && !OMP_CLAUSE_SIZE (c)) |
9103 | /* In this case, we have a single array element which is a |
9104 | pointer, and we already set OMP_CLAUSE_SIZE in |
9105 | handle_omp_array_sections above. For attach/detach |
9106 | clauses, reset the OMP_CLAUSE_SIZE (representing a bias) |
9107 | to zero here. */ |
9108 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
9109 | break; |
9110 | } |
9111 | else if (type_dependent_expression_p (t)) |
9112 | break; |
9113 | else if (!omp_parse_expr (addr_tokens, t)) |
9114 | { |
9115 | sorry_at (OMP_CLAUSE_LOCATION (c), |
9116 | "unsupported map expression %qE", |
9117 | OMP_CLAUSE_DECL (c)); |
9118 | remove = true; |
9119 | break; |
9120 | } |
9121 | if (t == error_mark_node) |
9122 | { |
9123 | remove = true; |
9124 | break; |
9125 | } |
9126 | /* OpenACC attach / detach clauses must be pointers. */ |
9127 | if (cp_oacc_check_attachments (c)) |
9128 | { |
9129 | remove = true; |
9130 | break; |
9131 | } |
9132 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9133 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH |
9134 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH) |
9135 | && !OMP_CLAUSE_SIZE (c)) |
9136 | /* For attach/detach clauses, set OMP_CLAUSE_SIZE (representing a |
9137 | bias) to zero here, so it is not set erroneously to the |
9138 | pointer size later on in gimplify.cc. */ |
9139 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
9140 | |
9141 | cp_omp_address_inspector ai (OMP_CLAUSE_LOCATION (c), t); |
9142 | |
9143 | if (!ai.check_clause (c)) |
9144 | { |
9145 | remove = true; |
9146 | break; |
9147 | } |
9148 | |
9149 | if (!ai.map_supported_p ()) |
9150 | { |
9151 | sorry_at (OMP_CLAUSE_LOCATION (c), |
9152 | "unsupported map expression %qE", |
9153 | OMP_CLAUSE_DECL (c)); |
9154 | remove = true; |
9155 | break; |
9156 | } |
9157 | |
9158 | gcc_assert ((addr_tokens[0]->type == ARRAY_BASE |
9159 | || addr_tokens[0]->type == STRUCTURE_BASE) |
9160 | && addr_tokens[1]->type == ACCESS_METHOD); |
9161 | |
9162 | t = addr_tokens[1]->expr; |
9163 | |
9164 | /* This is used to prevent cxx_mark_addressable from being called |
9165 | on 'this' for expressions like 'this->a', i.e. typical member |
9166 | accesses. */ |
9167 | indir_component_ref_p |
9168 | = (addr_tokens[0]->type == STRUCTURE_BASE |
9169 | && addr_tokens[1]->u.access_kind != ACCESS_DIRECT); |
9170 | |
9171 | if (addr_tokens[0]->u.structure_base_kind != BASE_DECL) |
9172 | goto skip_decl_checks; |
9173 | |
9174 | /* For OpenMP, we can access a struct "t" and "t.d" on the same |
9175 | mapping. OpenACC allows multiple fields of the same structure |
9176 | to be written. */ |
9177 | if (addr_tokens[0]->type == STRUCTURE_BASE |
9178 | && (bitmap_bit_p (&map_field_head, DECL_UID (t)) |
9179 | || (!openacc && bitmap_bit_p (&map_head, DECL_UID (t))))) |
9180 | goto skip_decl_checks; |
9181 | |
9182 | if (!processing_template_decl && TREE_CODE (t) == FIELD_DECL) |
9183 | { |
9184 | OMP_CLAUSE_DECL (c) |
9185 | = finish_non_static_data_member (t, NULL_TREE, NULL_TREE); |
9186 | break; |
9187 | } |
9188 | if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
9189 | { |
9190 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
9191 | break; |
9192 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9193 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
9194 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER |
9195 | || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH |
9196 | || (!openacc && EXPR_P (t)))) |
9197 | break; |
9198 | if (DECL_P (t)) |
9199 | error_at (OMP_CLAUSE_LOCATION (c), |
9200 | "%qD is not a variable in %qs clause", t, |
9201 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9202 | else |
9203 | error_at (OMP_CLAUSE_LOCATION (c), |
9204 | "%qE is not a variable in %qs clause", t, |
9205 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9206 | remove = true; |
9207 | } |
9208 | else if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) |
9209 | { |
9210 | error_at (OMP_CLAUSE_LOCATION (c), |
9211 | "%qD is threadprivate variable in %qs clause", t, |
9212 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9213 | remove = true; |
9214 | } |
9215 | else if (!processing_template_decl |
9216 | && !TYPE_REF_P (TREE_TYPE (t)) |
9217 | && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
9218 | || (OMP_CLAUSE_MAP_KIND (c) |
9219 | != GOMP_MAP_FIRSTPRIVATE_POINTER)) |
9220 | && !indir_component_ref_p |
9221 | && (t != current_class_ptr |
9222 | || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
9223 | || OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH) |
9224 | && !cxx_mark_addressable (t)) |
9225 | remove = true; |
9226 | else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9227 | && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER |
9228 | || (OMP_CLAUSE_MAP_KIND (c) |
9229 | == GOMP_MAP_FIRSTPRIVATE_POINTER) |
9230 | || (OMP_CLAUSE_MAP_KIND (c) |
9231 | == GOMP_MAP_ATTACH_DETACH))) |
9232 | && t == OMP_CLAUSE_DECL (c) |
9233 | && !type_dependent_expression_p (t) |
9234 | && !omp_mappable_type (TYPE_REF_P (TREE_TYPE (t)) |
9235 | ? TREE_TYPE (TREE_TYPE (t)) |
9236 | : TREE_TYPE (t))) |
9237 | { |
9238 | auto_diagnostic_group d; |
9239 | error_at (OMP_CLAUSE_LOCATION (c), |
9240 | "%qD does not have a mappable type in %qs clause", t, |
9241 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9242 | if (TREE_TYPE (t) != error_mark_node |
9243 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
9244 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
9245 | remove = true; |
9246 | } |
9247 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9248 | && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR |
9249 | && !type_dependent_expression_p (t) |
9250 | && !INDIRECT_TYPE_P (TREE_TYPE (t))) |
9251 | { |
9252 | error_at (OMP_CLAUSE_LOCATION (c), |
9253 | "%qD is not a pointer variable", t); |
9254 | remove = true; |
9255 | } |
9256 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9257 | && OMP_CLAUSE_MAP_IMPLICIT (c) |
9258 | && (bitmap_bit_p (&map_head, DECL_UID (t)) |
9259 | || bitmap_bit_p (&map_field_head, DECL_UID (t)) |
9260 | || bitmap_bit_p (&map_firstprivate_head, |
9261 | DECL_UID (t)))) |
9262 | remove = true; |
9263 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9264 | && (OMP_CLAUSE_MAP_KIND (c) |
9265 | == GOMP_MAP_FIRSTPRIVATE_POINTER)) |
9266 | { |
9267 | if (bitmap_bit_p (&generic_head, DECL_UID (t)) |
9268 | || bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
9269 | || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t))) |
9270 | { |
9271 | error_at (OMP_CLAUSE_LOCATION (c), |
9272 | "%qD appears more than once in data clauses", t); |
9273 | remove = true; |
9274 | } |
9275 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
9276 | && !bitmap_bit_p (&map_field_head, DECL_UID (t)) |
9277 | && openacc) |
9278 | { |
9279 | error_at (OMP_CLAUSE_LOCATION (c), |
9280 | "%qD appears more than once in data clauses", t); |
9281 | remove = true; |
9282 | } |
9283 | else |
9284 | bitmap_set_bit (&map_firstprivate_head, DECL_UID (t)); |
9285 | } |
9286 | else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP |
9287 | && (OMP_CLAUSE_MAP_KIND (c) |
9288 | == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) |
9289 | bitmap_set_bit (&map_firstprivate_head, DECL_UID (t)); |
9290 | else if (bitmap_bit_p (&map_head, DECL_UID (t)) |
9291 | && !bitmap_bit_p (&map_field_head, DECL_UID (t)) |
9292 | && ort != C_ORT_OMP |
9293 | && ort != C_ORT_OMP_EXIT_DATA) |
9294 | { |
9295 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
9296 | error_at (OMP_CLAUSE_LOCATION (c), |
9297 | "%qD appears more than once in motion clauses", t); |
9298 | else if (openacc) |
9299 | error_at (OMP_CLAUSE_LOCATION (c), |
9300 | "%qD appears more than once in data clauses", t); |
9301 | else |
9302 | error_at (OMP_CLAUSE_LOCATION (c), |
9303 | "%qD appears more than once in map clauses", t); |
9304 | remove = true; |
9305 | } |
9306 | else if (openacc && bitmap_bit_p (&generic_head, DECL_UID (t))) |
9307 | { |
9308 | error_at (OMP_CLAUSE_LOCATION (c), |
9309 | "%qD appears more than once in data clauses", t); |
9310 | remove = true; |
9311 | } |
9312 | else if (bitmap_bit_p (&firstprivate_head, DECL_UID (t)) |
9313 | || bitmap_bit_p (&is_on_device_head, DECL_UID (t))) |
9314 | { |
9315 | if (openacc) |
9316 | error_at (OMP_CLAUSE_LOCATION (c), |
9317 | "%qD appears more than once in data clauses", t); |
9318 | else |
9319 | error_at (OMP_CLAUSE_LOCATION (c), |
9320 | "%qD appears both in data and map clauses", t); |
9321 | remove = true; |
9322 | } |
9323 | else if (!omp_access_chain_p (addr_tokens, 1)) |
9324 | { |
9325 | bitmap_set_bit (&map_head, DECL_UID (t)); |
9326 | |
9327 | tree decl = OMP_CLAUSE_DECL (c); |
9328 | if (t != decl |
9329 | && (TREE_CODE (decl) == COMPONENT_REF |
9330 | || (INDIRECT_REF_P (decl) |
9331 | && (TREE_CODE (TREE_OPERAND (decl, 0)) |
9332 | == COMPONENT_REF) |
9333 | && TYPE_REF_P (TREE_TYPE (TREE_OPERAND (decl, |
9334 | 0)))))) |
9335 | bitmap_set_bit (&map_field_head, DECL_UID (t)); |
9336 | } |
9337 | |
9338 | skip_decl_checks: |
9339 | /* If we call ai.expand_map_clause in handle_omp_array_sections, |
9340 | the containing loop (here) iterates through the new nodes |
9341 | created by that expansion. Avoid expanding those again (just |
9342 | by checking the node type). */ |
9343 | if (!remove |
9344 | && !processing_template_decl |
9345 | && ort != C_ORT_DECLARE_SIMD |
9346 | && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP |
9347 | || (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER |
9348 | && (OMP_CLAUSE_MAP_KIND (c) |
9349 | != GOMP_MAP_FIRSTPRIVATE_REFERENCE) |
9350 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER |
9351 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH |
9352 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH |
9353 | && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH))) |
9354 | { |
9355 | grp_start_p = pc; |
9356 | grp_sentinel = OMP_CLAUSE_CHAIN (c); |
9357 | tree nc = ai.expand_map_clause (c, OMP_CLAUSE_DECL (c), |
9358 | addr_tokens, ort); |
9359 | if (nc != error_mark_node) |
9360 | c = nc; |
9361 | } |
9362 | } |
9363 | break; |
9364 | |
9365 | case OMP_CLAUSE_ENTER: |
9366 | case OMP_CLAUSE_LINK: |
9367 | t = OMP_CLAUSE_DECL (c); |
9368 | const char *cname; |
9369 | cname = omp_clause_code_name[OMP_CLAUSE_CODE (c)]; |
9370 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER |
9371 | && OMP_CLAUSE_ENTER_TO (c)) |
9372 | cname = "to"; |
9373 | if (TREE_CODE (t) == FUNCTION_DECL |
9374 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER) |
9375 | ; |
9376 | else if (!VAR_P (t)) |
9377 | { |
9378 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ENTER) |
9379 | { |
9380 | if (TREE_CODE (t) == TEMPLATE_ID_EXPR) |
9381 | error_at (OMP_CLAUSE_LOCATION (c), |
9382 | "template %qE in clause %qs", t, cname); |
9383 | else if (really_overloaded_fn (t)) |
9384 | error_at (OMP_CLAUSE_LOCATION (c), |
9385 | "overloaded function name %qE in clause %qs", t, |
9386 | cname); |
9387 | else |
9388 | error_at (OMP_CLAUSE_LOCATION (c), |
9389 | "%qE is neither a variable nor a function name " |
9390 | "in clause %qs", t, cname); |
9391 | } |
9392 | else |
9393 | error_at (OMP_CLAUSE_LOCATION (c), |
9394 | "%qE is not a variable in clause %qs", t, cname); |
9395 | remove = true; |
9396 | } |
9397 | else if (DECL_THREAD_LOCAL_P (t)) |
9398 | { |
9399 | error_at (OMP_CLAUSE_LOCATION (c), |
9400 | "%qD is threadprivate variable in %qs clause", t, |
9401 | cname); |
9402 | remove = true; |
9403 | } |
9404 | else if (!omp_mappable_type (TREE_TYPE (t))) |
9405 | { |
9406 | auto_diagnostic_group d; |
9407 | error_at (OMP_CLAUSE_LOCATION (c), |
9408 | "%qD does not have a mappable type in %qs clause", t, |
9409 | cname); |
9410 | if (TREE_TYPE (t) != error_mark_node |
9411 | && !COMPLETE_TYPE_P (TREE_TYPE (t))) |
9412 | cxx_incomplete_type_inform (TREE_TYPE (t)); |
9413 | remove = true; |
9414 | } |
9415 | if (remove) |
9416 | break; |
9417 | if (bitmap_bit_p (&generic_head, DECL_UID (t))) |
9418 | { |
9419 | error_at (OMP_CLAUSE_LOCATION (c), |
9420 | "%qE appears more than once on the same " |
9421 | "%<declare target%> directive", t); |
9422 | remove = true; |
9423 | } |
9424 | else |
9425 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
9426 | break; |
9427 | |
9428 | case OMP_CLAUSE_UNIFORM: |
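| /* uniform (parm) is only meaningful on 'declare simd', e.g. |
| '#pragma omp declare simd uniform(n)'; the operand must name a |
| parameter of the function.  */ |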
9429 | t = OMP_CLAUSE_DECL (c); |
9430 | if (TREE_CODE (t) != PARM_DECL) |
9431 | { |
9432 | if (processing_template_decl) |
9433 | break; |
9434 | if (DECL_P (t)) |
9435 | error_at (OMP_CLAUSE_LOCATION (c), |
9436 | "%qD is not an argument in %<uniform%> clause", t); |
9437 | else |
9438 | error_at (OMP_CLAUSE_LOCATION (c), |
9439 | "%qE is not an argument in %<uniform%> clause", t); |
9440 | remove = true; |
9441 | break; |
9442 | } |
9443 | /* map_head bitmap is used as uniform_head if declare_simd. */ |
9444 | bitmap_set_bit (&map_head, DECL_UID (t)); |
9445 | goto check_dup_generic; |
9446 | |
9447 | case OMP_CLAUSE_GRAINSIZE: |
9448 | t = OMP_CLAUSE_GRAINSIZE_EXPR (c); |
9449 | if (t == error_mark_node) |
9450 | remove = true; |
9451 | else if (!type_dependent_expression_p (t) |
9452 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9453 | { |
9454 | error_at (OMP_CLAUSE_LOCATION (c), |
9455 | "%<grainsize%> expression must be integral"); |
9456 | remove = true; |
9457 | } |
9458 | else |
9459 | { |
9460 | t = mark_rvalue_use (t); |
9461 | if (!processing_template_decl) |
9462 | { |
9463 | t = maybe_constant_value (t); |
9464 | if (TREE_CODE (t) == INTEGER_CST |
9465 | && tree_int_cst_sgn (t) != 1) |
9466 | { |
9467 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
9468 | "%<grainsize%> value must be positive"); |
9469 | t = integer_one_node; |
9470 | } |
9471 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
9472 | } |
9473 | OMP_CLAUSE_GRAINSIZE_EXPR (c) = t; |
9474 | } |
9475 | break; |
9476 | |
9477 | case OMP_CLAUSE_PRIORITY: |
9478 | t = OMP_CLAUSE_PRIORITY_EXPR (c); |
9479 | if (t == error_mark_node) |
9480 | remove = true; |
9481 | else if (!type_dependent_expression_p (t) |
9482 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9483 | { |
9484 | error_at (OMP_CLAUSE_LOCATION (c), |
9485 | "%<priority%> expression must be integral"); |
9486 | remove = true; |
9487 | } |
9488 | else |
9489 | { |
9490 | t = mark_rvalue_use (t); |
9491 | if (!processing_template_decl) |
9492 | { |
9493 | t = maybe_constant_value (t); |
9494 | if (TREE_CODE (t) == INTEGER_CST |
9495 | && tree_int_cst_sgn (t) == -1) |
9496 | { |
9497 | warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp, |
9498 | "%<priority%> value must be non-negative"); |
9499 | t = integer_one_node; |
9500 | } |
9501 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
9502 | } |
9503 | OMP_CLAUSE_PRIORITY_EXPR (c) = t; |
9504 | } |
9505 | break; |
9506 | |
9507 | case OMP_CLAUSE_HINT: |
9508 | t = OMP_CLAUSE_HINT_EXPR (c); |
9509 | if (t == error_mark_node) |
9510 | remove = true; |
9511 | else if (!type_dependent_expression_p (t) |
9512 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9513 | { |
9514 | error_at (OMP_CLAUSE_LOCATION (c), |
9515 | "%<hint%> expression must be integral"); |
9516 | remove = true; |
9517 | } |
9518 | else |
9519 | { |
9520 | t = mark_rvalue_use (t); |
9521 | if (!processing_template_decl) |
9522 | { |
9523 | t = maybe_constant_value (t); |
9524 | if (TREE_CODE (t) != INTEGER_CST) |
9525 | { |
9526 | error_at (OMP_CLAUSE_LOCATION (c), |
9527 | "%<hint%> expression must be constant integer " |
9528 | "expression"); |
9529 | remove = true; |
9530 | } |
9531 | } |
9532 | OMP_CLAUSE_HINT_EXPR (c) = t; |
9533 | } |
9534 | break; |
9535 | |
9536 | case OMP_CLAUSE_FILTER: |
9537 | t = OMP_CLAUSE_FILTER_EXPR (c); |
9538 | if (t == error_mark_node) |
9539 | remove = true; |
9540 | else if (!type_dependent_expression_p (t) |
9541 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9542 | { |
9543 | error_at (OMP_CLAUSE_LOCATION (c), |
9544 | "%<filter%> expression must be integral"); |
9545 | remove = true; |
9546 | } |
9547 | else |
9548 | { |
9549 | t = mark_rvalue_use (t); |
9550 | if (!processing_template_decl) |
9551 | { |
9552 | t = maybe_constant_value (t); |
9553 | t = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
9554 | } |
9555 | OMP_CLAUSE_FILTER_EXPR (c) = t; |
9556 | } |
9557 | break; |
9558 | |
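/* E.g. #pragma omp target data map(v) use_device_ptr(v).  For OpenMP
   %<use_device_ptr%> the list item must be a pointer or a reference to
   pointer; otherwise (OpenACC, or %<is_device_ptr%>) arrays and
   references to arrays are also accepted, as checked below.  */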
9559 | case OMP_CLAUSE_IS_DEVICE_PTR: |
9560 | case OMP_CLAUSE_USE_DEVICE_PTR: |
9561 | field_ok = (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP; |
9562 | t = OMP_CLAUSE_DECL (c); |
9563 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR) |
9564 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
9565 | if (!type_dependent_expression_p (t)) |
9566 | { |
9567 | tree type = TREE_TYPE (t); |
9568 | if (!TYPE_PTR_P (type) |
9569 | && (!TYPE_REF_P (type) || !TYPE_PTR_P (TREE_TYPE (type)))) |
9570 | { |
9571 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR |
9572 | && ort == C_ORT_OMP) |
9573 | { |
9574 | error_at (OMP_CLAUSE_LOCATION (c), |
9575 | "%qs variable is neither a pointer " |
9576 | "nor reference to pointer", |
9577 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9578 | remove = true; |
9579 | } |
9580 | else if (TREE_CODE (type) != ARRAY_TYPE |
9581 | && (!TYPE_REF_P (type) |
9582 | || TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)) |
9583 | { |
9584 | error_at (OMP_CLAUSE_LOCATION (c), |
9585 | "%qs variable is neither a pointer, nor an " |
9586 | "array nor reference to pointer or array", |
9587 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9588 | remove = true; |
9589 | } |
9590 | } |
9591 | } |
9592 | goto check_dup_generic; |
9593 | |
9594 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
9595 | t = OMP_CLAUSE_DECL (c); |
9596 | if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
9597 | { |
9598 | if (handle_omp_array_sections (c, ort)) |
9599 | remove = true; |
9600 | else |
9601 | { |
9602 | t = OMP_CLAUSE_DECL (c); |
9603 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
9604 | t = TREE_OPERAND (t, 0); |
9605 | while (INDIRECT_REF_P (t) |
9606 | || TREE_CODE (t) == ARRAY_REF) |
9607 | t = TREE_OPERAND (t, 0); |
9608 | } |
9609 | } |
9610 | if (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
9611 | { |
9612 | bitmap_set_bit (&is_on_device_head, DECL_UID (t)); |
9613 | if (!processing_template_decl |
9614 | && !cxx_mark_addressable (t)) |
9615 | remove = true; |
9616 | } |
9617 | goto check_dup_generic_t; |
9618 | |
9619 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
9620 | field_ok = true; |
9621 | t = OMP_CLAUSE_DECL (c); |
9622 | if (!processing_template_decl |
9623 | && (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
9624 | && !TYPE_REF_P (TREE_TYPE (t)) |
9625 | && !cxx_mark_addressable (t)) |
9626 | remove = true; |
9627 | goto check_dup_generic; |
9628 | |
9629 | case OMP_CLAUSE_NOWAIT: |
9630 | case OMP_CLAUSE_DEFAULT: |
9631 | case OMP_CLAUSE_UNTIED: |
9632 | case OMP_CLAUSE_COLLAPSE: |
9633 | case OMP_CLAUSE_PARALLEL: |
9634 | case OMP_CLAUSE_FOR: |
9635 | case OMP_CLAUSE_SECTIONS: |
9636 | case OMP_CLAUSE_TASKGROUP: |
9637 | case OMP_CLAUSE_PROC_BIND: |
9638 | case OMP_CLAUSE_DEVICE_TYPE: |
9639 | case OMP_CLAUSE_NOGROUP: |
9640 | case OMP_CLAUSE_THREADS: |
9641 | case OMP_CLAUSE_SIMD: |
9642 | case OMP_CLAUSE_DEFAULTMAP: |
9643 | case OMP_CLAUSE_BIND: |
9644 | case OMP_CLAUSE_AUTO: |
9645 | case OMP_CLAUSE_INDEPENDENT: |
9646 | case OMP_CLAUSE_SEQ: |
9647 | case OMP_CLAUSE_IF_PRESENT: |
9648 | case OMP_CLAUSE_FINALIZE: |
9649 | case OMP_CLAUSE_NOHOST: |
9650 | case OMP_CLAUSE_INDIRECT: |
9651 | break; |
9652 | |
9653 | case OMP_CLAUSE_MERGEABLE: |
9654 | mergeable_seen = true; |
9655 | break; |
9656 | |
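/* OpenACC %<tile%> clause, e.g. #pragma acc loop tile (2, 2).  Each
   element must be a constant integer expression that is not negative;
   zero encodes '*' (see the comment below).  */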
9657 | case OMP_CLAUSE_TILE: |
9658 | for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list; |
9659 | list = TREE_CHAIN (list)) |
9660 | { |
9661 | t = TREE_VALUE (list); |
9662 | |
9663 | if (t == error_mark_node) |
9664 | remove = true; |
9665 | else if (!type_dependent_expression_p (t) |
9666 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9667 | { |
9668 | error_at (OMP_CLAUSE_LOCATION (c), |
9669 | "%<tile%> argument needs integral type"); |
9670 | remove = true; |
9671 | } |
9672 | else |
9673 | { |
9674 | t = mark_rvalue_use (t); |
9675 | if (!processing_template_decl) |
9676 | { |
9677 | /* Zero is used to indicate '*'; the user can get there via an |
9678 | integral constant expression (ICE) of value zero. */ |
9679 | t = maybe_constant_value (t); |
9680 | if (!tree_fits_shwi_p (t) |
9681 | || tree_to_shwi (t) < 0) |
9682 | { |
9683 | error_at (OMP_CLAUSE_LOCATION (c), |
9684 | "%<tile%> argument needs positive " |
9685 | "integral constant"); |
9686 | remove = true; |
9687 | } |
9688 | } |
9689 | } |
9690 | |
9691 | /* Update list item. */ |
9692 | TREE_VALUE (list) = t; |
9693 | } |
9694 | break; |
9695 | |
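/* The %<sizes%> clause of the loop transformation directives, e.g.
   #pragma omp tile sizes (4, 8).  Each size must be a positive integral
   constant; invalid entries are replaced by 1 so that later processing of
   the directive can continue.  */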
9696 | case OMP_CLAUSE_SIZES: |
9697 | for (tree list = OMP_CLAUSE_SIZES_LIST (c); |
9698 | !remove && list; list = TREE_CHAIN (list)) |
9699 | { |
9700 | t = TREE_VALUE (list); |
9701 | |
9702 | if (t == error_mark_node) |
9703 | t = integer_one_node; |
9704 | else if (!type_dependent_expression_p (t) |
9705 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9706 | { |
9707 | error_at (OMP_CLAUSE_LOCATION (c), |
9708 | "%<sizes%> argument needs positive integral " |
9709 | "constant"); |
9710 | t = integer_one_node; |
9711 | } |
9712 | else |
9713 | { |
9714 | t = mark_rvalue_use (t); |
9715 | if (!processing_template_decl) |
9716 | { |
9717 | t = maybe_constant_value (t); |
9718 | HOST_WIDE_INT n; |
9719 | if (!tree_fits_shwi_p (t) |
9720 | || !INTEGRAL_TYPE_P (TREE_TYPE (t)) |
9721 | || (n = tree_to_shwi (t)) <= 0 |
9722 | || (int)n != n) |
9723 | { |
9724 | error_at (OMP_CLAUSE_LOCATION (c), |
9725 | "%<sizes%> argument needs positive " |
9726 | "integral constant"); |
9727 | t = integer_one_node; |
9728 | } |
9729 | } |
9730 | } |
9731 | |
9732 | /* Update list item. */ |
9733 | TREE_VALUE (list) = t; |
9734 | } |
9735 | break; |
9736 | |
9737 | case OMP_CLAUSE_ORDERED: |
9738 | ordered_seen = true; |
9739 | break; |
9740 | |
9741 | case OMP_CLAUSE_ORDER: |
9742 | if (order_seen) |
9743 | remove = true; |
9744 | else |
9745 | order_seen = true; |
9746 | break; |
9747 | |
9748 | case OMP_CLAUSE_INBRANCH: |
9749 | case OMP_CLAUSE_NOTINBRANCH: |
9750 | if (branch_seen) |
9751 | { |
9752 | error_at (OMP_CLAUSE_LOCATION (c), |
9753 | "%<inbranch%> clause is incompatible with " |
9754 | "%<notinbranch%>"); |
9755 | remove = true; |
9756 | } |
9757 | branch_seen = true; |
9758 | break; |
9759 | |
9760 | case OMP_CLAUSE_INCLUSIVE: |
9761 | case OMP_CLAUSE_EXCLUSIVE: |
9762 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
9763 | if (!t) |
9764 | t = OMP_CLAUSE_DECL (c); |
9765 | if (t == current_class_ptr) |
9766 | { |
9767 | error_at (OMP_CLAUSE_LOCATION (c), |
9768 | "%<this%> allowed in OpenMP only in %<declare simd%>" |
9769 | " clauses"); |
9770 | remove = true; |
9771 | break; |
9772 | } |
9773 | if (!VAR_P (t) |
9774 | && TREE_CODE (t) != PARM_DECL |
9775 | && TREE_CODE (t) != FIELD_DECL) |
9776 | { |
9777 | if (processing_template_decl && TREE_CODE (t) != OVERLOAD) |
9778 | break; |
9779 | if (DECL_P (t)) |
9780 | error_at (OMP_CLAUSE_LOCATION (c), |
9781 | "%qD is not a variable in clause %qs", t, |
9782 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9783 | else |
9784 | error_at (OMP_CLAUSE_LOCATION (c), |
9785 | "%qE is not a variable in clause %qs", t, |
9786 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
9787 | remove = true; |
9788 | } |
9789 | break; |
9790 | |
9791 | case OMP_CLAUSE_FULL: |
9792 | break; |
9793 | |
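/* The %<partial%> clause of %<omp unroll%>, e.g.
   #pragma omp unroll partial (4).  An explicit unroll factor, if given,
   must be a positive constant integer expression; on error only the
   argument is dropped, the clause itself is kept.  */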
9794 | case OMP_CLAUSE_PARTIAL: |
9795 | partial_seen = true; |
9796 | t = OMP_CLAUSE_PARTIAL_EXPR (c); |
9797 | if (!t) |
9798 | break; |
9799 | |
9800 | if (t == error_mark_node) |
9801 | t = NULL_TREE; |
9802 | else if (!type_dependent_expression_p (t) |
9803 | && !INTEGRAL_TYPE_P (TREE_TYPE (t))) |
9804 | { |
9805 | error_at (OMP_CLAUSE_LOCATION (c), |
9806 | "%<partial%> argument needs positive constant " |
9807 | "integer expression"); |
9808 | t = NULL_TREE; |
9809 | } |
9810 | else |
9811 | { |
9812 | t = mark_rvalue_use (t); |
9813 | if (!processing_template_decl) |
9814 | { |
9815 | t = maybe_constant_value (t); |
9816 | |
9817 | HOST_WIDE_INT n; |
9818 | if (!INTEGRAL_TYPE_P (TREE_TYPE (t)) |
9819 | || !tree_fits_shwi_p (t) |
9820 | || (n = tree_to_shwi (t)) <= 0 |
9821 | || (int)n != n) |
9822 | { |
9823 | error_at (OMP_CLAUSE_LOCATION (c), |
9824 | "%<partial%> argument needs positive " |
9825 | "constant integer expression"); |
9826 | t = NULL_TREE; |
9827 | } |
9828 | } |
9829 | } |
9830 | |
9831 | OMP_CLAUSE_PARTIAL_EXPR (c) = t; |
9832 | break; |
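/* Action clauses of the %<interop%> construct, e.g.
     #pragma omp interop init (targetsync : obj) use (obj2) destroy (obj3)
   (syntax shown for illustration only).  The checks below require the
   interop variables to have type %<omp_interop_t%> and, for %<init%> and
   %<destroy%>, to not be %<const%>.  */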
9833 | case OMP_CLAUSE_INIT: |
9834 | init_seen = true; |
9835 | OMP_CLAUSE_INIT_PREFER_TYPE (c) |
9836 | = cp_finish_omp_init_prefer_type (OMP_CLAUSE_INIT_PREFER_TYPE (c)); |
9837 | if (!OMP_CLAUSE_INIT_TARGETSYNC (c)) |
9838 | init_no_targetsync_clause = c; |
9839 | /* FALLTHRU */ |
9840 | case OMP_CLAUSE_DESTROY: |
9841 | case OMP_CLAUSE_USE: |
9842 | init_use_destroy_seen = true; |
9843 | t = OMP_CLAUSE_DECL (c); |
9844 | if (bitmap_bit_p (&generic_head, DECL_UID (t))) |
9845 | { |
9846 | error_at (OMP_CLAUSE_LOCATION (c), |
9847 | "%qD appears more than once in action clauses", t); |
9848 | remove = true; |
9849 | break; |
9850 | } |
9851 | bitmap_set_bit (&generic_head, DECL_UID (t)); |
9852 | /* FALLTHRU */ |
9853 | case OMP_CLAUSE_INTEROP: |
9854 | if (!processing_template_decl) |
9855 | { |
9856 | if (/* (ort == C_ORT_OMP_INTEROP [uncomment for depobj init] |
9857 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_INTEROP) && */ |
9858 | !c_omp_interop_t_p (TREE_TYPE (OMP_CLAUSE_DECL (c)))) |
9859 | { |
9860 | error_at (OMP_CLAUSE_LOCATION (c), |
9861 | "%qD must be of %<omp_interop_t%>", |
9862 | OMP_CLAUSE_DECL (c)); |
9863 | remove = true; |
9864 | } |
9865 | else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_INIT |
9866 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DESTROY) |
9867 | && TREE_READONLY (OMP_CLAUSE_DECL (c))) |
9868 | { |
9869 | error_at (OMP_CLAUSE_LOCATION (c), |
9870 | "%qD shall not be const", OMP_CLAUSE_DECL (c)); |
9871 | remove = true; |
9872 | } |
9873 | } |
9874 | pc = &OMP_CLAUSE_CHAIN (c); |
9875 | break; |
9876 | default: |
9877 | gcc_unreachable (); |
9878 | } |
9879 | |
9880 | if (remove) |
9881 | { |
9882 | if (grp_start_p) |
9883 | { |
9884 | /* If we found a clause to remove, we want to remove the whole |
9885 | expanded group, otherwise gimplify |
9886 | (omp_resolve_clause_dependencies) can get confused. */ |
9887 | *grp_start_p = grp_sentinel; |
9888 | pc = grp_start_p; |
9889 | grp_start_p = NULL; |
9890 | } |
9891 | else |
9892 | *pc = OMP_CLAUSE_CHAIN (c); |
9893 | } |
9894 | else |
9895 | pc = &OMP_CLAUSE_CHAIN (c); |
9896 | } |
9897 | |
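/* reduction_seen < 0 indicates that an %<inscan%> reduction modifier was
   seen; combining it with %<ordered%> or %<schedule%> is invalid, so use
   -2 to flag that and diagnose it when those clauses are handled below.  */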
9898 | if (reduction_seen < 0 && (ordered_seen || schedule_seen)) |
9899 | reduction_seen = -2; |
9900 | |
9901 | for (pc = &clauses, c = clauses; c ; c = *pc) |
9902 | { |
9903 | enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c); |
9904 | bool remove = false; |
9905 | bool need_complete_type = false; |
9906 | bool need_default_ctor = false; |
9907 | bool need_copy_ctor = false; |
9908 | bool need_copy_assignment = false; |
9909 | bool need_implicitly_determined = false; |
9910 | bool need_dtor = false; |
9911 | tree type, inner_type; |
9912 | |
9913 | switch (c_kind) |
9914 | { |
9915 | case OMP_CLAUSE_SHARED: |
9916 | need_implicitly_determined = true; |
9917 | break; |
9918 | case OMP_CLAUSE_PRIVATE: |
9919 | need_complete_type = true; |
9920 | need_default_ctor = true; |
9921 | need_dtor = true; |
9922 | need_implicitly_determined = true; |
9923 | break; |
9924 | case OMP_CLAUSE_FIRSTPRIVATE: |
9925 | need_complete_type = true; |
9926 | need_copy_ctor = true; |
9927 | need_dtor = true; |
9928 | need_implicitly_determined = true; |
9929 | break; |
9930 | case OMP_CLAUSE_LASTPRIVATE: |
9931 | need_complete_type = true; |
9932 | need_copy_assignment = true; |
9933 | need_implicitly_determined = true; |
9934 | break; |
9935 | case OMP_CLAUSE_REDUCTION: |
9936 | if (reduction_seen == -2) |
9937 | OMP_CLAUSE_REDUCTION_INSCAN (c) = 0; |
9938 | if (OMP_CLAUSE_REDUCTION_INSCAN (c)) |
9939 | need_copy_assignment = true; |
9940 | need_implicitly_determined = true; |
9941 | break; |
9942 | case OMP_CLAUSE_IN_REDUCTION: |
9943 | case OMP_CLAUSE_TASK_REDUCTION: |
9944 | case OMP_CLAUSE_INCLUSIVE: |
9945 | case OMP_CLAUSE_EXCLUSIVE: |
9946 | need_implicitly_determined = true; |
9947 | break; |
9948 | case OMP_CLAUSE_LINEAR: |
9949 | if (ort != C_ORT_OMP_DECLARE_SIMD) |
9950 | need_implicitly_determined = true; |
9951 | else if (OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) |
9952 | && !bitmap_bit_p (&map_head, |
9953 | DECL_UID (OMP_CLAUSE_LINEAR_STEP (c)))) |
9954 | { |
9955 | error_at (OMP_CLAUSE_LOCATION (c), |
9956 | "%<linear%> clause step is a parameter %qD not " |
9957 | "specified in %<uniform%> clause", |
9958 | OMP_CLAUSE_LINEAR_STEP (c)); |
9959 | *pc = OMP_CLAUSE_CHAIN (c); |
9960 | continue; |
9961 | } |
9962 | break; |
9963 | case OMP_CLAUSE_COPYPRIVATE: |
9964 | need_copy_assignment = true; |
9965 | break; |
9966 | case OMP_CLAUSE_COPYIN: |
9967 | need_copy_assignment = true; |
9968 | break; |
9969 | case OMP_CLAUSE_SIMDLEN: |
9970 | if (safelen |
9971 | && !processing_template_decl |
9972 | && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen), |
9973 | OMP_CLAUSE_SIMDLEN_EXPR (c))) |
9974 | { |
9975 | error_at (OMP_CLAUSE_LOCATION (c), |
9976 | "%<simdlen%> clause value is bigger than " |
9977 | "%<safelen%> clause value"); |
9978 | OMP_CLAUSE_SIMDLEN_EXPR (c) |
9979 | = OMP_CLAUSE_SAFELEN_EXPR (safelen); |
9980 | } |
9981 | pc = &OMP_CLAUSE_CHAIN (c); |
9982 | continue; |
9983 | case OMP_CLAUSE_SCHEDULE: |
9984 | if (ordered_seen |
9985 | && (OMP_CLAUSE_SCHEDULE_KIND (c) |
9986 | & OMP_CLAUSE_SCHEDULE_NONMONOTONIC)) |
9987 | { |
9988 | error_at (OMP_CLAUSE_LOCATION (c), |
9989 | "%<nonmonotonic%> schedule modifier specified " |
9990 | "together with %<ordered%> clause"); |
9991 | OMP_CLAUSE_SCHEDULE_KIND (c) |
9992 | = (enum omp_clause_schedule_kind) |
9993 | (OMP_CLAUSE_SCHEDULE_KIND (c) |
9994 | & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC); |
9995 | } |
9996 | if (reduction_seen == -2) |
9997 | error_at (OMP_CLAUSE_LOCATION (c), |
9998 | "%qs clause specified together with %<inscan%> " |
9999 | "%<reduction%> clause", "schedule"); |
10000 | pc = &OMP_CLAUSE_CHAIN (c); |
10001 | continue; |
10002 | case OMP_CLAUSE_NOGROUP: |
10003 | if (reduction_seen) |
10004 | { |
10005 | error_at (OMP_CLAUSE_LOCATION (c), |
10006 | "%<nogroup%> clause must not be used together with " |
10007 | "%<reduction%> clause"); |
10008 | *pc = OMP_CLAUSE_CHAIN (c); |
10009 | continue; |
10010 | } |
10011 | pc = &OMP_CLAUSE_CHAIN (c); |
10012 | continue; |
10013 | case OMP_CLAUSE_GRAINSIZE: |
10014 | if (num_tasks_seen) |
10015 | { |
10016 | error_at (OMP_CLAUSE_LOCATION (c), |
10017 | "%<grainsize%> clause must not be used together with " |
10018 | "%<num_tasks%> clause"); |
10019 | *pc = OMP_CLAUSE_CHAIN (c); |
10020 | continue; |
10021 | } |
10022 | pc = &OMP_CLAUSE_CHAIN (c); |
10023 | continue; |
10024 | case OMP_CLAUSE_ORDERED: |
10025 | if (reduction_seen == -2) |
10026 | error_at (OMP_CLAUSE_LOCATION (c), |
10027 | "%qs clause specified together with %<inscan%> " |
10028 | "%<reduction%> clause", "ordered"); |
10029 | pc = &OMP_CLAUSE_CHAIN (c); |
10030 | continue; |
10031 | case OMP_CLAUSE_ORDER: |
10032 | if (ordered_seen) |
10033 | { |
10034 | error_at (OMP_CLAUSE_LOCATION (c), |
10035 | "%<order%> clause must not be used together " |
10036 | "with %<ordered%> clause"); |
10037 | *pc = OMP_CLAUSE_CHAIN (c); |
10038 | continue; |
10039 | } |
10040 | pc = &OMP_CLAUSE_CHAIN (c); |
10041 | continue; |
10042 | case OMP_CLAUSE_DETACH: |
10043 | if (mergeable_seen) |
10044 | { |
10045 | error_at (OMP_CLAUSE_LOCATION (c), |
10046 | "%<detach%> clause must not be used together with " |
10047 | "%<mergeable%> clause"); |
10048 | *pc = OMP_CLAUSE_CHAIN (c); |
10049 | continue; |
10050 | } |
10051 | pc = &OMP_CLAUSE_CHAIN (c); |
10052 | continue; |
10053 | case OMP_CLAUSE_MAP: |
10054 | if (target_in_reduction_seen && !processing_template_decl) |
10055 | { |
10056 | t = OMP_CLAUSE_DECL (c); |
10057 | while (handled_component_p (t) |
10058 | || INDIRECT_REF_P (t) |
10059 | || TREE_CODE (t) == ADDR_EXPR |
10060 | || TREE_CODE (t) == MEM_REF |
10061 | || TREE_CODE (t) == NON_LVALUE_EXPR) |
10062 | t = TREE_OPERAND (t, 0); |
10063 | if (DECL_P (t) |
10064 | && bitmap_bit_p (&oacc_reduction_head, DECL_UID (t))) |
10065 | OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1; |
10066 | } |
10067 | pc = &OMP_CLAUSE_CHAIN (c); |
10068 | continue; |
10069 | case OMP_CLAUSE_FULL: |
10070 | if (partial_seen) |
10071 | { |
10072 | error_at (OMP_CLAUSE_LOCATION (c), |
10073 | "%<full%> clause must not be used together " |
10074 | "with %<partial%> clause"); |
10075 | *pc = OMP_CLAUSE_CHAIN (c); |
10076 | continue; |
10077 | } |
10078 | pc = &OMP_CLAUSE_CHAIN (c); |
10079 | continue; |
10080 | case OMP_CLAUSE_NOWAIT: |
10081 | if (copyprivate_seen) |
10082 | { |
10083 | error_at (OMP_CLAUSE_LOCATION (c), |
10084 | "%<nowait%> clause must not be used together " |
10085 | "with %<copyprivate%> clause"); |
10086 | *pc = OMP_CLAUSE_CHAIN (c); |
10087 | continue; |
10088 | } |
10089 | /* FALLTHRU */ |
10090 | default: |
10091 | pc = &OMP_CLAUSE_CHAIN (c); |
10092 | continue; |
10093 | } |
10094 | |
10095 | t = OMP_CLAUSE_DECL (c); |
10096 | switch (c_kind) |
10097 | { |
10098 | case OMP_CLAUSE_LASTPRIVATE: |
10099 | if (DECL_P (t) |
10100 | && !bitmap_bit_p (&firstprivate_head, DECL_UID (t))) |
10101 | { |
10102 | need_default_ctor = true; |
10103 | need_dtor = true; |
10104 | } |
10105 | break; |
10106 | |
10107 | case OMP_CLAUSE_REDUCTION: |
10108 | case OMP_CLAUSE_IN_REDUCTION: |
10109 | case OMP_CLAUSE_TASK_REDUCTION: |
10110 | if (allocate_seen) |
10111 | { |
10112 | if (TREE_CODE (t) == MEM_REF) |
10113 | { |
10114 | t = TREE_OPERAND (t, 0); |
10115 | if (TREE_CODE (t) == POINTER_PLUS_EXPR) |
10116 | t = TREE_OPERAND (t, 0); |
10117 | if (TREE_CODE (t) == ADDR_EXPR |
10118 | || INDIRECT_REF_P (t)) |
10119 | t = TREE_OPERAND (t, 0); |
10120 | if (DECL_P (t)) |
10121 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
10122 | } |
10123 | else if (TREE_CODE (t) == OMP_ARRAY_SECTION) |
10124 | { |
10125 | while (TREE_CODE (t) == OMP_ARRAY_SECTION) |
10126 | t = TREE_OPERAND (t, 0); |
10127 | if (DECL_P (t)) |
10128 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
10129 | t = OMP_CLAUSE_DECL (c); |
10130 | } |
10131 | else if (DECL_P (t)) |
10132 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
10133 | t = OMP_CLAUSE_DECL (c); |
10134 | } |
10135 | if (processing_template_decl |
10136 | && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
10137 | break; |
10138 | if (finish_omp_reduction_clause (c, &need_default_ctor, |
10139 | &need_dtor)) |
10140 | remove = true; |
10141 | else |
10142 | t = OMP_CLAUSE_DECL (c); |
10143 | break; |
10144 | |
10145 | case OMP_CLAUSE_COPYIN: |
10146 | if (processing_template_decl |
10147 | && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
10148 | break; |
10149 | if (!VAR_P (t) || !CP_DECL_THREAD_LOCAL_P (t)) |
10150 | { |
10151 | error_at (OMP_CLAUSE_LOCATION (c), |
10152 | "%qE must be %<threadprivate%> for %<copyin%>", t); |
10153 | remove = true; |
10154 | } |
10155 | break; |
10156 | |
10157 | default: |
10158 | break; |
10159 | } |
10160 | |
10161 | if (processing_template_decl |
10162 | && !VAR_P (t) && TREE_CODE (t) != PARM_DECL) |
10163 | { |
10164 | pc = &OMP_CLAUSE_CHAIN (c); |
10165 | continue; |
10166 | } |
10167 | |
10168 | if (need_complete_type || need_copy_assignment) |
10169 | { |
10170 | t = require_complete_type (t); |
10171 | if (t == error_mark_node) |
10172 | remove = true; |
10173 | else if (!processing_template_decl |
10174 | && TYPE_REF_P (TREE_TYPE (t)) |
10175 | && !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t)) |
10176 | remove = true; |
10177 | } |
10178 | if (need_implicitly_determined) |
10179 | { |
10180 | const char *share_name = NULL; |
10181 | |
10182 | if (allocate_seen |
10183 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED |
10184 | && DECL_P (t)) |
10185 | bitmap_clear_bit (&aligned_head, DECL_UID (t)); |
10186 | |
10187 | if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t)) |
10188 | share_name = "threadprivate"; |
10189 | else switch (cxx_omp_predetermined_sharing_1 (t)) |
10190 | { |
10191 | case OMP_CLAUSE_DEFAULT_UNSPECIFIED: |
10192 | break; |
10193 | case OMP_CLAUSE_DEFAULT_SHARED: |
10194 | if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
10195 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) |
10196 | && c_omp_predefined_variable (t)) |
10197 | /* The __func__ variable and similar function-local predefined |
10198 | variables may be listed in a shared or firstprivate |
10199 | clause. */ |
10200 | break; |
10201 | if (VAR_P (t) |
10202 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
10203 | && TREE_STATIC (t) |
10204 | && cxx_omp_const_qual_no_mutable (t)) |
10205 | { |
10206 | tree ctx = CP_DECL_CONTEXT (t); |
10207 | /* const qualified static data members without mutable |
10208 | member may be specified in firstprivate clause. */ |
10209 | if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx)) |
10210 | break; |
10211 | } |
10212 | share_name = "shared"; |
10213 | break; |
10214 | case OMP_CLAUSE_DEFAULT_PRIVATE: |
10215 | share_name = "private"; |
10216 | break; |
10217 | default: |
10218 | gcc_unreachable (); |
10219 | } |
10220 | if (share_name) |
10221 | { |
10222 | error_at (OMP_CLAUSE_LOCATION (c), |
10223 | "%qE is predetermined %qs for %qs", |
10224 | omp_clause_printable_decl (t), share_name, |
10225 | omp_clause_code_name[OMP_CLAUSE_CODE (c)]); |
10226 | remove = true; |
10227 | } |
10228 | else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED |
10229 | && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE |
10230 | && cxx_omp_const_qual_no_mutable (t)) |
10231 | { |
10232 | error_at (OMP_CLAUSE_LOCATION (c), |
10233 | "%<const%> qualified %qE without %<mutable%> member " |
10234 | "may appear only in %<shared%> or %<firstprivate%> " |
10235 | "clauses", omp_clause_printable_decl (decl: t)); |
10236 | remove = true; |
10237 | } |
10238 | } |
10239 | |
10240 | if (detach_seen |
10241 | && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED |
10242 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
10243 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE |
10244 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) |
10245 | && OMP_CLAUSE_DECL (c) == OMP_CLAUSE_DECL (detach_seen)) |
10246 | { |
10247 | error_at (OMP_CLAUSE_LOCATION (c), |
10248 | "the event handle of a %<detach%> clause " |
10249 | "should not be in a data-sharing clause"); |
10250 | remove = true; |
10251 | } |
10252 | |
10253 | /* We're interested in the base element, not arrays. */ |
10254 | inner_type = type = TREE_TYPE (t); |
10255 | if ((need_complete_type |
10256 | || need_copy_assignment |
10257 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION |
10258 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION |
10259 | || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) |
10260 | && TYPE_REF_P (inner_type)) |
10261 | inner_type = TREE_TYPE (inner_type); |
10262 | while (TREE_CODE (inner_type) == ARRAY_TYPE) |
10263 | inner_type = TREE_TYPE (inner_type); |
10264 | |
10265 | /* Check for special function availability by building a call to one. |
10266 | Save the results, because later we won't be in the right context |
10267 | for making these queries. */ |
10268 | if (CLASS_TYPE_P (inner_type) |
10269 | && COMPLETE_TYPE_P (inner_type) |
10270 | && (need_default_ctor || need_copy_ctor |
10271 | || need_copy_assignment || need_dtor) |
10272 | && !type_dependent_expression_p (t) |
10273 | && cxx_omp_create_clause_info (c, inner_type, need_default_ctor, |
10274 | need_copy_ctor, need_copy_assignment, |
10275 | need_dtor)) |
10276 | remove = true; |
10277 | |
10278 | if (!remove |
10279 | && c_kind == OMP_CLAUSE_SHARED |
10280 | && processing_template_decl) |
10281 | { |
10282 | t = omp_clause_decl_field (OMP_CLAUSE_DECL (c)); |
10283 | if (t) |
10284 | OMP_CLAUSE_DECL (c) = t; |
10285 | } |
10286 | |
10287 | if (remove) |
10288 | *pc = OMP_CLAUSE_CHAIN (c); |
10289 | else |
10290 | pc = &OMP_CLAUSE_CHAIN (c); |
10291 | } |
10292 | |
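/* A list item of an %<allocate%> clause must also appear in an explicit
   data-sharing clause on the same construct, e.g.
     #pragma omp parallel allocate (x) firstprivate (x)
   Items for which no such clause was seen still have their bit set in
   aligned_head and are diagnosed here.  */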
10293 | if (allocate_seen) |
10294 | for (pc = &clauses, c = clauses; c ; c = *pc) |
10295 | { |
10296 | bool remove = false; |
10297 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE |
10298 | && !OMP_CLAUSE_ALLOCATE_COMBINED (c) |
10299 | && DECL_P (OMP_CLAUSE_DECL (c)) |
10300 | && bitmap_bit_p (&aligned_head, DECL_UID (OMP_CLAUSE_DECL (c)))) |
10301 | { |
10302 | error_at (OMP_CLAUSE_LOCATION (c), |
10303 | "%qD specified in %<allocate%> clause but not in " |
10304 | "an explicit privatization clause", OMP_CLAUSE_DECL (c)); |
10305 | remove = true; |
10306 | } |
10307 | if (remove) |
10308 | *pc = OMP_CLAUSE_CHAIN (c); |
10309 | else |
10310 | pc = &OMP_CLAUSE_CHAIN (c); |
10311 | } |
10312 | |
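/* A %<depend%> clause on %<interop%> requires an action clause with the
   %<targetsync%> interop-type, e.g.
     #pragma omp interop init (targetsync : obj) depend (inout : x)
   Diagnose a missing or %<targetsync%>-less %<init%> clause here.  */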
10313 | if (ort == C_ORT_OMP_INTEROP |
10314 | && depend_clause |
10315 | && (!init_use_destroy_seen |
10316 | || (init_seen && init_no_targetsync_clause))) |
10317 | { |
10318 | error_at (OMP_CLAUSE_LOCATION (depend_clause), |
10319 | "%<depend%> clause requires action clauses with " |
10320 | "%<targetsync%> interop-type"); |
10321 | if (init_no_targetsync_clause) |
10322 | inform (OMP_CLAUSE_LOCATION (init_no_targetsync_clause), |
10323 | "%<init%> clause lacks the %<targetsync%> modifier"); |
10324 | } |
10325 | |
10326 | bitmap_obstack_release (NULL); |
10327 | return clauses; |
10328 | } |
10329 | |
10330 | /* Start processing OpenMP clauses that can include any |
10331 | privatization clauses for non-static data members. */ |
10332 | |
10333 | tree |
10334 | push_omp_privatization_clauses (bool ignore_next) |
10335 | { |
10336 | if (omp_private_member_ignore_next) |
10337 | { |
10338 | omp_private_member_ignore_next = ignore_next; |
10339 | return NULL_TREE; |
10340 | } |
10341 | omp_private_member_ignore_next = ignore_next; |
10342 | if (omp_private_member_map) |
10343 | omp_private_member_vec.safe_push (error_mark_node); |
10344 | return push_stmt_list (); |
10345 | } |
10346 | |
10347 | /* Revert remapping of any non-static data members since |
10348 | the last push_omp_privatization_clauses () call. */ |
10349 | |
10350 | void |
10351 | pop_omp_privatization_clauses (tree stmt) |
10352 | { |
10353 | if (stmt == NULL_TREE) |
10354 | return; |
10355 | stmt = pop_stmt_list (stmt); |
10356 | if (omp_private_member_map) |
10357 | { |
10358 | while (!omp_private_member_vec.is_empty ()) |
10359 | { |
10360 | tree t = omp_private_member_vec.pop (); |
10361 | if (t == error_mark_node) |
10362 | { |
10363 | add_stmt (stmt); |
10364 | return; |
10365 | } |
10366 | bool no_decl_expr = t == integer_zero_node; |
10367 | if (no_decl_expr) |
10368 | t = omp_private_member_vec.pop (); |
10369 | tree *v = omp_private_member_map->get (t); |
10370 | gcc_assert (v); |
10371 | if (!no_decl_expr) |
10372 | add_decl_expr (*v); |
10373 | omp_private_member_map->remove (t); |
10374 | } |
10375 | delete omp_private_member_map; |
10376 | omp_private_member_map = NULL; |
10377 | } |
10378 | add_stmt (stmt); |
10379 | } |
10380 | |
10381 | /* Remember OpenMP privatization clauses mapping and clear it. |
10382 | Used for lambdas. */ |
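/* A sketch of the encoding used by omp_private_member_vec here and in
   pop_omp_privatization_clauses above: error_mark_node acts as a level
   separator, integer_zero_node marks an entry for which no DECL_EXPR is
   to be re-added, and the saved vector additionally uses integer_one_node
   to record that omp_private_member_ignore_next was set.  */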
10383 | |
10384 | void |
10385 | save_omp_privatization_clauses (vec<tree> &save) |
10386 | { |
10387 | save = vNULL; |
10388 | if (omp_private_member_ignore_next) |
10389 | save.safe_push (integer_one_node); |
10390 | omp_private_member_ignore_next = false; |
10391 | if (!omp_private_member_map) |
10392 | return; |
10393 | |
10394 | while (!omp_private_member_vec.is_empty ()) |
10395 | { |
10396 | tree t = omp_private_member_vec.pop (); |
10397 | if (t == error_mark_node) |
10398 | { |
10399 | save.safe_push (t); |
10400 | continue; |
10401 | } |
10402 | tree n = t; |
10403 | if (t == integer_zero_node) |
10404 | t = omp_private_member_vec.pop (); |
10405 | tree *v = omp_private_member_map->get (t); |
10406 | gcc_assert (v); |
10407 | save.safe_push (*v); |
10408 | save.safe_push (t); |
10409 | if (n != t) |
10410 | save.safe_push (n); |
10411 | } |
10412 | delete omp_private_member_map; |
10413 | omp_private_member_map = NULL; |
10414 | } |
10415 | |
10416 | /* Restore OpenMP privatization clauses mapping saved by the |
10417 | above function. */ |
10418 | |
10419 | void |
10420 | restore_omp_privatization_clauses (vec<tree> &save) |
10421 | { |
10422 | gcc_assert (omp_private_member_vec.is_empty ()); |
10423 | omp_private_member_ignore_next = false; |
10424 | if (save.is_empty ()) |
10425 | return; |
10426 | if (save.length () == 1 && save[0] == integer_one_node) |
10427 | { |
10428 | omp_private_member_ignore_next = true; |
10429 | save.release (); |
10430 | return; |
10431 | } |
10432 | |
10433 | omp_private_member_map = new hash_map <tree, tree>; |
10434 | while (!save.is_empty ()) |
10435 | { |
10436 | tree t = save.pop (); |
10437 | tree n = t; |
10438 | if (t != error_mark_node) |
10439 | { |
10440 | if (t == integer_one_node) |
10441 | { |
10442 | omp_private_member_ignore_next = true; |
10443 | gcc_assert (save.is_empty ()); |
10444 | break; |
10445 | } |
10446 | if (t == integer_zero_node) |
10447 | t = save.pop (); |
10448 | tree &v = omp_private_member_map->get_or_insert (t); |
10449 | v = save.pop (); |
10450 | } |
10451 | omp_private_member_vec.safe_push (t); |
10452 | if (n != t) |
10453 | omp_private_member_vec.safe_push (n); |
10454 | } |
10455 | save.release (); |
10456 | } |
10457 | |
10458 | /* For all variables in the tree_list VARS, mark them as thread local. */ |
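/* For example, after
     static int counter;
     #pragma omp threadprivate (counter)
   each thread references its own copy of COUNTER.  */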
10459 | |
10460 | void |
10461 | finish_omp_threadprivate (tree vars) |
10462 | { |
10463 | tree t; |
10464 | |
10465 | /* Mark every variable in VARS to be assigned thread local storage. */ |
10466 | for (t = vars; t; t = TREE_CHAIN (t)) |
10467 | { |
10468 | tree v = TREE_PURPOSE (t); |
10469 | location_t loc = EXPR_LOCATION (TREE_VALUE (t)); |
10470 | |
10471 | if (error_operand_p (v)) |
10472 | ; |
10473 | else if (!VAR_P (v)) |
10474 | error_at (loc, "%<threadprivate%> %qD is not file, namespace " |
10475 | "or block scope variable", v); |
10476 | /* If V had already been marked threadprivate, it doesn't matter |
10477 | whether it had been used prior to this point. */ |
10478 | else if (TREE_USED (v) |
10479 | && (DECL_LANG_SPECIFIC (v) == NULL |
10480 | || !CP_DECL_THREADPRIVATE_P (v))) |
10481 | error_at (loc, "%qE declared %<threadprivate%> after first use", v); |
10482 | else if (! TREE_STATIC (v) && ! DECL_EXTERNAL (v)) |
10483 | error_at (loc, "automatic variable %qE cannot be %<threadprivate%>", v); |
10484 | else if (! COMPLETE_TYPE_P (complete_type (TREE_TYPE (v)))) |
10485 | error_at (loc, "%<threadprivate%> %qE has incomplete type", v); |
10486 | else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v)) |
10487 | && CP_DECL_CONTEXT (v) != current_class_type) |
10488 | error_at (loc, "%<threadprivate%> %qE directive not " |
10489 | "in %qT definition", v, CP_DECL_CONTEXT (v)); |
10490 | else |
10491 | { |
10492 | /* Allocate a LANG_SPECIFIC structure for V, if needed. */ |
10493 | if (DECL_LANG_SPECIFIC (v) == NULL) |
10494 | retrofit_lang_decl (v); |
10495 | |
10496 | if (! CP_DECL_THREAD_LOCAL_P (v)) |
10497 | { |
10498 | CP_DECL_THREAD_LOCAL_P (v) = true; |
10499 | set_decl_tls_model (v, decl_default_tls_model (v)); |
10500 | /* If rtl has been already set for this var, call |
10501 | make_decl_rtl once again, so that encode_section_info |
10502 | has a chance to look at the new decl flags. */ |
10503 | if (DECL_RTL_SET_P (v)) |
10504 | make_decl_rtl (v); |
10505 | } |
10506 | CP_DECL_THREADPRIVATE_P (v) = 1; |
10507 | } |
10508 | } |
10509 | } |
10510 | |
10511 | /* Build an OpenMP structured block. */ |
10512 | |
10513 | tree |
10514 | begin_omp_structured_block (void) |
10515 | { |
10516 | return do_pushlevel (sk_omp); |
10517 | } |
10518 | |
10519 | tree |
10520 | finish_omp_structured_block (tree block) |
10521 | { |
10522 | return do_poplevel (block); |
10523 | } |
10524 | |
10525 | /* Similarly, except force the retention of the BLOCK. */ |
10526 | |
10527 | tree |
10528 | begin_omp_parallel (void) |
10529 | { |
10530 | keep_next_level (true); |
10531 | return begin_omp_structured_block (); |
10532 | } |
10533 | |
10534 | /* Generate OACC_DATA, with CLAUSES and BLOCK as its compound |
10535 | statement. */ |
10536 | |
10537 | tree |
10538 | finish_oacc_data (tree clauses, tree block) |
10539 | { |
10540 | tree stmt; |
10541 | |
10542 | block = finish_omp_structured_block (block); |
10543 | |
10544 | stmt = make_node (OACC_DATA); |
10545 | TREE_TYPE (stmt) = void_type_node; |
10546 | OACC_DATA_CLAUSES (stmt) = clauses; |
10547 | OACC_DATA_BODY (stmt) = block; |
10548 | |
10549 | return add_stmt (stmt); |
10550 | } |
10551 | |
10552 | /* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound |
10553 | statement. */ |
10554 | |
10555 | tree |
10556 | finish_oacc_host_data (tree clauses, tree block) |
10557 | { |
10558 | tree stmt; |
10559 | |
10560 | block = finish_omp_structured_block (block); |
10561 | |
10562 | stmt = make_node (OACC_HOST_DATA); |
10563 | TREE_TYPE (stmt) = void_type_node; |
10564 | OACC_HOST_DATA_CLAUSES (stmt) = clauses; |
10565 | OACC_HOST_DATA_BODY (stmt) = block; |
10566 | |
10567 | return add_stmt (stmt); |
10568 | } |
10569 | |
10570 | /* Generate OMP construct CODE, with BODY and CLAUSES as its compound |
10571 | statement. */ |
10572 | |
10573 | tree |
10574 | finish_omp_construct (enum tree_code code, tree body, tree clauses) |
10575 | { |
10576 | body = finish_omp_structured_block (body); |
10577 | |
10578 | tree stmt = make_node (code); |
10579 | TREE_TYPE (stmt) = void_type_node; |
10580 | OMP_BODY (stmt) = body; |
10581 | OMP_CLAUSES (stmt) = clauses; |
10582 | |
10583 | return add_stmt (stmt); |
10584 | } |
10585 | |
10586 | /* Used to walk OpenMP target directive body. */ |
10587 | |
10588 | struct omp_target_walk_data |
10589 | { |
10590 | /* Holds the 'this' expression found in current function. */ |
10591 | tree current_object; |
10592 | |
10593 | /* True if the 'this' expression was accessed in the target body. */ |
10594 | bool this_expr_accessed; |
10595 | |
10596 | /* For non-static functions, record which pointer-typed members were |
10597 | accessed, and the whole expression. */ |
10598 | hash_map<tree, tree> ptr_members_accessed; |
10599 | |
10600 | /* Record which lambda objects were accessed in target body. */ |
10601 | hash_set<tree> lambda_objects_accessed; |
10602 | |
10603 | /* For lambda functions, the __closure object expression of the current |
10604 | function, and the set of captured variables accessed in target body. */ |
10605 | tree current_closure; |
10606 | hash_set<tree> closure_vars_accessed; |
10607 | |
10608 | /* Local variables declared inside a BIND_EXPR, used to filter out such |
10609 | variables when recording lambda_objects_accessed. */ |
10610 | hash_set<tree> local_decls; |
10611 | |
10612 | omp_mapper_list<tree> *mappers; |
10613 | }; |
10614 | |
10615 | /* Helper function of finish_omp_target_clauses, called via |
10616 | cp_walk_tree_without_duplicates. Traverse body of OpenMP target |
10617 | directive *TP, and fill out omp_target_walk_data passed in *PTR. */ |
10618 | |
10619 | static tree |
10620 | finish_omp_target_clauses_r (tree *tp, int *walk_subtrees, void *ptr) |
10621 | { |
10622 | tree t = *tp; |
10623 | struct omp_target_walk_data *data = (struct omp_target_walk_data *) ptr; |
10624 | tree current_object = data->current_object; |
10625 | tree current_closure = data->current_closure; |
10626 | omp_mapper_list<tree> *mlist = data->mappers; |
10627 | |
10628 | /* References inside of these expression codes shouldn't incur any |
10629 | form of mapping, so return early. */ |
10630 | if (TREE_CODE (t) == SIZEOF_EXPR |
10631 | || TREE_CODE (t) == ALIGNOF_EXPR) |
10632 | { |
10633 | *walk_subtrees = 0; |
10634 | return NULL_TREE; |
10635 | } |
10636 | |
10637 | if (TREE_CODE (t) == OMP_CLAUSE) |
10638 | return NULL_TREE; |
10639 | |
10640 | if (!processing_template_decl) |
10641 | { |
10642 | tree aggr_type = NULL_TREE; |
10643 | |
10644 | if (TREE_CODE (t) == COMPONENT_REF |
10645 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))) |
10646 | aggr_type = TREE_TYPE (TREE_OPERAND (t, 0)); |
10647 | else if ((TREE_CODE (t) == VAR_DECL |
10648 | || TREE_CODE (t) == PARM_DECL |
10649 | || TREE_CODE (t) == RESULT_DECL) |
10650 | && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t))) |
10651 | aggr_type = TREE_TYPE (t); |
10652 | |
10653 | if (aggr_type) |
10654 | { |
10655 | tree mapper_fn = cxx_omp_mapper_lookup (NULL_TREE, aggr_type); |
10656 | if (mapper_fn) |
10657 | mlist->add_mapper (NULL_TREE, aggr_type, mapper_fn); |
10658 | } |
10659 | } |
10660 | |
10661 | if (current_object) |
10662 | { |
10663 | tree this_expr = TREE_OPERAND (current_object, 0); |
10664 | |
10665 | if (operand_equal_p (t, this_expr)) |
10666 | { |
10667 | data->this_expr_accessed = true; |
10668 | *walk_subtrees = 0; |
10669 | return NULL_TREE; |
10670 | } |
10671 | |
10672 | if (TREE_CODE (t) == COMPONENT_REF |
10673 | && POINTER_TYPE_P (TREE_TYPE (t)) |
10674 | && operand_equal_p (TREE_OPERAND (t, 0), current_object) |
10675 | && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL) |
10676 | { |
10677 | data->this_expr_accessed = true; |
10678 | tree fld = TREE_OPERAND (t, 1); |
10679 | if (data->ptr_members_accessed.get (fld) == NULL) |
10680 | { |
10681 | if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE) |
10682 | t = convert_from_reference (t); |
10683 | data->ptr_members_accessed.put (fld, t); |
10684 | } |
10685 | *walk_subtrees = 0; |
10686 | return NULL_TREE; |
10687 | } |
10688 | } |
10689 | |
10690 | /* When the current_function_decl is a lambda function, the closure object |
10691 | argument's type seems to not yet have fields layed out, so a recording |
10692 | of DECL_VALUE_EXPRs during the target body walk seems the only way to |
10693 | find them. */ |
10694 | if (current_closure |
10695 | && (VAR_P (t) |
10696 | || TREE_CODE (t) == PARM_DECL |
10697 | || TREE_CODE (t) == RESULT_DECL) |
10698 | && DECL_HAS_VALUE_EXPR_P (t) |
10699 | && TREE_CODE (DECL_VALUE_EXPR (t)) == COMPONENT_REF |
10700 | && operand_equal_p (current_closure, |
10701 | TREE_OPERAND (DECL_VALUE_EXPR (t), 0))) |
10702 | { |
10703 | if (!data->closure_vars_accessed.contains (t)) |
10704 | data->closure_vars_accessed.add (t); |
10705 | *walk_subtrees = 0; |
10706 | return NULL_TREE; |
10707 | } |
10708 | |
10709 | if (TREE_CODE (t) == BIND_EXPR) |
10710 | { |
10711 | if (tree block = BIND_EXPR_BLOCK (t)) |
10712 | for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var)) |
10713 | if (!data->local_decls.contains (var)) |
10714 | data->local_decls.add (var); |
10715 | return NULL_TREE; |
10716 | } |
10717 | |
10718 | if (TREE_TYPE (t) && LAMBDA_TYPE_P (TREE_TYPE (t))) |
10719 | { |
10720 | tree lt = TREE_TYPE (t); |
10721 | gcc_assert (CLASS_TYPE_P (lt)); |
10722 | |
10723 | if (!data->lambda_objects_accessed.contains (t) |
10724 | /* Do not prepare to create target maps for locally declared |
10725 | lambdas or anonymous ones. */ |
10726 | && !data->local_decls.contains (t) |
10727 | && TREE_CODE (t) != TARGET_EXPR) |
10728 | data->lambda_objects_accessed.add (t); |
10729 | *walk_subtrees = 0; |
10730 | return NULL_TREE; |
10731 | } |
10732 | |
10733 | return NULL_TREE; |
10734 | } |
10735 | |
10736 | /* Helper function for finish_omp_target, and also from tsubst_expr. |
10737 | Create additional clauses for mapping of non-static members, lambda objects, |
10738 | etc. */ |
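/* As an illustrative (not testsuite) example: for a non-static member
   function whose target region references this->scalar and this->ptr[i],
   this synthesizes roughly a map(tofrom: *this) plus a firstprivate
   pointer map of THIS, and zero-length array-section maps with attachment
   for the accessed pointer members; lambda closures get analogous maps of
   %<__closure%> and the captured variables.  */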
10739 | |
10740 | void |
10741 | finish_omp_target_clauses (location_t loc, tree body, tree *clauses_ptr) |
10742 | { |
10743 | omp_target_walk_data data; |
10744 | data.this_expr_accessed = false; |
10745 | data.current_object = NULL_TREE; |
10746 | |
10747 | if (DECL_NONSTATIC_MEMBER_P (current_function_decl) && current_class_ptr) |
10748 | if (tree ct = current_nonlambda_class_type ()) |
10749 | { |
10750 | tree object = maybe_dummy_object (ct, NULL); |
10751 | object = maybe_resolve_dummy (object, true); |
10752 | data.current_object = object; |
10753 | } |
10754 | |
10755 | if (DECL_LAMBDA_FUNCTION_P (current_function_decl)) |
10756 | { |
10757 | tree closure = DECL_ARGUMENTS (current_function_decl); |
10758 | data.current_closure = build_indirect_ref (loc, closure, RO_UNARY_STAR); |
10759 | } |
10760 | else |
10761 | data.current_closure = NULL_TREE; |
10762 | |
10763 | auto_vec<tree, 16> new_clauses; |
10764 | |
10765 | if (!processing_template_decl) |
10766 | { |
10767 | hash_set<omp_name_type<tree> > seen_types; |
10768 | auto_vec<tree> mapper_fns; |
10769 | omp_mapper_list<tree> mlist (&seen_types, &mapper_fns); |
10770 | data.mappers = &mlist; |
10771 | |
10772 | cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r, |
10773 | &data); |
10774 | |
10775 | unsigned int i; |
10776 | tree mapper_fn; |
10777 | FOR_EACH_VEC_ELT (mapper_fns, i, mapper_fn) |
10778 | c_omp_find_nested_mappers (&mlist, mapper_fn); |
10779 | |
10780 | FOR_EACH_VEC_ELT (mapper_fns, i, mapper_fn) |
10781 | { |
10782 | tree mapper = cxx_omp_extract_mapper_directive (mapper_fn); |
10783 | if (mapper == error_mark_node) |
10784 | continue; |
10785 | tree mapper_name = OMP_DECLARE_MAPPER_ID (mapper); |
10786 | tree decl = OMP_DECLARE_MAPPER_DECL (mapper); |
10787 | if (BASELINK_P (mapper_fn)) |
10788 | mapper_fn = BASELINK_FUNCTIONS (mapper_fn); |
10789 | |
10790 | tree c = build_omp_clause (loc, OMP_CLAUSE__MAPPER_BINDING_); |
10791 | OMP_CLAUSE__MAPPER_BINDING__ID (c) = mapper_name; |
10792 | OMP_CLAUSE__MAPPER_BINDING__DECL (c) = decl; |
10793 | OMP_CLAUSE__MAPPER_BINDING__MAPPER (c) = mapper_fn; |
10794 | |
10795 | new_clauses.safe_push (c); |
10796 | } |
10797 | } |
10798 | else |
10799 | { |
10800 | data.mappers = NULL; |
10801 | cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r, |
10802 | &data); |
10803 | } |
10804 | |
10805 | tree omp_target_this_expr = NULL_TREE; |
10806 | tree *explicit_this_deref_map = NULL; |
10807 | if (data.this_expr_accessed) |
10808 | { |
10809 | omp_target_this_expr = TREE_OPERAND (data.current_object, 0); |
10810 | |
10811 | /* See if explicit user-specified map(this[:]) clause already exists. |
10812 | If not, we create an implicit map(tofrom:this[:1]) clause. */ |
10813 | for (tree *cp = clauses_ptr; *cp; cp = &OMP_CLAUSE_CHAIN (*cp)) |
10814 | if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP |
10815 | && (TREE_CODE (OMP_CLAUSE_DECL (*cp)) == INDIRECT_REF |
10816 | || TREE_CODE (OMP_CLAUSE_DECL (*cp)) == MEM_REF) |
10817 | && operand_equal_p (TREE_OPERAND (OMP_CLAUSE_DECL (*cp), 0), |
10818 | omp_target_this_expr)) |
10819 | { |
10820 | explicit_this_deref_map = cp; |
10821 | break; |
10822 | } |
10823 | } |
10824 | |
10825 | if (DECL_LAMBDA_FUNCTION_P (current_function_decl) |
10826 | && (data.this_expr_accessed |
10827 | || !data.closure_vars_accessed.is_empty ())) |
10828 | { |
10829 | /* For lambda functions, we need to first create a copy of the |
10830 | __closure object. */ |
10831 | tree closure = DECL_ARGUMENTS (current_function_decl); |
10832 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
10833 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO); |
10834 | OMP_CLAUSE_DECL (c) |
10835 | = build_indirect_ref (loc, closure, RO_UNARY_STAR); |
10836 | OMP_CLAUSE_SIZE (c) |
10837 | = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (closure))); |
10838 | new_clauses.safe_push (c); |
10839 | |
10840 | tree closure_obj = OMP_CLAUSE_DECL (c); |
10841 | tree closure_type = TREE_TYPE (closure_obj); |
10842 | |
10843 | gcc_assert (LAMBDA_TYPE_P (closure_type) |
10844 | && CLASS_TYPE_P (closure_type)); |
10845 | |
10846 | tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP); |
10847 | OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER); |
10848 | OMP_CLAUSE_DECL (c2) = closure; |
10849 | OMP_CLAUSE_SIZE (c2) = size_zero_node; |
10850 | new_clauses.safe_push (c2); |
10851 | } |
10852 | |
10853 | if (data.this_expr_accessed) |
10854 | { |
10855 | /* If the this-expr was accessed, create a map(*this) clause. */ |
10856 | enum gomp_map_kind kind = GOMP_MAP_TOFROM; |
10857 | if (explicit_this_deref_map) |
10858 | { |
10859 | tree this_map = *explicit_this_deref_map; |
10860 | tree nc = OMP_CLAUSE_CHAIN (this_map); |
10861 | gcc_assert (nc != NULL_TREE |
10862 | && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP |
10863 | && (OMP_CLAUSE_MAP_KIND (nc) |
10864 | == GOMP_MAP_FIRSTPRIVATE_POINTER)); |
10865 | kind = OMP_CLAUSE_MAP_KIND (this_map); |
10866 | /* Remove the original 'map(*this) map(firstprivate_ptr:this)' |
10867 | two-map sequence from the chain. */ |
10868 | *explicit_this_deref_map = OMP_CLAUSE_CHAIN (nc); |
10869 | } |
10870 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
10871 | OMP_CLAUSE_SET_MAP_KIND (c, kind); |
10872 | OMP_CLAUSE_DECL (c) |
10873 | = build_indirect_ref (loc, omp_target_this_expr, RO_UNARY_STAR); |
10874 | OMP_CLAUSE_SIZE (c) |
10875 | = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (omp_target_this_expr))); |
10876 | new_clauses.safe_push (c); |
10877 | |
10878 | /* If we're in a lambda function, the this-pointer will actually be |
10879 | '__closure->this', a mapped member of __closure, hence always_pointer. |
10880 | Otherwise it's a firstprivate pointer. */ |
10881 | enum gomp_map_kind ptr_kind |
10882 | = (DECL_LAMBDA_FUNCTION_P (current_function_decl) |
10883 | ? GOMP_MAP_ALWAYS_POINTER |
10884 | : GOMP_MAP_FIRSTPRIVATE_POINTER); |
10885 | c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
10886 | OMP_CLAUSE_SET_MAP_KIND (c, ptr_kind); |
10887 | OMP_CLAUSE_DECL (c) = omp_target_this_expr; |
10888 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
10889 | new_clauses.safe_push (c); |
10890 | } |
10891 | |
10892 | if (DECL_LAMBDA_FUNCTION_P (current_function_decl)) |
10893 | { |
10894 | if (omp_target_this_expr) |
10895 | { |
10896 | STRIP_NOPS (omp_target_this_expr); |
10897 | gcc_assert (DECL_HAS_VALUE_EXPR_P (omp_target_this_expr)); |
10898 | omp_target_this_expr = DECL_VALUE_EXPR (omp_target_this_expr); |
10899 | } |
10900 | |
10901 | for (hash_set<tree>::iterator i = data.closure_vars_accessed.begin (); |
10902 | i != data.closure_vars_accessed.end (); ++i) |
10903 | { |
10904 | tree orig_decl = *i; |
10905 | tree closure_expr = DECL_VALUE_EXPR (orig_decl); |
10906 | |
10907 | if (TREE_CODE (TREE_TYPE (orig_decl)) == POINTER_TYPE |
10908 | || TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE) |
10909 | { |
10910 | /* this-pointer is processed above, outside this loop. */ |
10911 | if (omp_target_this_expr |
10912 | && operand_equal_p (closure_expr, omp_target_this_expr)) |
10913 | continue; |
10914 | |
10915 | bool ptr_p = TREE_CODE (TREE_TYPE (orig_decl)) == POINTER_TYPE; |
10916 | enum gomp_map_kind kind, ptr_kind, nc_kind; |
10917 | tree size; |
10918 | |
10919 | if (ptr_p) |
10920 | { |
10921 | /* For pointers, default mapped as zero-length array |
10922 | section. */ |
10923 | kind = GOMP_MAP_ALLOC; |
10924 | nc_kind = GOMP_MAP_FIRSTPRIVATE_POINTER; |
10925 | ptr_kind = GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION; |
10926 | size = size_zero_node; |
10927 | } |
10928 | else |
10929 | { |
10930 | /* For references, default mapped as appearing on map |
10931 | clause. */ |
10932 | kind = GOMP_MAP_TOFROM; |
10933 | nc_kind = GOMP_MAP_FIRSTPRIVATE_REFERENCE; |
10934 | ptr_kind = GOMP_MAP_ALWAYS_POINTER; |
10935 | size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (closure_expr))); |
10936 | } |
10937 | |
10938 | for (tree *p = clauses_ptr; *p; p = &OMP_CLAUSE_CHAIN (*p)) |
10939 | if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_MAP |
10940 | && (TREE_CODE (OMP_CLAUSE_DECL (*p)) == INDIRECT_REF |
10941 | || TREE_CODE (OMP_CLAUSE_DECL (*p)) == MEM_REF) |
10942 | && operand_equal_p (TREE_OPERAND (OMP_CLAUSE_DECL (*p), 0), |
10943 | orig_decl)) |
10944 | { |
10945 | /* If this was already specified by user as a map, |
10946 | save the user specified map kind, delete the |
10947 | "map(*ptr/ref), map(firstprivate ptr/ref)" sequence, |
10948 | and insert our own sequence: |
10949 | "map(*__closure->ptr/ref), map(<ptr_kind>:__closure->ref" |
10950 | */ |
10951 | tree nc = OMP_CLAUSE_CHAIN (*p); |
10952 | gcc_assert (nc != NULL_TREE |
10953 | && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP |
10954 | && OMP_CLAUSE_MAP_KIND (nc) == nc_kind); |
10955 | /* Update with user specified kind and size. */ |
10956 | kind = OMP_CLAUSE_MAP_KIND (*p); |
10957 | size = OMP_CLAUSE_SIZE (*p); |
10958 | *p = OMP_CLAUSE_CHAIN (nc); |
10959 | break; |
10960 | } |
10961 | |
10962 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
10963 | OMP_CLAUSE_SET_MAP_KIND (c, kind); |
10964 | OMP_CLAUSE_DECL (c) |
10965 | = build_indirect_ref (loc, closure_expr, RO_UNARY_STAR); |
10966 | OMP_CLAUSE_SIZE (c) = size; |
10967 | if (ptr_p) |
10968 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
10969 | new_clauses.safe_push (c); |
10970 | |
10971 | c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
10972 | OMP_CLAUSE_SET_MAP_KIND (c, ptr_kind); |
10973 | OMP_CLAUSE_DECL (c) = closure_expr; |
10974 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
10975 | new_clauses.safe_push (c); |
10976 | } |
10977 | } |
10978 | } |
10979 | |
10980 | if (!data.ptr_members_accessed.is_empty ()) |
10981 | for (hash_map<tree, tree>::iterator i = data.ptr_members_accessed.begin (); |
10982 | i != data.ptr_members_accessed.end (); ++i) |
10983 | { |
10984 | /* For each referenced member that is of pointer or reference-to-pointer |
10985 | type, create the equivalent of map(alloc:this->ptr[:0]). */ |
10986 | tree field_decl = (*i).first; |
10987 | tree ptr_member = (*i).second; |
10988 | |
10989 | for (tree c = *clauses_ptr; c; c = OMP_CLAUSE_CHAIN (c)) |
10990 | { |
10991 | if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP) |
10992 | continue; |
10993 | /* If map(this->ptr[:N]) already exists, avoid creating another |
10994 | such map. */ |
10995 | tree decl = OMP_CLAUSE_DECL (c); |
10996 | if ((TREE_CODE (decl) == INDIRECT_REF |
10997 | || TREE_CODE (decl) == MEM_REF) |
10998 | && operand_equal_p (TREE_OPERAND (decl, 0), ptr_member)) |
10999 | goto next_ptr_member; |
11000 | } |
11001 | |
11002 | if (!cxx_mark_addressable (ptr_member)) |
11003 | gcc_unreachable (); |
11004 | |
11005 | if (TREE_CODE (TREE_TYPE (field_decl)) == REFERENCE_TYPE) |
11006 | { |
11007 | /* For reference to pointers, we need to map the referenced |
11008 | pointer first for things to be correct. */ |
11009 | tree ptr_member_type = TREE_TYPE (ptr_member); |
11010 | |
11011 | /* Map pointer target as zero-length array section. */ |
11012 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11013 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC); |
11014 | OMP_CLAUSE_DECL (c) |
11015 | = build1 (INDIRECT_REF, TREE_TYPE (ptr_member_type), ptr_member); |
11016 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
11017 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
11018 | |
11019 | /* Map pointer to zero-length array section. */ |
11020 | tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11021 | OMP_CLAUSE_SET_MAP_KIND |
11022 | (c2, GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION); |
11023 | OMP_CLAUSE_DECL (c2) = ptr_member; |
11024 | OMP_CLAUSE_SIZE (c2) = size_zero_node; |
11025 | |
11026 | /* Attach reference-to-pointer field to pointer. */ |
11027 | tree c3 = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11028 | OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_ATTACH); |
11029 | OMP_CLAUSE_DECL (c3) = TREE_OPERAND (ptr_member, 0); |
11030 | OMP_CLAUSE_SIZE (c3) = size_zero_node; |
11031 | |
11032 | new_clauses.safe_push (c); |
11033 | new_clauses.safe_push (c2); |
11034 | new_clauses.safe_push (c3); |
11035 | } |
11036 | else if (TREE_CODE (TREE_TYPE (field_decl)) == POINTER_TYPE) |
11037 | { |
11038 | /* Map pointer target as zero-length array section. */ |
11039 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11040 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC); |
11041 | OMP_CLAUSE_DECL (c) = build_indirect_ref (loc, ptr_member, |
11042 | RO_UNARY_STAR); |
11043 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
11044 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
11045 | |
11046 | /* Attach zero-length array section to pointer. */ |
11047 | tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11048 | OMP_CLAUSE_SET_MAP_KIND |
11049 | (c2, GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION); |
11050 | OMP_CLAUSE_DECL (c2) = ptr_member; |
11051 | OMP_CLAUSE_SIZE (c2) = size_zero_node; |
11052 | |
11053 | new_clauses.safe_push (c); |
11054 | new_clauses.safe_push (c2); |
11055 | } |
11056 | else |
11057 | gcc_unreachable (); |
11058 | |
11059 | next_ptr_member: |
11060 | ; |
11061 | } |
11062 | |
11063 | for (hash_set<tree>::iterator i = data.lambda_objects_accessed.begin (); |
11064 | i != data.lambda_objects_accessed.end (); ++i) |
11065 | { |
11066 | tree lobj = *i; |
11067 | if (TREE_CODE (lobj) == TARGET_EXPR) |
11068 | lobj = TARGET_EXPR_SLOT (lobj); |
11069 | |
11070 | tree lt = TREE_TYPE (lobj); |
11071 | gcc_assert (LAMBDA_TYPE_P (lt) && CLASS_TYPE_P (lt)); |
11072 | |
11073 | tree lc = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11074 | OMP_CLAUSE_SET_MAP_KIND (lc, GOMP_MAP_TO); |
11075 | OMP_CLAUSE_DECL (lc) = lobj; |
11076 | OMP_CLAUSE_SIZE (lc) = TYPE_SIZE_UNIT (lt); |
11077 | new_clauses.safe_push (lc); |
11078 | |
11079 | for (tree fld = TYPE_FIELDS (lt); fld; fld = DECL_CHAIN (fld)) |
11080 | { |
11081 | if (TREE_CODE (TREE_TYPE (fld)) == POINTER_TYPE) |
11082 | { |
11083 | tree exp = build3 (COMPONENT_REF, TREE_TYPE (fld), |
11084 | lobj, fld, NULL_TREE); |
11085 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11086 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC); |
11087 | OMP_CLAUSE_DECL (c) |
11088 | = build_indirect_ref (loc, exp, RO_UNARY_STAR); |
11089 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
11090 | OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1; |
11091 | new_clauses.safe_push (c); |
11092 | |
11093 | c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11094 | OMP_CLAUSE_SET_MAP_KIND |
11095 | (c, GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION); |
11096 | OMP_CLAUSE_DECL (c) = exp; |
11097 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
11098 | new_clauses.safe_push (c); |
11099 | } |
11100 | else if (TREE_CODE (TREE_TYPE (fld)) == REFERENCE_TYPE) |
11101 | { |
11102 | tree exp = build3 (COMPONENT_REF, TREE_TYPE (fld), |
11103 | lobj, fld, NULL_TREE); |
11104 | tree c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11105 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM); |
11106 | OMP_CLAUSE_DECL (c) |
11107 | = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp); |
11108 | OMP_CLAUSE_SIZE (c) |
11109 | = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (exp))); |
11110 | new_clauses.safe_push (c); |
11111 | |
11112 | c = build_omp_clause (loc, OMP_CLAUSE_MAP); |
11113 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER); |
11114 | OMP_CLAUSE_DECL (c) = exp; |
11115 | OMP_CLAUSE_SIZE (c) = size_zero_node; |
11116 | new_clauses.safe_push (c); |
11117 | } |
11118 | } |
11119 | } |
11120 | |
11121 | tree c = *clauses_ptr; |
11122 | for (int i = new_clauses.length () - 1; i >= 0; i--) |
11123 | { |
11124 | OMP_CLAUSE_CHAIN (new_clauses[i]) = c; |
11125 | c = new_clauses[i]; |
11126 | } |
11127 | *clauses_ptr = c; |
11128 | } |
11129 | |
11130 | /* Called from cp_parser_omp_target. Create additional implicit clauses for |
11131 | OpenMP target directives, and do sanity checks. */ |
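| /* As an illustration (names here are purely illustrative, not from the |
| sources): a target region that refers to a pointer member through |
| `this', e.g. |
| |
| struct S { |
| int *ptr; |
| void f (int n) |
| { |
| #pragma omp target |
| for (int i = 0; i < n; i++) |
| ptr[i]++; |
| } |
| }; |
| |
| has no explicit map clause for PTR, so finish_omp_target_clauses |
| synthesizes the implicit GOMP_MAP_ALLOC / GOMP_MAP_ATTACH_* clauses |
| for it, as shown above. */ |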
11132 | |
11133 | tree |
11134 | finish_omp_target (location_t loc, tree clauses, tree body, bool combined_p) |
11135 | { |
11136 | if (!processing_template_decl) |
11137 | finish_omp_target_clauses (loc, body, &clauses); |
11138 | |
11139 | tree stmt = make_node (OMP_TARGET); |
11140 | TREE_TYPE (stmt) = void_type_node; |
11141 | OMP_TARGET_CLAUSES (stmt) = clauses; |
11142 | OMP_TARGET_BODY (stmt) = body; |
11143 | OMP_TARGET_COMBINED (stmt) = combined_p; |
11144 | SET_EXPR_LOCATION (stmt, loc); |
11145 | |
11146 | tree c = clauses; |
11147 | while (c) |
11148 | { |
11149 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP) |
11150 | switch (OMP_CLAUSE_MAP_KIND (c)) |
11151 | { |
11152 | case GOMP_MAP_TO: |
11153 | case GOMP_MAP_ALWAYS_TO: |
11154 | case GOMP_MAP_PRESENT_TO: |
11155 | case GOMP_MAP_ALWAYS_PRESENT_TO: |
11156 | case GOMP_MAP_FROM: |
11157 | case GOMP_MAP_ALWAYS_FROM: |
11158 | case GOMP_MAP_PRESENT_FROM: |
11159 | case GOMP_MAP_ALWAYS_PRESENT_FROM: |
11160 | case GOMP_MAP_TOFROM: |
11161 | case GOMP_MAP_ALWAYS_TOFROM: |
11162 | case GOMP_MAP_PRESENT_TOFROM: |
11163 | case GOMP_MAP_ALWAYS_PRESENT_TOFROM: |
11164 | case GOMP_MAP_ALLOC: |
11165 | case GOMP_MAP_PRESENT_ALLOC: |
11166 | case GOMP_MAP_FIRSTPRIVATE_POINTER: |
11167 | case GOMP_MAP_FIRSTPRIVATE_REFERENCE: |
11168 | case GOMP_MAP_ALWAYS_POINTER: |
11169 | case GOMP_MAP_ATTACH_DETACH: |
11170 | case GOMP_MAP_ATTACH: |
11171 | case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION: |
11172 | case GOMP_MAP_POINTER: |
11173 | case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION: |
11174 | break; |
11175 | default: |
11176 | error_at (OMP_CLAUSE_LOCATION (c), |
11177 | "%<#pragma omp target%> with map-type other " |
11178 | "than %<to%>, %<from%>, %<tofrom%> or %<alloc%> " |
11179 | "on %<map%> clause"); |
11180 | break; |
11181 | } |
11182 | c = OMP_CLAUSE_CHAIN (c); |
11183 | } |
11184 | return add_stmt (stmt); |
11185 | } |
11186 | |
11187 | tree |
11188 | finish_omp_parallel (tree clauses, tree body) |
11189 | { |
11190 | tree stmt; |
11191 | |
11192 | body = finish_omp_structured_block (body); |
11193 | |
11194 | stmt = make_node (OMP_PARALLEL); |
11195 | TREE_TYPE (stmt) = void_type_node; |
11196 | OMP_PARALLEL_CLAUSES (stmt) = clauses; |
11197 | OMP_PARALLEL_BODY (stmt) = body; |
11198 | |
11199 | return add_stmt (stmt); |
11200 | } |
11201 | |
11202 | tree |
11203 | begin_omp_task (void) |
11204 | { |
11205 | keep_next_level (true); |
11206 | return begin_omp_structured_block (); |
11207 | } |
11208 | |
11209 | tree |
11210 | finish_omp_task (tree clauses, tree body) |
11211 | { |
11212 | tree stmt; |
11213 | |
11214 | body = finish_omp_structured_block (body); |
11215 | |
11216 | stmt = make_node (OMP_TASK); |
11217 | TREE_TYPE (stmt) = void_type_node; |
11218 | OMP_TASK_CLAUSES (stmt) = clauses; |
11219 | OMP_TASK_BODY (stmt) = body; |
11220 | |
11221 | return add_stmt (stmt); |
11222 | } |
11223 | |
11224 | /* Helper function for finish_omp_for. Convert Ith random access iterator |
11225 | into integral iterator. Return FALSE if successful. */ |
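| /* Illustrative example (the loop below is not from the sources): a |
| random access class iterator loop such as |
| |
| #pragma omp parallel for |
| for (std::vector<int>::iterator it = v.begin (); it != v.end (); ++it) |
| use (*it); |
| |
| is roughly rewritten in terms of a new integral variable D iterating |
| from 0 to (v.end () - v.begin ()); inside the body IT is advanced by |
| the difference between D and its value on the previous iteration, so |
| that the middle end only sees a canonical integer loop. */ |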
11226 | |
11227 | static bool |
11228 | handle_omp_for_class_iterator (int i, location_t locus, enum tree_code code, |
11229 | tree declv, tree orig_declv, tree initv, |
11230 | tree condv, tree incrv, tree *body, |
11231 | tree *pre_body, tree &clauses, |
11232 | int collapse, int ordered) |
11233 | { |
11234 | tree diff, iter_init, iter_incr = NULL, last; |
11235 | tree incr_var = NULL, orig_pre_body, orig_body, c; |
11236 | tree decl = TREE_VEC_ELT (declv, i); |
11237 | tree init = TREE_VEC_ELT (initv, i); |
11238 | tree cond = TREE_VEC_ELT (condv, i); |
11239 | tree incr = TREE_VEC_ELT (incrv, i); |
11240 | tree iter = decl; |
11241 | location_t elocus = locus; |
11242 | |
11243 | if (init && EXPR_HAS_LOCATION (init)) |
11244 | elocus = EXPR_LOCATION (init); |
11245 | |
11246 | switch (TREE_CODE (cond)) |
11247 | { |
11248 | case GT_EXPR: |
11249 | case GE_EXPR: |
11250 | case LT_EXPR: |
11251 | case LE_EXPR: |
11252 | case NE_EXPR: |
11253 | if (TREE_OPERAND (cond, 1) == iter) |
11254 | cond = build2 (swap_tree_comparison (TREE_CODE (cond)), |
11255 | TREE_TYPE (cond), iter, TREE_OPERAND (cond, 0)); |
11256 | if (TREE_OPERAND (cond, 0) != iter) |
11257 | cond = error_mark_node; |
11258 | else |
11259 | { |
11260 | tree tem = build_x_binary_op (EXPR_LOCATION (cond), |
11261 | TREE_CODE (cond), |
11262 | iter, ERROR_MARK, |
11263 | TREE_OPERAND (cond, 1), ERROR_MARK, |
11264 | NULL_TREE, NULL, tf_warning_or_error); |
11265 | if (error_operand_p (tem)) |
11266 | return true; |
11267 | } |
11268 | break; |
11269 | default: |
11270 | cond = error_mark_node; |
11271 | break; |
11272 | } |
11273 | if (cond == error_mark_node) |
11274 | { |
11275 | error_at (elocus, "invalid controlling predicate"); |
11276 | return true; |
11277 | } |
11278 | diff = build_x_binary_op (elocus, MINUS_EXPR, |
11279 | TREE_OPERAND (cond, 1), ERROR_MARK, |
11280 | iter, ERROR_MARK, |
11281 | NULL_TREE, NULL, tf_warning_or_error); |
11282 | diff = cp_fully_fold (diff); |
11283 | if (error_operand_p (diff)) |
11284 | return true; |
11285 | if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE) |
11286 | { |
11287 | error_at (elocus, "difference between %qE and %qD does not have integer type", |
11288 | TREE_OPERAND (cond, 1), iter); |
11289 | return true; |
11290 | } |
11291 | if (!c_omp_check_loop_iv_exprs (locus, code, orig_declv, i, |
11292 | TREE_VEC_ELT (declv, i), NULL_TREE, |
11293 | cond, cp_walk_subtrees)) |
11294 | return true; |
11295 | |
11296 | switch (TREE_CODE (incr)) |
11297 | { |
11298 | case PREINCREMENT_EXPR: |
11299 | case PREDECREMENT_EXPR: |
11300 | case POSTINCREMENT_EXPR: |
11301 | case POSTDECREMENT_EXPR: |
11302 | if (TREE_OPERAND (incr, 0) != iter) |
11303 | { |
11304 | incr = error_mark_node; |
11305 | break; |
11306 | } |
11307 | iter_incr = build_x_unary_op (EXPR_LOCATION (incr), |
11308 | TREE_CODE (incr), iter, |
11309 | NULL_TREE, tf_warning_or_error); |
11310 | if (error_operand_p (iter_incr)) |
11311 | return true; |
11312 | else if (TREE_CODE (incr) == PREINCREMENT_EXPR |
11313 | || TREE_CODE (incr) == POSTINCREMENT_EXPR) |
11314 | incr = integer_one_node; |
11315 | else |
11316 | incr = integer_minus_one_node; |
11317 | break; |
11318 | case MODIFY_EXPR: |
11319 | if (TREE_OPERAND (incr, 0) != iter) |
11320 | incr = error_mark_node; |
11321 | else if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR |
11322 | || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR) |
11323 | { |
11324 | tree rhs = TREE_OPERAND (incr, 1); |
11325 | if (TREE_OPERAND (rhs, 0) == iter) |
11326 | { |
11327 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 1))) |
11328 | != INTEGER_TYPE) |
11329 | incr = error_mark_node; |
11330 | else |
11331 | { |
11332 | iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs), |
11333 | iter, TREE_CODE (rhs), |
11334 | TREE_OPERAND (rhs, 1), |
11335 | NULL_TREE, |
11336 | tf_warning_or_error); |
11337 | if (error_operand_p (iter_incr)) |
11338 | return true; |
11339 | incr = TREE_OPERAND (rhs, 1); |
11340 | incr = cp_convert (TREE_TYPE (diff), incr, |
11341 | tf_warning_or_error); |
11342 | if (TREE_CODE (rhs) == MINUS_EXPR) |
11343 | { |
11344 | incr = build1 (NEGATE_EXPR, TREE_TYPE (diff), incr); |
11345 | incr = fold_simple (incr); |
11346 | } |
11347 | if (TREE_CODE (incr) != INTEGER_CST |
11348 | && (TREE_CODE (incr) != NOP_EXPR |
11349 | || (TREE_CODE (TREE_OPERAND (incr, 0)) |
11350 | != INTEGER_CST))) |
11351 | iter_incr = NULL; |
11352 | } |
11353 | } |
11354 | else if (TREE_OPERAND (rhs, 1) == iter) |
11355 | { |
11356 | if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) != INTEGER_TYPE |
11357 | || TREE_CODE (rhs) != PLUS_EXPR) |
11358 | incr = error_mark_node; |
11359 | else |
11360 | { |
11361 | iter_incr = build_x_binary_op (EXPR_LOCATION (rhs), |
11362 | PLUS_EXPR, |
11363 | TREE_OPERAND (rhs, 0), |
11364 | ERROR_MARK, iter, |
11365 | ERROR_MARK, NULL_TREE, NULL, |
11366 | tf_warning_or_error); |
11367 | if (error_operand_p (iter_incr)) |
11368 | return true; |
11369 | iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs), |
11370 | iter, NOP_EXPR, |
11371 | iter_incr, NULL_TREE, |
11372 | tf_warning_or_error); |
11373 | if (error_operand_p (iter_incr)) |
11374 | return true; |
11375 | incr = TREE_OPERAND (rhs, 0); |
11376 | iter_incr = NULL; |
11377 | } |
11378 | } |
11379 | else |
11380 | incr = error_mark_node; |
11381 | } |
11382 | else |
11383 | incr = error_mark_node; |
11384 | break; |
11385 | default: |
11386 | incr = error_mark_node; |
11387 | break; |
11388 | } |
11389 | |
11390 | if (incr == error_mark_node) |
11391 | { |
11392 | error_at (elocus, "invalid increment expression"); |
11393 | return true; |
11394 | } |
11395 | |
11396 | incr = cp_convert (TREE_TYPE (diff), incr, tf_warning_or_error); |
11397 | incr = cp_fully_fold (incr); |
11398 | tree loop_iv_seen = NULL_TREE; |
11399 | for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) |
11400 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
11401 | && OMP_CLAUSE_DECL (c) == iter) |
11402 | { |
11403 | if (code == OMP_TASKLOOP || code == OMP_LOOP) |
11404 | { |
11405 | loop_iv_seen = c; |
11406 | OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) = 1; |
11407 | } |
11408 | break; |
11409 | } |
11410 | else if ((code == OMP_TASKLOOP || code == OMP_LOOP) |
11411 | && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE |
11412 | && OMP_CLAUSE_DECL (c) == iter) |
11413 | { |
11414 | loop_iv_seen = c; |
11415 | if (code == OMP_TASKLOOP) |
11416 | OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1; |
11417 | } |
11418 | |
11419 | decl = create_temporary_var (TREE_TYPE (diff)); |
11420 | pushdecl (decl); |
11421 | add_decl_expr (decl); |
11422 | last = create_temporary_var (TREE_TYPE (diff)); |
11423 | pushdecl (last); |
11424 | add_decl_expr (last); |
11425 | if (c && iter_incr == NULL && TREE_CODE (incr) != INTEGER_CST |
11426 | && (!ordered || (i < collapse && collapse > 1))) |
11427 | { |
11428 | incr_var = create_temporary_var (TREE_TYPE (diff)); |
11429 | pushdecl (incr_var); |
11430 | add_decl_expr (incr_var); |
11431 | } |
11432 | gcc_assert (stmts_are_full_exprs_p ()); |
11433 | tree diffvar = NULL_TREE; |
11434 | if (code == OMP_TASKLOOP) |
11435 | { |
11436 | if (!loop_iv_seen) |
11437 | { |
11438 | tree ivc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE); |
11439 | OMP_CLAUSE_DECL (ivc) = iter; |
11440 | cxx_omp_finish_clause (ivc, NULL, false); |
11441 | OMP_CLAUSE_CHAIN (ivc) = clauses; |
11442 | clauses = ivc; |
11443 | } |
11444 | tree lvc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE); |
11445 | OMP_CLAUSE_DECL (lvc) = last; |
11446 | OMP_CLAUSE_CHAIN (lvc) = clauses; |
11447 | clauses = lvc; |
11448 | diffvar = create_temporary_var (TREE_TYPE (diff)); |
11449 | pushdecl (diffvar); |
11450 | add_decl_expr (diffvar); |
11451 | } |
11452 | else if (code == OMP_LOOP) |
11453 | { |
11454 | if (!loop_iv_seen) |
11455 | { |
11456 | /* While iterators on the loop construct are predetermined |
11457 | lastprivate, if the decl is not declared inside of the |
11458 | loop, OMP_CLAUSE_LASTPRIVATE should have been added |
11459 | already. */ |
11460 | loop_iv_seen = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE); |
11461 | OMP_CLAUSE_DECL (loop_iv_seen) = iter; |
11462 | OMP_CLAUSE_CHAIN (loop_iv_seen) = clauses; |
11463 | clauses = loop_iv_seen; |
11464 | } |
11465 | else if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_PRIVATE) |
11466 | { |
11467 | OMP_CLAUSE_PRIVATE_DEBUG (loop_iv_seen) = 0; |
11468 | OMP_CLAUSE_PRIVATE_OUTER_REF (loop_iv_seen) = 0; |
11469 | OMP_CLAUSE_CODE (loop_iv_seen) = OMP_CLAUSE_FIRSTPRIVATE; |
11470 | } |
11471 | if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_FIRSTPRIVATE) |
11472 | cxx_omp_finish_clause (loop_iv_seen, NULL, false); |
11473 | } |
11474 | |
11475 | orig_pre_body = *pre_body; |
11476 | *pre_body = push_stmt_list (); |
11477 | if (orig_pre_body) |
11478 | add_stmt (orig_pre_body); |
11479 | if (init != NULL) |
11480 | finish_expr_stmt (build_x_modify_expr (elocus, |
11481 | iter, NOP_EXPR, init, |
11482 | NULL_TREE, tf_warning_or_error)); |
11483 | init = build_int_cst (TREE_TYPE (diff), 0); |
11484 | if (c && iter_incr == NULL |
11485 | && (!ordered || (i < collapse && collapse > 1))) |
11486 | { |
11487 | if (incr_var) |
11488 | { |
11489 | finish_expr_stmt (build_x_modify_expr (elocus, |
11490 | incr_var, NOP_EXPR, |
11491 | incr, NULL_TREE, |
11492 | tf_warning_or_error)); |
11493 | incr = incr_var; |
11494 | } |
11495 | iter_incr = build_x_modify_expr (elocus, |
11496 | iter, PLUS_EXPR, incr, |
11497 | NULL_TREE, tf_warning_or_error); |
11498 | } |
11499 | if (c && ordered && i < collapse && collapse > 1) |
11500 | iter_incr = incr; |
11501 | finish_expr_stmt (build_x_modify_expr (elocus, |
11502 | last, NOP_EXPR, init, |
11503 | NULL_TREE, tf_warning_or_error)); |
11504 | if (diffvar) |
11505 | { |
11506 | finish_expr_stmt (build_x_modify_expr (elocus, |
11507 | diffvar, NOP_EXPR, |
11508 | diff, NULL_TREE, tf_warning_or_error)); |
11509 | diff = diffvar; |
11510 | } |
11511 | *pre_body = pop_stmt_list (*pre_body); |
11512 | |
11513 | cond = cp_build_binary_op (elocus, |
11514 | TREE_CODE (cond), decl, diff, |
11515 | tf_warning_or_error); |
11516 | incr = build_modify_expr (elocus, decl, NULL_TREE, PLUS_EXPR, |
11517 | elocus, incr, NULL_TREE); |
11518 | |
11519 | orig_body = *body; |
11520 | *body = push_stmt_list (); |
11521 | iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), decl, last); |
11522 | iter_init = build_x_modify_expr (elocus, |
11523 | iter, PLUS_EXPR, iter_init, |
11524 | NULL_TREE, tf_warning_or_error); |
11525 | if (iter_init != error_mark_node) |
11526 | iter_init = build1 (NOP_EXPR, void_type_node, iter_init); |
11527 | finish_expr_stmt (iter_init); |
11528 | finish_expr_stmt (build_x_modify_expr (elocus, |
11529 | last, NOP_EXPR, decl, |
11530 | NULL_TREE, tf_warning_or_error)); |
11531 | add_stmt (orig_body); |
11532 | *body = pop_stmt_list (*body); |
11533 | |
11534 | if (c) |
11535 | { |
11536 | OMP_CLAUSE_LASTPRIVATE_STMT (c) = push_stmt_list (); |
11537 | if (!ordered) |
11538 | finish_expr_stmt (iter_incr); |
11539 | else |
11540 | { |
11541 | iter_init = decl; |
11542 | if (i < collapse && collapse > 1 && !error_operand_p (iter_incr)) |
11543 | iter_init = build2 (PLUS_EXPR, TREE_TYPE (diff), |
11544 | iter_init, iter_incr); |
11545 | iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), iter_init, last); |
11546 | iter_init = build_x_modify_expr (elocus, |
11547 | iter, PLUS_EXPR, iter_init, |
11548 | NULL_TREE, tf_warning_or_error); |
11549 | if (iter_init != error_mark_node) |
11550 | iter_init = build1 (NOP_EXPR, void_type_node, iter_init); |
11551 | finish_expr_stmt (iter_init); |
11552 | } |
11553 | OMP_CLAUSE_LASTPRIVATE_STMT (c) |
11554 | = pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (c)); |
11555 | } |
11556 | |
11557 | if (TREE_CODE (TREE_VEC_ELT (orig_declv, i)) == TREE_LIST) |
11558 | { |
11559 | tree t = TREE_VEC_ELT (orig_declv, i); |
11560 | gcc_assert (TREE_PURPOSE (t) == NULL_TREE |
11561 | && TREE_VALUE (t) == NULL_TREE |
11562 | && TREE_CODE (TREE_CHAIN (t)) == TREE_VEC); |
11563 | TREE_PURPOSE (t) = TREE_VEC_ELT (declv, i); |
11564 | TREE_VALUE (t) = last; |
11565 | } |
11566 | else |
11567 | TREE_VEC_ELT (orig_declv, i) |
11568 | = tree_cons (TREE_VEC_ELT (declv, i), last, NULL_TREE); |
11569 | TREE_VEC_ELT (declv, i) = decl; |
11570 | TREE_VEC_ELT (initv, i) = init; |
11571 | TREE_VEC_ELT (condv, i) = cond; |
11572 | TREE_VEC_ELT (incrv, i) = incr; |
11573 | |
11574 | return false; |
11575 | } |
11576 | |
11577 | /* Build and validate an OMP_FOR statement. CLAUSES, BODY, COND, INCR |
11578 | are directly for their associated operands in the statement. DECL |
11579 | and INIT are a combo; if DECL is NULL then INIT ought to be a |
11580 | MODIFY_EXPR, and the DECL should be extracted. PRE_BODY are |
11581 | optional statements that need to go before the loop into its |
11582 | sk_omp scope. */ |
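| /* Illustrative example: for a collapsed nest such as |
| |
| #pragma omp for collapse(2) |
| for (int i = 0; i < n; i++) |
| for (int j = 0; j < m; j++) |
| f (i, j); |
| |
| DECLV, INITV, CONDV and INCRV are TREE_VECs of length 2 holding |
| { i, j }, { 0, 0 }, { i < n, j < m } and { i++, j++ } respectively, |
| one element per loop in the nest. */ |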
11583 | |
11584 | tree |
11585 | finish_omp_for (location_t locus, enum tree_code code, tree declv, |
11586 | tree orig_declv, tree initv, tree condv, tree incrv, |
11587 | tree body, tree pre_body, vec<tree> *orig_inits, tree clauses) |
11588 | { |
11589 | tree omp_for = NULL, orig_incr = NULL; |
11590 | tree decl = NULL, init, cond, incr; |
11591 | location_t elocus; |
11592 | int i; |
11593 | int collapse = 1; |
11594 | int ordered = 0; |
11595 | auto_vec<location_t> init_locv; |
11596 | |
11597 | gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv)); |
11598 | gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv)); |
11599 | gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv)); |
11600 | if (TREE_VEC_LENGTH (declv) > 1) |
11601 | { |
11602 | if (tree ti = omp_find_clause (clauses, OMP_CLAUSE_TILE)) |
11603 | collapse = list_length (OMP_CLAUSE_TILE_LIST (ti)); |
11604 | else |
11605 | { |
11606 | if (tree co = omp_find_clause (clauses, OMP_CLAUSE_COLLAPSE)) |
11607 | collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (co)); |
11608 | else if (tree si = omp_find_clause (clauses, OMP_CLAUSE_SIZES)) |
11609 | collapse = list_length (OMP_CLAUSE_SIZES_LIST (si)); |
11610 | if (collapse != TREE_VEC_LENGTH (declv)) |
11611 | ordered = TREE_VEC_LENGTH (declv); |
11612 | } |
11613 | } |
11614 | for (i = 0; i < TREE_VEC_LENGTH (declv); i++) |
11615 | { |
11616 | decl = TREE_VEC_ELT (declv, i); |
11617 | init = TREE_VEC_ELT (initv, i); |
11618 | cond = TREE_VEC_ELT (condv, i); |
11619 | incr = TREE_VEC_ELT (incrv, i); |
11620 | elocus = locus; |
11621 | |
11622 | if (decl == global_namespace) |
11623 | { |
11624 | gcc_assert (init == NULL_TREE && cond == NULL_TREE && incr == NULL_TREE); |
11625 | TREE_VEC_ELT (declv, i) = NULL_TREE; |
11626 | init_locv.safe_push (UNKNOWN_LOCATION); |
11627 | continue; |
11628 | } |
11629 | /* We are going to throw out the init's original MODIFY_EXPR or |
11630 | MODOP_EXPR below. Save its location so we can use it when |
11631 | reconstructing the expression farther down. Alternatively, if the |
11632 | initializer is a binding of the iteration variable, save |
11633 | that location. Any of these locations in the initialization clause |
11634 | for the current nested loop are better than using the argument locus, |
11635 | which points to the "for" of the outermost loop in the nest. */ |
11636 | if (init && EXPR_HAS_LOCATION (init)) |
11637 | elocus = EXPR_LOCATION (init); |
11638 | else if (decl && INDIRECT_REF_P (decl) && EXPR_HAS_LOCATION (decl)) |
11639 | /* This can happen for class iterators. */ |
11640 | elocus = EXPR_LOCATION (decl); |
11641 | else if (decl && DECL_P (decl)) |
11642 | { |
11643 | if (DECL_SOURCE_LOCATION (decl) != UNKNOWN_LOCATION) |
11644 | elocus = DECL_SOURCE_LOCATION (decl); |
11645 | else if (DECL_INITIAL (decl) |
11646 | && EXPR_HAS_LOCATION (DECL_INITIAL (decl))) |
11647 | elocus = EXPR_LOCATION (DECL_INITIAL (decl)); |
11648 | } |
11649 | init_locv.safe_push (elocus); |
11650 | |
11651 | if (decl == NULL) |
11652 | { |
11653 | if (init != NULL) |
11654 | switch (TREE_CODE (init)) |
11655 | { |
11656 | case MODIFY_EXPR: |
11657 | decl = TREE_OPERAND (init, 0); |
11658 | init = TREE_OPERAND (init, 1); |
11659 | break; |
11660 | case MODOP_EXPR: |
11661 | if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR) |
11662 | { |
11663 | decl = TREE_OPERAND (init, 0); |
11664 | init = TREE_OPERAND (init, 2); |
11665 | } |
11666 | break; |
11667 | default: |
11668 | break; |
11669 | } |
11670 | |
11671 | if (decl == NULL) |
11672 | { |
11673 | error_at (locus, |
11674 | "expected iteration declaration or initialization"); |
11675 | return NULL; |
11676 | } |
11677 | } |
11678 | |
11679 | if (cond == global_namespace) |
11680 | continue; |
11681 | |
11682 | if (cond == NULL) |
11683 | { |
11684 | error_at (elocus, "missing controlling predicate"); |
11685 | return NULL; |
11686 | } |
11687 | |
11688 | if (incr == NULL) |
11689 | { |
11690 | error_at (elocus, "missing increment expression"); |
11691 | return NULL; |
11692 | } |
11693 | |
11694 | TREE_VEC_ELT (declv, i) = decl; |
11695 | TREE_VEC_ELT (initv, i) = init; |
11696 | } |
11697 | |
11698 | if (orig_inits) |
11699 | { |
11700 | bool fail = false; |
11701 | tree orig_init; |
11702 | FOR_EACH_VEC_ELT (*orig_inits, i, orig_init) |
11703 | if (orig_init |
11704 | && !c_omp_check_loop_iv_exprs (locus, code, |
11705 | orig_declv ? orig_declv : declv, i, |
11706 | TREE_VEC_ELT (declv, i), orig_init, |
11707 | NULL_TREE, cp_walk_subtrees)) |
11708 | fail = true; |
11709 | if (fail) |
11710 | return NULL; |
11711 | } |
11712 | |
11713 | if (dependent_omp_for_p (declv, initv, condv, incrv, body)) |
11714 | { |
11715 | tree stmt; |
11716 | |
11717 | stmt = make_node (code); |
11718 | |
11719 | for (i = 0; i < TREE_VEC_LENGTH (declv); i++) |
11720 | { |
11721 | if (TREE_VEC_ELT (declv, i) == NULL_TREE) |
11722 | continue; |
11723 | /* This is really just a place-holder. We'll be decomposing this |
11724 | again and going through the cp_build_modify_expr path below when |
11725 | we instantiate the thing. */ |
11726 | TREE_VEC_ELT (initv, i) |
11727 | = build2_loc (init_locv[i], MODIFY_EXPR, void_type_node, |
11728 | TREE_VEC_ELT (declv, i), TREE_VEC_ELT (initv, i)); |
11729 | } |
11730 | |
11731 | TREE_TYPE (stmt) = void_type_node; |
11732 | OMP_FOR_INIT (stmt) = initv; |
11733 | OMP_FOR_COND (stmt) = condv; |
11734 | OMP_FOR_INCR (stmt) = incrv; |
11735 | OMP_FOR_BODY (stmt) = body; |
11736 | OMP_FOR_PRE_BODY (stmt) = pre_body; |
11737 | OMP_FOR_CLAUSES (stmt) = clauses; |
11738 | |
11739 | SET_EXPR_LOCATION (stmt, locus); |
11740 | return add_stmt (stmt); |
11741 | } |
11742 | |
11743 | if (!orig_declv) |
11744 | orig_declv = copy_node (declv); |
11745 | |
11746 | if (processing_template_decl) |
11747 | orig_incr = make_tree_vec (TREE_VEC_LENGTH (incrv)); |
11748 | |
11749 | for (i = 0; i < TREE_VEC_LENGTH (declv); ) |
11750 | { |
11751 | decl = TREE_VEC_ELT (declv, i); |
11752 | init = TREE_VEC_ELT (initv, i); |
11753 | cond = TREE_VEC_ELT (condv, i); |
11754 | incr = TREE_VEC_ELT (incrv, i); |
11755 | if (orig_incr) |
11756 | TREE_VEC_ELT (orig_incr, i) = incr; |
11757 | elocus = init_locv[i]; |
11758 | |
11759 | if (decl == NULL_TREE) |
11760 | { |
11761 | i++; |
11762 | continue; |
11763 | } |
11764 | |
11765 | if (!DECL_P (decl)) |
11766 | { |
11767 | error_at (elocus, "expected iteration declaration or initialization"); |
11768 | return NULL; |
11769 | } |
11770 | |
11771 | if (incr && TREE_CODE (incr) == MODOP_EXPR) |
11772 | { |
11773 | if (orig_incr) |
11774 | TREE_VEC_ELT (orig_incr, i) = incr; |
11775 | incr = cp_build_modify_expr (elocus, TREE_OPERAND (incr, 0), |
11776 | TREE_CODE (TREE_OPERAND (incr, 1)), |
11777 | TREE_OPERAND (incr, 2), |
11778 | tf_warning_or_error); |
11779 | } |
11780 | |
11781 | if (CLASS_TYPE_P (TREE_TYPE (decl))) |
11782 | { |
11783 | if (code == OMP_SIMD) |
11784 | { |
11785 | error_at (elocus, "%<#pragma omp simd%> used with class " |
11786 | "iteration variable %qE", decl); |
11787 | return NULL; |
11788 | } |
11789 | if (handle_omp_for_class_iterator (i, locus, code, declv, orig_declv, |
11790 | initv, condv, incrv, &body, |
11791 | &pre_body, clauses, |
11792 | collapse, ordered)) |
11793 | return NULL; |
11794 | continue; |
11795 | } |
11796 | |
11797 | if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)) |
11798 | && !TYPE_PTR_P (TREE_TYPE (decl))) |
11799 | { |
11800 | error_at (elocus, "invalid type for iteration variable %qE", decl); |
11801 | return NULL; |
11802 | } |
11803 | |
11804 | if (!processing_template_decl && TREE_CODE (init) != TREE_VEC) |
11805 | init = cp_build_modify_expr (elocus, decl, NOP_EXPR, init, |
11806 | tf_warning_or_error); |
11807 | else |
11808 | init = build2_loc (elocus, MODIFY_EXPR, void_type_node, decl, init); |
11809 | if (decl == error_mark_node || init == error_mark_node) |
11810 | return NULL; |
11811 | |
11812 | TREE_VEC_ELT (declv, i) = decl; |
11813 | TREE_VEC_ELT (initv, i) = init; |
11814 | TREE_VEC_ELT (condv, i) = cond; |
11815 | TREE_VEC_ELT (incrv, i) = incr; |
11816 | i++; |
11817 | } |
11818 | |
11819 | if (pre_body && IS_EMPTY_STMT (pre_body)) |
11820 | pre_body = NULL; |
11821 | |
11822 | omp_for = c_finish_omp_for (locus, code, declv, orig_declv, initv, condv, |
11823 | incrv, body, pre_body, |
11824 | !processing_template_decl); |
11825 | |
11826 | /* Check for iterators appearing in lb, b or incr expressions. */ |
11827 | if (omp_for && !c_omp_check_loop_iv (omp_for, orig_declv, cp_walk_subtrees)) |
11828 | omp_for = NULL_TREE; |
11829 | |
11830 | if (omp_for == NULL) |
11831 | return NULL; |
11832 | |
11833 | add_stmt (omp_for); |
11834 | |
11835 | for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)); i++) |
11836 | { |
11837 | init = TREE_VEC_ELT (OMP_FOR_INIT (omp_for), i); |
11838 | if (init == NULL_TREE) |
11839 | continue; |
11840 | decl = TREE_OPERAND (init, 0); |
11841 | cond = TREE_VEC_ELT (OMP_FOR_COND (omp_for), i); |
11842 | incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i); |
11843 | |
11844 | if (!processing_template_decl) |
11845 | { |
11846 | if (TREE_CODE (TREE_OPERAND (init, 1)) == TREE_VEC) |
11847 | { |
11848 | tree t = TREE_VEC_ELT (TREE_OPERAND (init, 1), 1); |
11849 | TREE_VEC_ELT (TREE_OPERAND (init, 1), 1) |
11850 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11851 | t = TREE_VEC_ELT (TREE_OPERAND (init, 1), 2); |
11852 | TREE_VEC_ELT (TREE_OPERAND (init, 1), 2) |
11853 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11854 | } |
11855 | else |
11856 | { |
11857 | tree t = TREE_OPERAND (init, 1); |
11858 | TREE_OPERAND (init, 1) |
11859 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11860 | } |
11861 | if (TREE_CODE (TREE_OPERAND (cond, 1)) == TREE_VEC) |
11862 | { |
11863 | tree t = TREE_VEC_ELT (TREE_OPERAND (cond, 1), 1); |
11864 | TREE_VEC_ELT (TREE_OPERAND (cond, 1), 1) |
11865 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11866 | t = TREE_VEC_ELT (TREE_OPERAND (cond, 1), 2); |
11867 | TREE_VEC_ELT (TREE_OPERAND (cond, 1), 2) |
11868 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11869 | } |
11870 | else |
11871 | { |
11872 | tree t = TREE_OPERAND (cond, 1); |
11873 | TREE_OPERAND (cond, 1) |
11874 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11875 | } |
11876 | } |
11877 | |
11878 | if (TREE_CODE (incr) != MODIFY_EXPR) |
11879 | continue; |
11880 | |
11881 | if (TREE_SIDE_EFFECTS (TREE_OPERAND (incr, 1)) |
11882 | && BINARY_CLASS_P (TREE_OPERAND (incr, 1)) |
11883 | && !processing_template_decl) |
11884 | { |
11885 | tree t = TREE_OPERAND (TREE_OPERAND (incr, 1), 0); |
11886 | if (TREE_SIDE_EFFECTS (t) |
11887 | && t != decl |
11888 | && (TREE_CODE (t) != NOP_EXPR |
11889 | || TREE_OPERAND (t, 0) != decl)) |
11890 | TREE_OPERAND (TREE_OPERAND (incr, 1), 0) |
11891 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11892 | |
11893 | t = TREE_OPERAND (TREE_OPERAND (incr, 1), 1); |
11894 | if (TREE_SIDE_EFFECTS (t) |
11895 | && t != decl |
11896 | && (TREE_CODE (t) != NOP_EXPR |
11897 | || TREE_OPERAND (t, 0) != decl)) |
11898 | TREE_OPERAND (TREE_OPERAND (incr, 1), 1) |
11899 | = fold_build_cleanup_point_expr (TREE_TYPE (t), t); |
11900 | } |
11901 | |
11902 | if (orig_incr) |
11903 | TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i) = TREE_VEC_ELT (orig_incr, i); |
11904 | } |
11905 | OMP_FOR_CLAUSES (omp_for) = clauses; |
11906 | |
11907 | /* For simd loops with non-static data member iterators, we could have added |
11908 | OMP_CLAUSE_LINEAR clauses without OMP_CLAUSE_LINEAR_STEP. As we know the |
11909 | step at this point, fill it in. */ |
11910 | if (code == OMP_SIMD && !processing_template_decl |
11911 | && TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)) == 1) |
11912 | for (tree c = omp_find_clause (clauses, OMP_CLAUSE_LINEAR); c; |
11913 | c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_LINEAR)) |
11914 | if (OMP_CLAUSE_LINEAR_STEP (c) == NULL_TREE) |
11915 | { |
11916 | decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), 0), 0); |
11917 | gcc_assert (decl == OMP_CLAUSE_DECL (c)); |
11918 | incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), 0); |
11919 | tree step, stept; |
11920 | switch (TREE_CODE (incr)) |
11921 | { |
11922 | case PREINCREMENT_EXPR: |
11923 | case POSTINCREMENT_EXPR: |
11924 | /* c_omp_for_incr_canonicalize_ptr() should have been |
11925 | called to massage things appropriately. */ |
11926 | gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl))); |
11927 | OMP_CLAUSE_LINEAR_STEP (c) = build_int_cst (TREE_TYPE (decl), 1); |
11928 | break; |
11929 | case PREDECREMENT_EXPR: |
11930 | case POSTDECREMENT_EXPR: |
11931 | /* c_omp_for_incr_canonicalize_ptr() should have been |
11932 | called to massage things appropriately. */ |
11933 | gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl))); |
11934 | OMP_CLAUSE_LINEAR_STEP (c) |
11935 | = build_int_cst (TREE_TYPE (decl), -1); |
11936 | break; |
11937 | case MODIFY_EXPR: |
11938 | gcc_assert (TREE_OPERAND (incr, 0) == decl); |
11939 | incr = TREE_OPERAND (incr, 1); |
11940 | switch (TREE_CODE (incr)) |
11941 | { |
11942 | case PLUS_EXPR: |
11943 | if (TREE_OPERAND (incr, 1) == decl) |
11944 | step = TREE_OPERAND (incr, 0); |
11945 | else |
11946 | step = TREE_OPERAND (incr, 1); |
11947 | break; |
11948 | case MINUS_EXPR: |
11949 | case POINTER_PLUS_EXPR: |
11950 | gcc_assert (TREE_OPERAND (incr, 0) == decl); |
11951 | step = TREE_OPERAND (incr, 1); |
11952 | break; |
11953 | default: |
11954 | gcc_unreachable (); |
11955 | } |
11956 | stept = TREE_TYPE (decl); |
11957 | if (INDIRECT_TYPE_P (stept)) |
11958 | stept = sizetype; |
11959 | step = fold_convert (stept, step); |
11960 | if (TREE_CODE (incr) == MINUS_EXPR) |
11961 | step = fold_build1 (NEGATE_EXPR, stept, step); |
11962 | OMP_CLAUSE_LINEAR_STEP (c) = step; |
11963 | break; |
11964 | default: |
11965 | gcc_unreachable (); |
11966 | } |
11967 | } |
11968 | /* Override saved methods on OMP_LOOP's OMP_CLAUSE_LASTPRIVATE_LOOP_IV |
11969 | clauses, we need copy ctor for those rather than default ctor, |
11970 | plus as for other lastprivates assignment op and dtor. */ |
11971 | if (code == OMP_LOOP && !processing_template_decl) |
11972 | for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) |
11973 | if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE |
11974 | && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) |
11975 | && cxx_omp_create_clause_info (c, TREE_TYPE (OMP_CLAUSE_DECL (c)), |
11976 | false, true, true, true)) |
11977 | CP_OMP_CLAUSE_INFO (c) = NULL_TREE; |
11978 | |
11979 | return omp_for; |
11980 | } |
11981 | |
11982 | /* Code walker for finish_omp_for_block: extract binding of DP->var |
11983 | from its current block and move it to a new BIND_EXPR DP->b |
11984 | surrounding the body of DP->omp_for. */ |
11985 | |
11986 | struct fofb_data { |
11987 | tree var; |
11988 | tree b; |
11989 | tree omp_for; |
11990 | }; |
11991 | |
11992 | static tree |
11993 | finish_omp_for_block_walker (tree *tp, int *walk_subtrees, void *dp) |
11994 | { |
11995 | struct fofb_data *fofb = (struct fofb_data *)dp; |
11996 | if (TREE_CODE (*tp) == BIND_EXPR) |
11997 | for (tree *p = &BIND_EXPR_VARS (*tp); *p; p = &DECL_CHAIN (*p)) |
11998 | { |
11999 | if (*p == fofb->var) |
12000 | { |
12001 | *p = DECL_CHAIN (*p); |
12002 | if (fofb->b == NULL_TREE) |
12003 | { |
12004 | fofb->b = make_node (BLOCK); |
12005 | fofb->b = build3 (BIND_EXPR, void_type_node, NULL_TREE, |
12006 | OMP_FOR_BODY (fofb->omp_for), fofb->b); |
12007 | TREE_SIDE_EFFECTS (fofb->b) = 1; |
12008 | OMP_FOR_BODY (fofb->omp_for) = fofb->b; |
12009 | } |
12010 | DECL_CHAIN (fofb->var) = BIND_EXPR_VARS (fofb->b); |
12011 | BIND_EXPR_VARS (fofb->b) = fofb->var; |
12012 | BLOCK_VARS (BIND_EXPR_BLOCK (fofb->b)) = fofb->var; |
12013 | BLOCK_VARS (BIND_EXPR_BLOCK (*tp)) = BIND_EXPR_VARS (*tp); |
12014 | return *tp; |
12015 | } |
12016 | } |
12017 | if (TREE_CODE (*tp) != BIND_EXPR && TREE_CODE (*tp) != STATEMENT_LIST) |
12018 | *walk_subtrees = false; |
12019 | return NULL_TREE; |
12020 | } |
12021 | |
12022 | /* Fix up range-for decls. Those decls were pushed into BIND's |
12023 | BIND_EXPR_VARS, or that of a nested BIND_EXPR inside its body, |
12024 | and need to be moved into a new BIND_EXPR surrounding OMP_FOR's body |
12025 | so that processing of combined loop directives can find them. */ |
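| /* Illustrative example: for a combined construct with a range-based |
| for loop, e.g. |
| |
| #pragma omp parallel for |
| for (int x : vec) |
| use (x); |
| |
| the compiler-generated __range/__begin/__end variables end up bound in |
| BIND; this function moves them into a BIND_EXPR placed directly around |
| the OMP_FOR body. */ |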
12026 | tree |
12027 | finish_omp_for_block (tree bind, tree omp_for) |
12028 | { |
12029 | if (omp_for == NULL_TREE |
12030 | || !OMP_FOR_ORIG_DECLS (omp_for) |
12031 | || bind == NULL_TREE) |
12032 | return bind; |
12033 | struct fofb_data fofb; |
12034 | fofb.b = NULL_TREE; |
12035 | fofb.omp_for = omp_for; |
12036 | for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (omp_for)); i++) |
12037 | if (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), i) |
12038 | && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)) |
12039 | == TREE_LIST) |
12040 | && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i))) |
12041 | { |
12042 | tree v = TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)); |
12043 | for (int j = 2; j < TREE_VEC_LENGTH (v); j++) |
12044 | { |
12045 | fofb.var = TREE_VEC_ELT (v, j); |
12046 | cp_walk_tree (&bind, finish_omp_for_block_walker, |
12047 | (void *)&fofb, NULL); |
12048 | } |
12049 | } |
12050 | return bind; |
12051 | } |
12052 | |
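| /* Build an OMP_ATOMIC statement from a parsed #pragma omp atomic |
| construct (this note and the example are illustrative only). For |
| instance, roughly, |
| |
| #pragma omp atomic update |
| x += expr; |
| |
| reaches this point with LHS = x, RHS = expr, OPCODE = PLUS_EXPR and |
| CODE = OMP_ATOMIC; V, LHS1, RHS1 and R only come into play for the |
| read, capture and compare forms. */ |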
12053 | void |
12054 | finish_omp_atomic (location_t loc, enum tree_code code, enum tree_code opcode, |
12055 | tree lhs, tree rhs, tree v, tree lhs1, tree rhs1, tree r, |
12056 | tree clauses, enum omp_memory_order mo, bool weak) |
12057 | { |
12058 | tree orig_lhs; |
12059 | tree orig_rhs; |
12060 | tree orig_v; |
12061 | tree orig_lhs1; |
12062 | tree orig_rhs1; |
12063 | tree orig_r; |
12064 | bool dependent_p; |
12065 | tree stmt; |
12066 | |
12067 | orig_lhs = lhs; |
12068 | orig_rhs = rhs; |
12069 | orig_v = v; |
12070 | orig_lhs1 = lhs1; |
12071 | orig_rhs1 = rhs1; |
12072 | orig_r = r; |
12073 | dependent_p = false; |
12074 | stmt = NULL_TREE; |
12075 | |
12076 | /* Even in a template, we can detect invalid uses of the atomic |
12077 | pragma if neither LHS nor RHS is type-dependent. */ |
12078 | if (processing_template_decl) |
12079 | { |
12080 | dependent_p = (type_dependent_expression_p (lhs) |
12081 | || (rhs && type_dependent_expression_p (rhs)) |
12082 | || (v && type_dependent_expression_p (v)) |
12083 | || (lhs1 && type_dependent_expression_p (lhs1)) |
12084 | || (rhs1 && type_dependent_expression_p (rhs1)) |
12085 | || (r |
12086 | && r != void_list_node |
12087 | && type_dependent_expression_p (r))); |
12088 | if (clauses) |
12089 | { |
12090 | gcc_assert (TREE_CODE (clauses) == OMP_CLAUSE |
12091 | && OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT |
12092 | && OMP_CLAUSE_CHAIN (clauses) == NULL_TREE); |
12093 | if (type_dependent_expression_p (OMP_CLAUSE_HINT_EXPR (clauses)) |
12094 | || TREE_CODE (OMP_CLAUSE_HINT_EXPR (clauses)) != INTEGER_CST) |
12095 | dependent_p = true; |
12096 | } |
12097 | } |
12098 | if (!dependent_p) |
12099 | { |
12100 | bool swapped = false; |
12101 | if (rhs1 && opcode != COND_EXPR && cp_tree_equal (lhs, rhs)) |
12102 | { |
12103 | std::swap (rhs, rhs1); |
12104 | swapped = !commutative_tree_code (opcode); |
12105 | } |
12106 | if (rhs1 && opcode != COND_EXPR && !cp_tree_equal (lhs, rhs1)) |
12107 | { |
12108 | if (code == OMP_ATOMIC) |
12109 | error ("%<#pragma omp atomic update%> uses two different " |
12110 | "expressions for memory"); |
12111 | else |
12112 | error ("%<#pragma omp atomic capture%> uses two different " |
12113 | "expressions for memory"); |
12114 | return; |
12115 | } |
12116 | if (lhs1 && !cp_tree_equal (lhs, lhs1)) |
12117 | { |
12118 | if (code == OMP_ATOMIC) |
12119 | error ("%<#pragma omp atomic update%> uses two different " |
12120 | "expressions for memory"); |
12121 | else |
12122 | error ("%<#pragma omp atomic capture%> uses two different " |
12123 | "expressions for memory"); |
12124 | return; |
12125 | } |
12126 | stmt = c_finish_omp_atomic (loc, code, opcode, lhs, rhs, |
12127 | v, lhs1, rhs1, r, swapped, mo, weak, |
12128 | processing_template_decl != 0); |
12129 | if (stmt == error_mark_node) |
12130 | return; |
12131 | } |
12132 | if (processing_template_decl) |
12133 | { |
12134 | if (code == OMP_ATOMIC_READ) |
12135 | { |
12136 | stmt = build_min_nt_loc (loc, OMP_ATOMIC_READ, orig_lhs); |
12137 | OMP_ATOMIC_MEMORY_ORDER (stmt) = mo; |
12138 | stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt); |
12139 | } |
12140 | else |
12141 | { |
12142 | if (opcode == NOP_EXPR) |
12143 | stmt = build2 (MODIFY_EXPR, void_type_node, orig_lhs, orig_rhs); |
12144 | else if (opcode == COND_EXPR) |
12145 | { |
12146 | stmt = build2 (EQ_EXPR, boolean_type_node, orig_lhs, orig_rhs); |
12147 | if (orig_r) |
12148 | stmt = build2 (MODIFY_EXPR, boolean_type_node, orig_r, |
12149 | stmt); |
12150 | stmt = build3 (COND_EXPR, void_type_node, stmt, orig_rhs1, |
12151 | orig_lhs); |
12152 | orig_rhs1 = NULL_TREE; |
12153 | } |
12154 | else |
12155 | stmt = build2 (opcode, void_type_node, orig_lhs, orig_rhs); |
12156 | if (orig_rhs1) |
12157 | stmt = build_min_nt_loc (EXPR_LOCATION (orig_rhs1), |
12158 | COMPOUND_EXPR, orig_rhs1, stmt); |
12159 | if (code != OMP_ATOMIC) |
12160 | { |
12161 | stmt = build_min_nt_loc (loc, code, orig_lhs1, stmt); |
12162 | OMP_ATOMIC_MEMORY_ORDER (stmt) = mo; |
12163 | OMP_ATOMIC_WEAK (stmt) = weak; |
12164 | stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt); |
12165 | } |
12166 | } |
12167 | stmt = build2 (OMP_ATOMIC, void_type_node, |
12168 | clauses ? clauses : integer_zero_node, stmt); |
12169 | OMP_ATOMIC_MEMORY_ORDER (stmt) = mo; |
12170 | OMP_ATOMIC_WEAK (stmt) = weak; |
12171 | SET_EXPR_LOCATION (stmt, loc); |
12172 | } |
12173 | |
12174 | /* Avoid -Wunused-value warnings here; the whole construct has side effects, |
12175 | and even if fold-const.cc or c-omp.cc might have wrapped it |
12176 | in some tree that appears to be unused, the value is not unused. */ |
12177 | warning_sentinel w (warn_unused_value); |
12178 | finish_expr_stmt (stmt); |
12179 | } |
12180 | |
12181 | void |
12182 | finish_omp_barrier (void) |
12183 | { |
12184 | tree fn = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER); |
12185 | releasing_vec vec; |
12186 | tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error); |
12187 | finish_expr_stmt (stmt); |
12188 | } |
12189 | |
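| /* Finish a #pragma omp depobj directive (this note and the example are |
| illustrative only). For example |
| |
| omp_depend_t d; |
| #pragma omp depobj (d) depend (inout: x) |
| |
| arrives here with DEPOBJ = d, while KIND and CLAUSE describe the |
| dependence being attached, updated or destroyed. */ |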
12190 | void |
12191 | finish_omp_depobj (location_t loc, tree depobj, |
12192 | enum omp_clause_depend_kind kind, tree clause) |
12193 | { |
12194 | if (!error_operand_p (depobj) && !type_dependent_expression_p (depobj)) |
12195 | { |
12196 | if (!lvalue_p (depobj)) |
12197 | { |
12198 | error_at (EXPR_LOC_OR_LOC (depobj, loc), |
12199 | "%<depobj%> expression is not lvalue expression"); |
12200 | depobj = error_mark_node; |
12201 | } |
12202 | } |
12203 | |
12204 | if (processing_template_decl) |
12205 | { |
12206 | if (clause == NULL_TREE) |
12207 | clause = build_int_cst (integer_type_node, kind); |
12208 | add_stmt (build_min_nt_loc (loc, OMP_DEPOBJ, depobj, clause)); |
12209 | return; |
12210 | } |
12211 | |
12212 | if (!error_operand_p (depobj)) |
12213 | { |
12214 | tree addr = cp_build_addr_expr (depobj, tf_warning_or_error); |
12215 | if (addr == error_mark_node) |
12216 | depobj = error_mark_node; |
12217 | else |
12218 | depobj = cp_build_indirect_ref (loc, addr, RO_UNARY_STAR, |
12219 | tf_warning_or_error); |
12220 | } |
12221 | |
12222 | c_finish_omp_depobj (loc, depobj, kind, clause); |
12223 | } |
12224 | |
12225 | void |
12226 | finish_omp_flush (int mo) |
12227 | { |
12228 | tree fn = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE); |
12229 | releasing_vec vec; |
12230 | if (mo != MEMMODEL_LAST && mo != MEMMODEL_SEQ_CST) |
12231 | { |
12232 | fn = builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE); |
12233 | vec->quick_push (build_int_cst (integer_type_node, mo)); |
12234 | } |
12235 | tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error); |
12236 | finish_expr_stmt (stmt); |
12237 | } |
12238 | |
12239 | void |
12240 | finish_omp_taskwait (void) |
12241 | { |
12242 | tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT); |
12243 | releasing_vec vec; |
12244 | tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error); |
12245 | finish_expr_stmt (stmt); |
12246 | } |
12247 | |
12248 | void |
12249 | finish_omp_taskyield (void) |
12250 | { |
12251 | tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD); |
12252 | releasing_vec vec; |
12253 | tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error); |
12254 | finish_expr_stmt (stmt); |
12255 | } |
12256 | |
12257 | void |
12258 | finish_omp_cancel (tree clauses) |
12259 | { |
12260 | tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL); |
12261 | int mask = 0; |
12262 | if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL)) |
12263 | mask = 1; |
12264 | else if (omp_find_clause (clauses, OMP_CLAUSE_FOR)) |
12265 | mask = 2; |
12266 | else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS)) |
12267 | mask = 4; |
12268 | else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP)) |
12269 | mask = 8; |
12270 | else |
12271 | { |
12272 | error ("%<#pragma omp cancel%> must specify one of " |
12273 | "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses"); |
12274 | return; |
12275 | } |
12276 | releasing_vec vec; |
12277 | tree ifc = omp_find_clause (clauses, OMP_CLAUSE_IF); |
12278 | if (ifc != NULL_TREE) |
12279 | { |
12280 | if (OMP_CLAUSE_IF_MODIFIER (ifc) != ERROR_MARK |
12281 | && OMP_CLAUSE_IF_MODIFIER (ifc) != VOID_CST) |
12282 | error_at (OMP_CLAUSE_LOCATION (ifc), |
12283 | "expected %<cancel%> %<if%> clause modifier"); |
12284 | else |
12285 | { |
12286 | tree ifc2 = omp_find_clause (OMP_CLAUSE_CHAIN (ifc), OMP_CLAUSE_IF); |
12287 | if (ifc2 != NULL_TREE) |
12288 | { |
12289 | gcc_assert (OMP_CLAUSE_IF_MODIFIER (ifc) == VOID_CST |
12290 | && OMP_CLAUSE_IF_MODIFIER (ifc2) != ERROR_MARK |
12291 | && OMP_CLAUSE_IF_MODIFIER (ifc2) != VOID_CST); |
12292 | error_at (OMP_CLAUSE_LOCATION (ifc2), |
12293 | "expected %<cancel%> %<if%> clause modifier"); |
12294 | } |
12295 | } |
12296 | |
12297 | if (!processing_template_decl) |
12298 | ifc = maybe_convert_cond (OMP_CLAUSE_IF_EXPR (ifc)); |
12299 | else |
12300 | ifc = build_x_binary_op (OMP_CLAUSE_LOCATION (ifc), NE_EXPR, |
12301 | OMP_CLAUSE_IF_EXPR (ifc), ERROR_MARK, |
12302 | integer_zero_node, ERROR_MARK, |
12303 | NULL_TREE, NULL, tf_warning_or_error); |
12304 | } |
12305 | else |
12306 | ifc = boolean_true_node; |
12307 | vec->quick_push (build_int_cst (integer_type_node, mask)); |
12308 | vec->quick_push (ifc); |
12309 | tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error); |
12310 | finish_expr_stmt (stmt); |
12311 | } |
12312 | |
12313 | void |
12314 | finish_omp_cancellation_point (tree clauses) |
12315 | { |
12316 | tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT); |
12317 | int mask = 0; |
12318 | if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL)) |
12319 | mask = 1; |
12320 | else if (omp_find_clause (clauses, OMP_CLAUSE_FOR)) |
12321 | mask = 2; |
12322 | else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS)) |
12323 | mask = 4; |
12324 | else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP)) |
12325 | mask = 8; |
12326 | else |
12327 | { |
12328 | error ("%<#pragma omp cancellation point%> must specify one of " |
12329 | "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses"); |
12330 | return; |
12331 | } |
12332 | releasing_vec vec |
12333 | = make_tree_vector_single (build_int_cst (integer_type_node, mask)); |
12334 | tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error); |
12335 | finish_expr_stmt (stmt); |
12336 | } |
12337 | |
12338 | /* Begin a __transaction_atomic or __transaction_relaxed statement. |
12339 | If PCOMPOUND is non-null, this is for a function-transaction-block, and we |
12340 | should create an extra compound stmt. */ |
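| /* Illustrative example: the statement form |
| |
| __transaction_atomic { x++; } |
| |
| is handled by calling begin_transaction_stmt, parsing the compound |
| statement into the pushed statement list, and then calling |
| finish_transaction_stmt below. */ |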
12341 | |
12342 | tree |
12343 | begin_transaction_stmt (location_t loc, tree *pcompound, int flags) |
12344 | { |
12345 | tree r; |
12346 | |
12347 | if (pcompound) |
12348 | *pcompound = begin_compound_stmt (0); |
12349 | |
12350 | r = build_stmt (loc, TRANSACTION_EXPR, NULL_TREE); |
12351 | |
12352 | /* Only add the statement to the function if support enabled. */ |
12353 | if (flag_tm) |
12354 | add_stmt (r); |
12355 | else |
12356 | error_at (loc, ((flags & TM_STMT_ATTR_RELAXED) != 0 |
12357 | ? G_("%<__transaction_relaxed%> without " |
12358 | "transactional memory support enabled") |
12359 | : G_("%<__transaction_atomic%> without " |
12360 | "transactional memory support enabled"))); |
12361 | |
12362 | TRANSACTION_EXPR_BODY (r) = push_stmt_list (); |
12363 | TREE_SIDE_EFFECTS (r) = 1; |
12364 | return r; |
12365 | } |
12366 | |
12367 | /* End a __transaction_atomic or __transaction_relaxed statement. |
12368 | If COMPOUND_STMT is non-null, this is for a function-transaction-block, |
12369 | and we should end the compound. If NOEX is non-NULL, we wrap the body in |
12370 | a MUST_NOT_THROW_EXPR with NOEX as condition. */ |
12371 | |
12372 | void |
12373 | finish_transaction_stmt (tree stmt, tree compound_stmt, int flags, tree noex) |
12374 | { |
12375 | TRANSACTION_EXPR_BODY (stmt) = pop_stmt_list (TRANSACTION_EXPR_BODY (stmt)); |
12376 | TRANSACTION_EXPR_OUTER (stmt) = (flags & TM_STMT_ATTR_OUTER) != 0; |
12377 | TRANSACTION_EXPR_RELAXED (stmt) = (flags & TM_STMT_ATTR_RELAXED) != 0; |
12378 | TRANSACTION_EXPR_IS_STMT (stmt) = 1; |
12379 | |
12380 | /* noexcept specifications are not allowed for function transactions. */ |
12381 | gcc_assert (!(noex && compound_stmt)); |
12382 | if (noex) |
12383 | { |
12384 | tree body = build_must_not_throw_expr (TRANSACTION_EXPR_BODY (stmt), |
12385 | noex); |
12386 | protected_set_expr_location |
12387 | (body, EXPR_LOCATION (TRANSACTION_EXPR_BODY (stmt))); |
12388 | TREE_SIDE_EFFECTS (body) = 1; |
12389 | TRANSACTION_EXPR_BODY (stmt) = body; |
12390 | } |
12391 | |
12392 | if (compound_stmt) |
12393 | finish_compound_stmt (compound_stmt); |
12394 | } |
12395 | |
12396 | /* Build a __transaction_atomic or __transaction_relaxed expression. If |
12397 | NOEX is non-NULL, we wrap the body in a MUST_NOT_THROW_EXPR with NOEX as |
12398 | condition. */ |
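| /* Illustrative example: the expression form |
| |
| int i = __transaction_atomic (x + 1); |
| |
| wraps the sub-expression in a TRANSACTION_EXPR of the same type. */ |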
12399 | |
12400 | tree |
12401 | build_transaction_expr (location_t loc, tree expr, int flags, tree noex) |
12402 | { |
12403 | tree ret; |
12404 | if (noex) |
12405 | { |
12406 | expr = build_must_not_throw_expr (expr, noex); |
12407 | protected_set_expr_location (expr, loc); |
12408 | TREE_SIDE_EFFECTS (expr) = 1; |
12409 | } |
12410 | ret = build1 (TRANSACTION_EXPR, TREE_TYPE (expr), expr); |
12411 | if (flags & TM_STMT_ATTR_RELAXED) |
12412 | TRANSACTION_EXPR_RELAXED (ret) = 1; |
12413 | TREE_SIDE_EFFECTS (ret) = 1; |
12414 | SET_EXPR_LOCATION (ret, loc); |
12415 | return ret; |
12416 | } |
12417 | |
12418 | void |
12419 | init_cp_semantics (void) |
12420 | { |
12421 | } |
12422 | |
12423 | |
12424 | /* Type-check the constant string message at LOCATION. Returns true if successful, |
12425 | otherwise false. */ |
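| /* Illustrative example (the struct below is hypothetical): besides plain |
| string literals, the message may be an object with constexpr size () |
| and data () members (static_assert with a user-generated message, |
| P2741), e.g. |
| |
| struct msg { |
| constexpr int size () const { return 2; } |
| constexpr const char *data () const { return "hi"; } |
| }; |
| static_assert (false, msg{}); |
| |
| type_check verifies those members exist and are suitably convertible; |
| the extract overloads below evaluate them to recover the text. */ |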
12426 | |
12427 | bool |
12428 | cexpr_str::type_check (location_t location) |
12429 | { |
12430 | tsubst_flags_t complain = tf_warning_or_error; |
12431 | |
12432 | if (message == NULL_TREE |
12433 | || message == error_mark_node |
12434 | || check_for_bare_parameter_packs (message)) |
12435 | return false; |
12436 | |
12437 | if (TREE_CODE (message) != STRING_CST |
12438 | && !type_dependent_expression_p (message)) |
12439 | { |
12440 | message_sz |
12441 | = finish_class_member_access_expr (message, |
12442 | get_identifier ("size"), |
12443 | false, complain); |
12444 | if (message_sz != error_mark_node) |
12445 | message_data |
12446 | = finish_class_member_access_expr (message, |
12447 | get_identifier ("data"), |
12448 | false, complain); |
12449 | if (message_sz == error_mark_node || message_data == error_mark_node) |
12450 | { |
12451 | error_at (location, "constexpr string must be a string " |
12452 | "literal or object with %<size%> and " |
12453 | "%<data%> members"); |
12454 | return false; |
12455 | } |
12456 | releasing_vec size_args, data_args; |
12457 | message_sz = finish_call_expr (message_sz, &size_args, false, false, |
12458 | complain); |
12459 | message_data = finish_call_expr (message_data, &data_args, false, false, |
12460 | complain); |
12461 | if (message_sz == error_mark_node || message_data == error_mark_node) |
12462 | return false; |
12463 | message_sz = build_converted_constant_expr (size_type_node, message_sz, |
12464 | complain); |
12465 | if (message_sz == error_mark_node) |
12466 | { |
12467 | error_at (location, "constexpr string %<size()%> " |
12468 | "must be implicitly convertible to " |
12469 | "%<std::size_t%>"); |
12470 | return false; |
12471 | } |
12472 | message_data = build_converted_constant_expr (const_string_type_node, |
12473 | message_data, complain); |
12474 | if (message_data == error_mark_node) |
12475 | { |
12476 | error_at (location, "constexpr string %<data()%> " |
12477 | "must be implicitly convertible to " |
12478 | "%<const char*%>"); |
12479 | return false; |
12480 | } |
12481 | } |
12482 | return true; |
12483 | } |
12484 | |
12485 | /* Extract constant string at LOCATION into output string STR. |
12486 | Returns true if successful, otherwise false. */ |
12487 | |
12488 | bool |
12489 | cexpr_str::extract (location_t location, tree &str) |
12490 | { |
12491 | const char *msg; |
12492 | int len; |
12493 | if (!extract (location, msg, len)) |
12494 | return false; |
12495 | str = build_string (len, msg); |
12496 | return true; |
12497 | } |
12498 | |
12499 | /* Extract constant string at LOCATION into output string MSG with LEN. |
12500 | Returns true if successful, otherwise false. */ |
12501 | |
12502 | bool |
12503 | cexpr_str::extract (location_t location, const char * & msg, int &len) |
12504 | { |
12505 | tsubst_flags_t complain = tf_warning_or_error; |
12506 | |
12507 | msg = NULL; |
12508 | if (message_sz && message_data) |
12509 | { |
12510 | tree msz = cxx_constant_value (message_sz, NULL_TREE, complain); |
12511 | if (!tree_fits_uhwi_p (msz)) |
12512 | { |
12513 | error_at (location, |
12514 | "constexpr string %<size()%> " |
12515 | "must be a constant expression"); |
12516 | return false; |
12517 | } |
12518 | else if ((unsigned HOST_WIDE_INT) (int) tree_to_uhwi (msz) |
12519 | != tree_to_uhwi (msz)) |
12520 | { |
12521 | error_at (location, |
12522 | "constexpr string message %<size()%> " |
12523 | "%qE too large", msz); |
12524 | return false; |
12525 | } |
12526 | len = tree_to_uhwi (msz); |
12527 | tree data = maybe_constant_value (message_data, NULL_TREE, |
12528 | mce_true); |
12529 | if (!reduced_constant_expression_p (data)) |
12530 | data = NULL_TREE; |
12531 | if (len) |
12532 | { |
12533 | if (data) |
12534 | msg = c_getstr (data); |
12535 | if (msg == NULL) |
12536 | buf = XNEWVEC (char, len); |
12537 | for (int i = 0; i < len; ++i) |
12538 | { |
12539 | tree t = message_data; |
12540 | if (i) |
12541 | t = build2 (POINTER_PLUS_EXPR, |
12542 | TREE_TYPE (message_data), message_data, |
12543 | size_int (i)); |
12544 | t = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t); |
12545 | tree t2 = cxx_constant_value (t, NULL_TREE, complain); |
12546 | if (!tree_fits_shwi_p (t2)) |
12547 | { |
12548 | error_at (location, |
12549 | "constexpr string %<data()[%d]%> " |
12550 | "must be a constant expression", i); |
12551 | return false; |
12552 | } |
12553 | if (msg == NULL) |
12554 | buf[i] = tree_to_shwi (t2); |
12555 | /* If c_getstr worked, just verify the first and |
12556 | last characters using constant evaluation. */ |
12557 | else if (len > 2 && i == 0) |
12558 | i = len - 2; |
12559 | } |
12560 | if (msg == NULL) |
12561 | msg = buf; |
12562 | } |
12563 | else if (!data) |
12564 | { |
12565 | /* We don't have any function to test whether some |
12566 | expression is a core constant expression. So, instead |
12567 | test whether (message.data (), 0) is a constant |
12568 | expression. */ |
12569 | data = build2 (COMPOUND_EXPR, integer_type_node, |
12570 | message_data, integer_zero_node); |
12571 | tree t = cxx_constant_value (data, NULL_TREE, complain); |
12572 | if (!integer_zerop (t)) |
12573 | { |
12574 | error_at (location, |
12575 | "constexpr string %<data()%> " |
12576 | "must be a core constant expression"); |
12577 | return false; |
12578 | } |
12579 | } |
12580 | } |
12581 | else |
12582 | { |
12583 | tree eltype = TREE_TYPE (TREE_TYPE (message)); |
12584 | int sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (eltype)); |
12585 | msg = TREE_STRING_POINTER (message); |
12586 | len = TREE_STRING_LENGTH (message) / sz - 1; |
12587 | } |
12588 | |
12589 | return true; |
12590 | } |
12591 | |
12592 | /* Build a STATIC_ASSERT for a static assertion with the condition |
12593 | CONDITION and the message text MESSAGE. LOCATION is the location |
12594 | of the static assertion in the source code. When MEMBER_P, this |
12595 | static assertion is a member of a class. If SHOW_EXPR_P is true, |
12596 | print the condition (because it was instantiation-dependent). */ |
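| /* Illustrative example: |
| |
| static_assert (sizeof (long long) >= 8, "need a 64-bit type"); |
| |
| Inside a template, an instantiation-dependent condition or message is |
| deferred as a STATIC_ASSERT node and re-checked at instantiation time, |
| as the code below shows. */ |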
12597 | |
12598 | void |
12599 | finish_static_assert (tree condition, tree message, location_t location, |
12600 | bool member_p, bool show_expr_p) |
12601 | { |
12602 | tsubst_flags_t complain = tf_warning_or_error; |
12603 | |
12604 | if (condition == NULL_TREE |
12605 | || condition == error_mark_node) |
12606 | return; |
12607 | |
12608 | if (check_for_bare_parameter_packs (condition)) |
12609 | return; |
12610 | |
12611 | cexpr_str cstr(message); |
12612 | if (!cstr.type_check (location)) |
12613 | return; |
12614 | |
12615 | /* Save the condition in case it was a concept check. */ |
12616 | tree orig_condition = condition; |
12617 | |
12618 | if (instantiation_dependent_expression_p (condition) |
12619 | || instantiation_dependent_expression_p (message)) |
12620 | { |
12621 | /* We're in a template; build a STATIC_ASSERT and put it in |
12622 | the right place. */ |
12623 | defer: |
12624 | tree assertion = make_node (STATIC_ASSERT); |
12625 | STATIC_ASSERT_CONDITION (assertion) = orig_condition; |
12626 | STATIC_ASSERT_MESSAGE (assertion) = cstr.message; |
12627 | STATIC_ASSERT_SOURCE_LOCATION (assertion) = location; |
12628 | |
12629 | if (member_p) |
12630 | maybe_add_class_template_decl_list (current_class_type, |
12631 | assertion, |
12632 | /*friend_p=*/0); |
12633 | else |
12634 | add_stmt (assertion);
12635 | |
12636 | return; |
12637 | } |
12638 | |
12639 | /* Fold the expression and convert it to a boolean value. */ |
12640 | condition = contextual_conv_bool (condition, complain); |
12641 | condition = fold_non_dependent_expr (condition, complain, |
12642 | /*manifestly_const_eval=*/true); |
12643 | |
12644 | if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition)) |
12645 | /* Do nothing; the condition is satisfied. */ |
12646 | ; |
12647 | else |
12648 | { |
12649 | iloc_sentinel ils (location); |
12650 | |
12651 | if (integer_zerop (condition)) |
12652 | { |
12653 | /* CWG2518: static_assert failure in a template is not IFNDR. */ |
12654 | if (processing_template_decl) |
12655 | goto defer; |
12656 | |
12657 | int len; |
12658 | const char *msg = NULL; |
12659 | if (!cstr.extract (location, msg, len)) |
12660 | return; |
12661 | |
12662 | /* See if we can find which clause was failing (for logical AND). */ |
12663 | tree bad = find_failing_clause (NULL, orig_condition); |
12664 | /* If not, or its location is unusable, fall back to the previous |
12665 | location. */ |
12666 | location_t cloc = cp_expr_loc_or_loc (bad, location);
12667 | |
12668 | auto_diagnostic_group d; |
12669 | |
12670 | /* Report the error. */ |
12671 | if (len == 0) |
12672 | error_at (cloc, "static assertion failed"); |
12673 | else |
12674 | error_at (cloc, "static assertion failed: %.*s", len, msg); |
12675 | |
12676 | diagnose_failing_condition (bad, cloc, show_expr_p); |
12677 | } |
12678 | else if (condition && condition != error_mark_node) |
12679 | { |
12680 | error ("non-constant condition for static assertion"); |
12681 | if (require_rvalue_constant_expression (condition)) |
12682 | cxx_constant_value (condition); |
12683 | } |
12684 | } |
12685 | } |
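
/* Editorial example (a sketch, not part of the original sources; the struct
   name "msg" is invented): the C++26 user-generated static_assert messages
   that the cexpr_str helpers above extract look like

     struct msg {
       static constexpr const char *data () { return "bad long size"; }
       static constexpr int size () { return 13; }
     };
     static_assert (sizeof (long) == 8, msg {});

   The message object only needs data () and size () members whose results
   can be constant-evaluated; the characters are then pulled out one by one
   (or via c_getstr) for the diagnostic text.  */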
12686 | |
12687 | /* Implements the C++0x decltype keyword. Returns the type of EXPR, |
12688 | suitable for use as a type-specifier. |
12689 | |
12690 | ID_EXPRESSION_OR_MEMBER_ACCESS_P is true when EXPR was parsed as an |
12691 | id-expression or a class member access, FALSE when it was parsed as |
12692 | a full expression. */ |
12693 | |
12694 | tree |
12695 | finish_decltype_type (tree expr, bool id_expression_or_member_access_p, |
12696 | tsubst_flags_t complain) |
12697 | { |
12698 | tree type = NULL_TREE; |
12699 | |
12700 | if (!expr || error_operand_p (expr))
12701 | return error_mark_node; |
12702 | |
12703 | if (TYPE_P (expr) |
12704 | || TREE_CODE (expr) == TYPE_DECL |
12705 | || (TREE_CODE (expr) == BIT_NOT_EXPR |
12706 | && TYPE_P (TREE_OPERAND (expr, 0)))) |
12707 | { |
12708 | if (complain & tf_error) |
12709 | error ("argument to %<decltype%> must be an expression"); |
12710 | return error_mark_node; |
12711 | } |
12712 | |
12713 | /* decltype is an unevaluated context. */ |
12714 | cp_unevaluated u; |
12715 | |
12716 | processing_template_decl_sentinel ptds (/*reset=*/false); |
12717 | |
12718 | /* Depending on the resolution of DR 1172, we may later need to distinguish |
12719 | instantiation-dependent but not type-dependent expressions so that, say, |
12720 | A<decltype(sizeof(T))>::U doesn't require 'typename'. */ |
12721 | if (instantiation_dependent_uneval_expression_p (expr)) |
12722 | { |
12723 | dependent: |
12724 | type = cxx_make_type (DECLTYPE_TYPE); |
12725 | DECLTYPE_TYPE_EXPR (type) = expr; |
12726 | DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type) |
12727 | = id_expression_or_member_access_p; |
12728 | SET_TYPE_STRUCTURAL_EQUALITY (type); |
12729 | |
12730 | return type; |
12731 | } |
12732 | else if (processing_template_decl) |
12733 | { |
12734 | expr = instantiate_non_dependent_expr (expr, complain|tf_decltype); |
12735 | if (expr == error_mark_node) |
12736 | return error_mark_node; |
12737 | /* Keep processing_template_decl cleared for the rest of the function |
12738 | (for sake of the call to lvalue_kind below, which handles templated |
12739 | and non-templated COND_EXPR differently). */ |
12740 | processing_template_decl = 0; |
12741 | } |
12742 | |
12743 | /* The type denoted by decltype(e) is defined as follows: */ |
12744 | |
12745 | expr = resolve_nondeduced_context (expr, complain); |
12746 | if (!mark_single_function (expr, complain)) |
12747 | return error_mark_node; |
12748 | |
12749 | if (invalid_nonstatic_memfn_p (input_location, expr, complain)) |
12750 | return error_mark_node; |
12751 | |
12752 | if (type_unknown_p (expr)) |
12753 | { |
12754 | if (complain & tf_error) |
12755 | error ("%<decltype%> cannot resolve address of overloaded function"); |
12756 | return error_mark_node; |
12757 | } |
12758 | |
12759 | if (id_expression_or_member_access_p) |
12760 | { |
12761 | /* If e is an id-expression or a class member access (5.2.5 |
12762 | [expr.ref]), decltype(e) is defined as the type of the entity |
12763 | named by e. If there is no such entity, or e names a set of |
12764 | overloaded functions, the program is ill-formed. */ |
12765 | if (identifier_p (expr))
12766 | expr = lookup_name (expr);
12767 | |
12768 | if (INDIRECT_REF_P (expr) |
12769 | || TREE_CODE (expr) == VIEW_CONVERT_EXPR) |
12770 | /* This can happen when the expression is, e.g., "a.b". Just |
12771 | look at the underlying operand. */ |
12772 | expr = TREE_OPERAND (expr, 0); |
12773 | |
12774 | if (TREE_CODE (expr) == OFFSET_REF |
12775 | || TREE_CODE (expr) == MEMBER_REF |
12776 | || TREE_CODE (expr) == SCOPE_REF) |
12777 | /* We're only interested in the field itself. If it is a |
12778 | BASELINK, we will need to see through it in the next |
12779 | step. */ |
12780 | expr = TREE_OPERAND (expr, 1); |
12781 | |
12782 | if (BASELINK_P (expr)) |
12783 | /* See through BASELINK nodes to the underlying function. */ |
12784 | expr = BASELINK_FUNCTIONS (expr); |
12785 | |
12786 | /* decltype of a decomposition name drops references in the tuple case |
12787 | (unlike decltype of a normal variable) and keeps cv-qualifiers from |
12788 | the containing object in the other cases (unlike decltype of a member |
12789 | access expression). */ |
12790 | if (DECL_DECOMPOSITION_P (expr)) |
12791 | { |
12792 | if (ptds.saved) |
12793 | { |
12794 | gcc_checking_assert (DECL_HAS_VALUE_EXPR_P (expr)); |
12795 | /* DECL_HAS_VALUE_EXPR_P is always set if |
12796 | processing_template_decl. If lookup_decomp_type |
12797 | returns non-NULL, it is the tuple case. */ |
12798 | if (tree ret = lookup_decomp_type (expr)) |
12799 | return ret; |
12800 | } |
12801 | if (DECL_HAS_VALUE_EXPR_P (expr)) |
12802 | /* Expr is an array or struct subobject proxy, handle |
12803 | bit-fields properly. */ |
12804 | return unlowered_expr_type (expr); |
12805 | else |
12806 | /* Expr is a reference variable for the tuple case. */ |
12807 | return lookup_decomp_type (expr); |
12808 | } |
12809 | |
12810 | switch (TREE_CODE (expr)) |
12811 | { |
12812 | case FIELD_DECL: |
12813 | if (DECL_BIT_FIELD_TYPE (expr)) |
12814 | { |
12815 | type = DECL_BIT_FIELD_TYPE (expr); |
12816 | break; |
12817 | } |
12818 | /* Fall through for fields that aren't bitfields. */ |
12819 | gcc_fallthrough (); |
12820 | |
12821 | case VAR_DECL: |
12822 | if (is_capture_proxy (expr)) |
12823 | { |
12824 | if (is_normal_capture_proxy (expr)) |
12825 | { |
12826 | expr = DECL_CAPTURED_VARIABLE (expr); |
12827 | type = TREE_TYPE (expr); |
12828 | } |
12829 | else |
12830 | { |
12831 | expr = DECL_VALUE_EXPR (expr); |
12832 | gcc_assert (TREE_CODE (expr) == COMPONENT_REF); |
12833 | expr = TREE_OPERAND (expr, 1); |
12834 | type = TREE_TYPE (expr); |
12835 | } |
12836 | break; |
12837 | } |
12838 | /* Fall through for variables that aren't capture proxies. */ |
12839 | gcc_fallthrough (); |
12840 | |
12841 | case FUNCTION_DECL: |
12842 | case CONST_DECL: |
12843 | case PARM_DECL: |
12844 | case RESULT_DECL: |
12845 | case TEMPLATE_PARM_INDEX: |
12846 | expr = mark_type_use (expr); |
12847 | type = TREE_TYPE (expr); |
12848 | if (VAR_P (expr) && DECL_NTTP_OBJECT_P (expr)) |
12849 | { |
12850 | /* decltype of an NTTP object is the type of the template |
12851 | parameter, which is the object type modulo cv-quals. */ |
12852 | int quals = cp_type_quals (type); |
12853 | gcc_checking_assert (quals & TYPE_QUAL_CONST); |
12854 | type = cv_unqualified (type); |
12855 | } |
12856 | break; |
12857 | |
12858 | case ERROR_MARK: |
12859 | type = error_mark_node; |
12860 | break; |
12861 | |
12862 | case COMPONENT_REF: |
12863 | case COMPOUND_EXPR: |
12864 | mark_type_use (expr); |
12865 | type = is_bitfield_expr_with_lowered_type (expr); |
12866 | if (!type) |
12867 | type = TREE_TYPE (TREE_OPERAND (expr, 1)); |
12868 | break; |
12869 | |
12870 | case BIT_FIELD_REF: |
12871 | gcc_unreachable (); |
12872 | |
12873 | case INTEGER_CST: |
12874 | case PTRMEM_CST: |
12875 | /* We can get here when the id-expression refers to an |
12876 | enumerator or non-type template parameter. */ |
12877 | type = TREE_TYPE (expr); |
12878 | break; |
12879 | |
12880 | default: |
12881 | /* Handle instantiated template non-type arguments. */ |
12882 | type = TREE_TYPE (expr); |
12883 | break; |
12884 | } |
12885 | } |
12886 | else |
12887 | { |
12888 | if (outer_automatic_var_p (STRIP_REFERENCE_REF (expr)) |
12889 | && current_function_decl |
12890 | && LAMBDA_FUNCTION_P (current_function_decl)) |
12891 | { |
12892 | /* [expr.prim.id.unqual]/3: If naming the entity from outside of an |
12893 | unevaluated operand within S would refer to an entity captured by |
12894 | copy in some intervening lambda-expression, then let E be the |
12895 | innermost such lambda-expression. |
12896 | |
12897 | If there is such a lambda-expression and if P is in E's function |
12898 | parameter scope but not its parameter-declaration-clause, then the |
12899 | type of the expression is the type of a class member access |
12900 | expression naming the non-static data member that would be declared |
12901 | for such a capture in the object parameter of the function call |
12902 | operator of E." */ |
12903 | /* FIXME: This transformation needs to happen for all uses of an outer |
12904 | local variable inside decltype, not just decltype((x)) (PR83167). |
12905 | And we don't handle nested lambdas properly, where we need to |
12906 | consider the outer lambdas as well (PR112926). */ |
12907 | tree decl = STRIP_REFERENCE_REF (expr); |
12908 | tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl)); |
12909 | tree cap = lookup_name (DECL_NAME (decl), LOOK_where::BLOCK, |
12910 | LOOK_want::HIDDEN_LAMBDA); |
12911 | |
12912 | if (cap && is_capture_proxy (cap)) |
12913 | type = TREE_TYPE (cap); |
12914 | else if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam) == CPLD_COPY) |
12915 | { |
12916 | type = TREE_TYPE (decl); |
12917 | if (TYPE_REF_P (type) |
12918 | && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE) |
12919 | type = TREE_TYPE (type); |
12920 | } |
12921 | |
12922 | if (type && !TYPE_REF_P (type)) |
12923 | { |
12924 | tree obtype = TREE_TYPE (DECL_ARGUMENTS (current_function_decl)); |
12925 | if (WILDCARD_TYPE_P (non_reference (obtype))) |
12926 | /* We don't know what the eventual obtype quals will be. */ |
12927 | goto dependent; |
12928 | auto direct_type = [](tree t){ |
12929 | if (INDIRECT_TYPE_P (t)) |
12930 | return TREE_TYPE (t); |
12931 | return t; |
12932 | }; |
12933 | int const quals = cp_type_quals (type) |
12934 | | cp_type_quals (direct_type (obtype)); |
12935 | type = cp_build_qualified_type (type, quals); |
12936 | type = build_reference_type (type); |
12937 | } |
12938 | } |
12939 | else if (error_operand_p (expr))
12940 | type = error_mark_node; |
12941 | else if (expr == current_class_ptr) |
12942 | /* If the expression is just "this", we want the |
12943 | cv-unqualified pointer for the "this" type. */ |
12944 | type = TYPE_MAIN_VARIANT (TREE_TYPE (expr)); |
12945 | |
12946 | if (!type) |
12947 | { |
12948 | /* Otherwise, where T is the type of e, if e is an lvalue, |
12949 | decltype(e) is defined as T&; if an xvalue, T&&; otherwise, T. */ |
12950 | cp_lvalue_kind clk = lvalue_kind (expr); |
12951 | type = unlowered_expr_type (expr); |
12952 | gcc_assert (!TYPE_REF_P (type)); |
12953 | |
12954 | /* For vector types, pick a non-opaque variant. */ |
12955 | if (VECTOR_TYPE_P (type)) |
12956 | type = strip_typedefs (type); |
12957 | |
12958 | if (clk != clk_none && !(clk & clk_class)) |
12959 | type = cp_build_reference_type (type, (clk & clk_rvalueref)); |
12960 | } |
12961 | } |
12962 | |
12963 | return type; |
12964 | } |
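
/* Editorial sketch (not part of the original sources; variable names are
   invented) of the two decltype flavours distinguished above:

     int i = 0;
     int &ri = i;
     static_assert (__is_same (decltype (i), int), "");     // id-expression
     static_assert (__is_same (decltype (ri), int &), "");  // declared type
     static_assert (__is_same (decltype ((i)), int &), ""); // lvalue -> T&
     static_assert (__is_same (decltype (static_cast<int &&> (i)), int &&),
                    "");                                    // xvalue -> T&&

   The unparenthesised id-expression form reports the declared type; the
   parenthesised form falls into the "else" branch and classifies the value
   category via lvalue_kind.  */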
12965 | |
12966 | /* Called from trait_expr_value to evaluate either __has_nothrow_assign or |
12967 | __has_nothrow_copy, depending on assign_p. Returns true iff all |
12968 | the copy {ctor,assign} fns are nothrow. */ |
12969 | |
12970 | static bool |
12971 | classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p) |
12972 | { |
12973 | tree fns = NULL_TREE; |
12974 | |
12975 | if (assign_p || TYPE_HAS_COPY_CTOR (type)) |
12976 | fns = get_class_binding (type, assign_p ? assign_op_identifier |
12977 | : ctor_identifier); |
12978 | |
12979 | bool saw_copy = false; |
12980 | for (ovl_iterator iter (fns); iter; ++iter) |
12981 | { |
12982 | tree fn = *iter; |
12983 | |
12984 | if (copy_fn_p (fn) > 0) |
12985 | { |
12986 | saw_copy = true; |
12987 | if (!maybe_instantiate_noexcept (fn) |
12988 | || !TYPE_NOTHROW_P (TREE_TYPE (fn))) |
12989 | return false; |
12990 | } |
12991 | } |
12992 | |
12993 | return saw_copy; |
12994 | } |
12995 | |
12996 | /* Return true if BASE is a pointer-interconvertible base of DERIVED. */
12997 | |
12998 | static bool |
12999 | pointer_interconvertible_base_of_p (tree base, tree derived) |
13000 | { |
13001 | if (base == error_mark_node || derived == error_mark_node) |
13002 | return false; |
13003 | base = TYPE_MAIN_VARIANT (base); |
13004 | derived = TYPE_MAIN_VARIANT (derived); |
13005 | if (!NON_UNION_CLASS_TYPE_P (base) |
13006 | || !NON_UNION_CLASS_TYPE_P (derived)) |
13007 | return false; |
13008 | |
13009 | if (same_type_p (base, derived)) |
13010 | return true; |
13011 | |
13012 | if (!std_layout_type_p (derived)) |
13013 | return false; |
13014 | |
13015 | return uniquely_derived_from_p (base, derived); |
13016 | } |
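
/* Editorial sketch (not part of the original sources; class names are
   invented): B is a pointer-interconvertible base of D because D is a
   standard-layout class that adds no data of its own.

     struct B { int i; };
     struct D : B { };
     static_assert (std::is_pointer_interconvertible_base_of_v<B, D>, "");

   The C++20 library trait std::is_pointer_interconvertible_base_of
   ultimately relies on this kind of check.  */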
13017 | |
13018 | /* Helper function for fold_builtin_is_pointer_inverconvertible_with_class, |
13019 | return true if MEMBERTYPE is the type of the first non-static data member |
13020 | of TYPE or, for unions, of any of its members. */
13021 | static bool |
13022 | first_nonstatic_data_member_p (tree type, tree membertype) |
13023 | { |
13024 | for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
13025 | { |
13026 | if (TREE_CODE (field) != FIELD_DECL) |
13027 | continue; |
13028 | if (DECL_FIELD_IS_BASE (field) && is_empty_field (field)) |
13029 | continue; |
13030 | if (DECL_FIELD_IS_BASE (field)) |
13031 | return first_nonstatic_data_member_p (TREE_TYPE (field), membertype); |
13032 | if (ANON_AGGR_TYPE_P (TREE_TYPE (field))) |
13033 | { |
13034 | if ((TREE_CODE (TREE_TYPE (field)) == UNION_TYPE |
13035 | || std_layout_type_p (TREE_TYPE (field))) |
13036 | && first_nonstatic_data_member_p (TREE_TYPE (field), membertype)) |
13037 | return true; |
13038 | } |
13039 | else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field), |
13040 | membertype)) |
13041 | return true; |
13042 | if (TREE_CODE (type) != UNION_TYPE) |
13043 | return false; |
13044 | } |
13045 | return false; |
13046 | } |
13047 | |
13048 | /* Fold __builtin_is_pointer_interconvertible_with_class call. */ |
13049 | |
13050 | tree |
13051 | fold_builtin_is_pointer_inverconvertible_with_class (location_t loc, int nargs, |
13052 | tree *args) |
13053 | { |
13054 | /* Unless users call the builtin directly, the following 3 checks should be |
13055 | ensured from std::is_pointer_interconvertible_with_class function |
13056 | template. */ |
13057 | if (nargs != 1) |
13058 | { |
13059 | error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> " |
13060 | "needs a single argument"); |
13061 | return boolean_false_node; |
13062 | } |
13063 | tree arg = args[0]; |
13064 | if (error_operand_p (arg))
13065 | return boolean_false_node; |
13066 | if (!TYPE_PTRMEM_P (TREE_TYPE (arg))) |
13067 | { |
13068 | error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> " |
13069 | "argument is not pointer to member"); |
13070 | return boolean_false_node; |
13071 | } |
13072 | |
13073 | if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg))) |
13074 | return boolean_false_node; |
13075 | |
13076 | tree membertype = TREE_TYPE (TREE_TYPE (arg)); |
13077 | tree basetype = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg)); |
13078 | if (!complete_type_or_else (basetype, NULL_TREE)) |
13079 | return boolean_false_node; |
13080 | |
13081 | if (TREE_CODE (basetype) != UNION_TYPE |
13082 | && !std_layout_type_p (basetype)) |
13083 | return boolean_false_node; |
13084 | |
13085 | if (!first_nonstatic_data_member_p (basetype, membertype))
13086 | return boolean_false_node; |
13087 | |
13088 | if (TREE_CODE (arg) == PTRMEM_CST) |
13089 | arg = cplus_expand_constant (arg); |
13090 | |
13091 | if (integer_nonzerop (arg)) |
13092 | return boolean_false_node; |
13093 | if (integer_zerop (arg)) |
13094 | return boolean_true_node; |
13095 | |
13096 | return fold_build2 (EQ_EXPR, boolean_type_node, arg, |
13097 | build_zero_cst (TREE_TYPE (arg))); |
13098 | } |
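
/* Editorial usage sketch (not part of the original sources; the class name
   is invented):

     struct S { int x; int y; };   // standard-layout
     static_assert (std::is_pointer_interconvertible_with_class (&S::x), "");
     static_assert (!std::is_pointer_interconvertible_with_class (&S::y), "");

   Only a pointer to the first non-static data member of a standard-layout
   class (or, for a union, to any of its members) yields true, matching the
   first_nonstatic_data_member_p walk used above.  */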
13099 | |
13100 | /* Helper function for is_corresponding_member_aggr. Return true if |
13101 | MEMBERTYPE pointer-to-data-member ARG can be found in anonymous |
13102 | union or structure BASETYPE. */ |
13103 | |
13104 | static bool |
13105 | is_corresponding_member_union (tree basetype, tree membertype, tree arg) |
13106 | { |
13107 | for (tree field = TYPE_FIELDS (basetype); field; field = DECL_CHAIN (field)) |
13108 | if (TREE_CODE (field) != FIELD_DECL || DECL_BIT_FIELD_TYPE (field)) |
13109 | continue; |
13110 | else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field), |
13111 | membertype)) |
13112 | { |
13113 | if (TREE_CODE (arg) != INTEGER_CST |
13114 | || tree_int_cst_equal (arg, byte_position (field))) |
13115 | return true; |
13116 | } |
13117 | else if (ANON_AGGR_TYPE_P (TREE_TYPE (field))) |
13118 | { |
13119 | tree narg = arg; |
13120 | if (TREE_CODE (basetype) != UNION_TYPE |
13121 | && TREE_CODE (narg) == INTEGER_CST) |
13122 | narg = size_binop (MINUS_EXPR, arg, byte_position (field)); |
13123 | if (is_corresponding_member_union (TREE_TYPE (field), |
13124 | membertype, narg))
13125 | return true; |
13126 | } |
13127 | return false; |
13128 | } |
13129 | |
13130 | /* Helper function for fold_builtin_is_corresponding_member call. |
13131 | Return boolean_false_node if MEMBERTYPE1 BASETYPE1::*ARG1 and |
13132 | MEMBERTYPE2 BASETYPE2::*ARG2 aren't corresponding members, |
13133 | boolean_true_node if they are corresponding members, or for |
13134 | non-constant ARG2 the highest member offset for corresponding |
13135 | members. */ |
13136 | |
13137 | static tree |
13138 | is_corresponding_member_aggr (location_t loc, tree basetype1, tree membertype1, |
13139 | tree arg1, tree basetype2, tree membertype2, |
13140 | tree arg2) |
13141 | { |
13142 | tree field1 = TYPE_FIELDS (basetype1); |
13143 | tree field2 = TYPE_FIELDS (basetype2); |
13144 | tree ret = boolean_false_node; |
13145 | while (1) |
13146 | { |
13147 | bool r = next_common_initial_sequence (field1, field2); |
13148 | if (field1 == NULL_TREE || field2 == NULL_TREE) |
13149 | break; |
13150 | if (r |
13151 | && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field1), |
13152 | membertype1) |
13153 | && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field2), |
13154 | membertype2)) |
13155 | { |
13156 | tree pos = byte_position (field1); |
13157 | if (TREE_CODE (arg1) == INTEGER_CST |
13158 | && tree_int_cst_equal (arg1, pos)) |
13159 | { |
13160 | if (TREE_CODE (arg2) == INTEGER_CST) |
13161 | return boolean_true_node; |
13162 | return pos; |
13163 | } |
13164 | else if (TREE_CODE (arg1) != INTEGER_CST) |
13165 | ret = pos; |
13166 | } |
13167 | else if (ANON_AGGR_TYPE_P (TREE_TYPE (field1)) |
13168 | && ANON_AGGR_TYPE_P (TREE_TYPE (field2))) |
13169 | { |
13170 | if ((!lookup_attribute ("no_unique_address",
13171 | DECL_ATTRIBUTES (field1)))
13172 | != !lookup_attribute ("no_unique_address",
13173 | DECL_ATTRIBUTES (field2))) |
13174 | break; |
13175 | if (!tree_int_cst_equal (bit_position (field1), |
13176 | bit_position (field2))) |
13177 | break; |
13178 | bool overlap = true; |
13179 | tree pos = byte_position (field1); |
13180 | if (TREE_CODE (arg1) == INTEGER_CST) |
13181 | { |
13182 | tree off1 = fold_convert (sizetype, arg1); |
13183 | tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (field1)); |
13184 | if (tree_int_cst_lt (off1, pos)
13185 | || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz1), off1))
13186 | overlap = false; |
13187 | } |
13188 | if (TREE_CODE (arg2) == INTEGER_CST) |
13189 | { |
13190 | tree off2 = fold_convert (sizetype, arg2); |
13191 | tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (field2)); |
13192 | if (tree_int_cst_lt (off2, pos)
13193 | || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz2), off2))
13194 | overlap = false; |
13195 | } |
13196 | if (overlap |
13197 | && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field1)) |
13198 | && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field2))) |
13199 | { |
13200 | tree narg1 = arg1; |
13201 | if (TREE_CODE (arg1) == INTEGER_CST) |
13202 | narg1 = size_binop (MINUS_EXPR, |
13203 | fold_convert (sizetype, arg1), pos); |
13204 | tree narg2 = arg2; |
13205 | if (TREE_CODE (arg2) == INTEGER_CST) |
13206 | narg2 = size_binop (MINUS_EXPR, |
13207 | fold_convert (sizetype, arg2), pos); |
13208 | tree t1 = TREE_TYPE (field1); |
13209 | tree t2 = TREE_TYPE (field2); |
13210 | tree nret = is_corresponding_member_aggr (loc, t1, membertype1,
13211 | narg1, t2, membertype2,
13212 | narg2);
13213 | if (nret != boolean_false_node) |
13214 | { |
13215 | if (nret == boolean_true_node) |
13216 | return nret; |
13217 | if (TREE_CODE (arg1) == INTEGER_CST) |
13218 | return size_binop (PLUS_EXPR, nret, pos); |
13219 | ret = size_binop (PLUS_EXPR, nret, pos); |
13220 | } |
13221 | } |
13222 | else if (overlap |
13223 | && TREE_CODE (TREE_TYPE (field1)) == UNION_TYPE |
13224 | && TREE_CODE (TREE_TYPE (field2)) == UNION_TYPE) |
13225 | { |
13226 | tree narg1 = arg1; |
13227 | if (TREE_CODE (arg1) == INTEGER_CST) |
13228 | narg1 = size_binop (MINUS_EXPR, |
13229 | fold_convert (sizetype, arg1), pos); |
13230 | tree narg2 = arg2; |
13231 | if (TREE_CODE (arg2) == INTEGER_CST) |
13232 | narg2 = size_binop (MINUS_EXPR, |
13233 | fold_convert (sizetype, arg2), pos); |
13234 | if (is_corresponding_member_union (TREE_TYPE (field1), |
13235 | membertype1, narg1)
13236 | && is_corresponding_member_union (TREE_TYPE (field2),
13237 | membertype2, narg2))
13238 | { |
13239 | sorry_at (loc, "%<__builtin_is_corresponding_member%> " |
13240 | "not well defined for anonymous unions"); |
13241 | return boolean_false_node; |
13242 | } |
13243 | } |
13244 | } |
13245 | if (!r) |
13246 | break; |
13247 | field1 = DECL_CHAIN (field1); |
13248 | field2 = DECL_CHAIN (field2); |
13249 | } |
13250 | return ret; |
13251 | } |
13252 | |
13253 | /* Fold __builtin_is_corresponding_member call. */ |
13254 | |
13255 | tree |
13256 | fold_builtin_is_corresponding_member (location_t loc, int nargs, |
13257 | tree *args) |
13258 | { |
13259 | /* Unless users call the builtin directly, the following 3 checks should be |
13260 | ensured from std::is_corresponding_member function template. */ |
13261 | if (nargs != 2) |
13262 | { |
13263 | error_at (loc, "%<__builtin_is_corresponding_member%> " |
13264 | "needs two arguments"); |
13265 | return boolean_false_node; |
13266 | } |
13267 | tree arg1 = args[0]; |
13268 | tree arg2 = args[1]; |
13269 | if (error_operand_p (arg1) || error_operand_p (arg2))
13270 | return boolean_false_node; |
13271 | if (!TYPE_PTRMEM_P (TREE_TYPE (arg1)) |
13272 | || !TYPE_PTRMEM_P (TREE_TYPE (arg2))) |
13273 | { |
13274 | error_at (loc, "%<__builtin_is_corresponding_member%> " |
13275 | "argument is not pointer to member"); |
13276 | return boolean_false_node; |
13277 | } |
13278 | |
13279 | if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg1)) |
13280 | || !TYPE_PTRDATAMEM_P (TREE_TYPE (arg2))) |
13281 | return boolean_false_node; |
13282 | |
13283 | tree membertype1 = TREE_TYPE (TREE_TYPE (arg1)); |
13284 | tree basetype1 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg1)); |
13285 | if (!complete_type_or_else (basetype1, NULL_TREE)) |
13286 | return boolean_false_node; |
13287 | |
13288 | tree membertype2 = TREE_TYPE (TREE_TYPE (arg2)); |
13289 | tree basetype2 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg2)); |
13290 | if (!complete_type_or_else (basetype2, NULL_TREE)) |
13291 | return boolean_false_node; |
13292 | |
13293 | if (!NON_UNION_CLASS_TYPE_P (basetype1) |
13294 | || !NON_UNION_CLASS_TYPE_P (basetype2) |
13295 | || !std_layout_type_p (basetype1) |
13296 | || !std_layout_type_p (basetype2)) |
13297 | return boolean_false_node; |
13298 | |
13299 | /* If the member types aren't layout compatible, then they |
13300 | can't be corresponding members. */ |
13301 | if (!layout_compatible_type_p (membertype1, membertype2)) |
13302 | return boolean_false_node; |
13303 | |
13304 | if (TREE_CODE (arg1) == PTRMEM_CST) |
13305 | arg1 = cplus_expand_constant (arg1); |
13306 | if (TREE_CODE (arg2) == PTRMEM_CST) |
13307 | arg2 = cplus_expand_constant (arg2); |
13308 | |
13309 | if (null_member_pointer_value_p (arg1) |
13310 | || null_member_pointer_value_p (arg2)) |
13311 | return boolean_false_node; |
13312 | |
13313 | if (TREE_CODE (arg1) == INTEGER_CST |
13314 | && TREE_CODE (arg2) == INTEGER_CST |
13315 | && !tree_int_cst_equal (arg1, arg2)) |
13316 | return boolean_false_node; |
13317 | |
13318 | if (TREE_CODE (arg2) == INTEGER_CST |
13319 | && TREE_CODE (arg1) != INTEGER_CST) |
13320 | { |
13321 | std::swap (arg1, arg2);
13322 | std::swap (membertype1, membertype2);
13323 | std::swap (basetype1, basetype2);
13324 | } |
13325 | |
13326 | tree ret = is_corresponding_member_aggr (loc, basetype1, membertype1, arg1, |
13327 | basetype2, membertype2, arg2); |
13328 | if (TREE_TYPE (ret) == boolean_type_node) |
13329 | return ret; |
13330 | /* If both arg1 and arg2 are INTEGER_CSTs, is_corresponding_member_aggr |
13331 | already returns boolean_{true,false}_node whether those particular |
13332 | members are corresponding members or not. Otherwise, if only |
13333 | one of them is INTEGER_CST (canonicalized to first being INTEGER_CST |
13334 | above), it returns boolean_false_node if it is certainly not a |
13335 | corresponding member and otherwise we need to do a runtime check that |
13336 | those two OFFSET_TYPE offsets are equal. |
13337 | If neither of the operands is INTEGER_CST, is_corresponding_member_aggr |
13338 | returns the largest offset at which the members would be corresponding |
13339 | members, so perform arg1 <= ret && arg1 == arg2 runtime check. */ |
13340 | gcc_assert (TREE_CODE (arg2) != INTEGER_CST); |
13341 | if (TREE_CODE (arg1) == INTEGER_CST) |
13342 | return fold_build2 (EQ_EXPR, boolean_type_node, arg1, |
13343 | fold_convert (TREE_TYPE (arg1), arg2)); |
13344 | ret = fold_build2 (LE_EXPR, boolean_type_node, |
13345 | fold_convert (pointer_sized_int_node, arg1), |
13346 | fold_convert (pointer_sized_int_node, ret)); |
13347 | return fold_build2 (TRUTH_AND_EXPR, boolean_type_node, ret, |
13348 | fold_build2 (EQ_EXPR, boolean_type_node, arg1, |
13349 | fold_convert (TREE_TYPE (arg1), arg2))); |
13350 | } |
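
/* Editorial usage sketch (not part of the original sources; the class names
   are invented):

     struct A { int a; char c; };
     struct B { int b; short s; };
     static_assert (std::is_corresponding_member (&A::a, &B::b), "");
     static_assert (!std::is_corresponding_member (&A::c, &B::s), "");

   a and b lie in the common initial sequence of two standard-layout classes
   and have layout-compatible types, so they are corresponding members;
   c and s are not layout compatible, so they are not.  */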
13351 | |
13352 | /* [basic.types] 8. True iff TYPE is an object type. */ |
13353 | |
13354 | static bool |
13355 | object_type_p (const_tree type) |
13356 | { |
13357 | return (TREE_CODE (type) != FUNCTION_TYPE |
13358 | && !TYPE_REF_P (type) |
13359 | && !VOID_TYPE_P (type)); |
13360 | } |
13361 | |
13362 | /* Actually evaluates the trait. */ |
13363 | |
13364 | static bool |
13365 | trait_expr_value (cp_trait_kind kind, tree type1, tree type2) |
13366 | { |
13367 | enum tree_code type_code1; |
13368 | tree t; |
13369 | |
13370 | type_code1 = TREE_CODE (type1); |
13371 | |
13372 | switch (kind) |
13373 | { |
13374 | case CPTK_HAS_NOTHROW_ASSIGN: |
13375 | type1 = strip_array_types (type1);
13376 | return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
13377 | && (trait_expr_value (CPTK_HAS_TRIVIAL_ASSIGN, type1, type2)
13378 | || (CLASS_TYPE_P (type1)
13379 | && classtype_has_nothrow_assign_or_copy_p (type1,
13380 | true))));
13381 | |
13382 | case CPTK_HAS_NOTHROW_CONSTRUCTOR: |
13383 | type1 = strip_array_types (type1);
13384 | return (trait_expr_value (CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
13385 | || (CLASS_TYPE_P (type1) |
13386 | && (t = locate_ctor (type1)) |
13387 | && maybe_instantiate_noexcept (t) |
13388 | && TYPE_NOTHROW_P (TREE_TYPE (t)))); |
13389 | |
13390 | case CPTK_HAS_NOTHROW_COPY: |
13391 | type1 = strip_array_types (type1);
13392 | return (trait_expr_value (CPTK_HAS_TRIVIAL_COPY, type1, type2)
13393 | || (CLASS_TYPE_P (type1)
13394 | && classtype_has_nothrow_assign_or_copy_p (type1, false)));
13395 | |
13396 | case CPTK_HAS_TRIVIAL_ASSIGN: |
13397 | /* ??? The standard seems to be missing the "or array of such a class |
13398 | type" wording for this trait. */ |
13399 | type1 = strip_array_types (type1);
13400 | return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE |
13401 | && (trivial_type_p (type1) |
13402 | || (CLASS_TYPE_P (type1) |
13403 | && TYPE_HAS_TRIVIAL_COPY_ASSIGN (type1)))); |
13404 | |
13405 | case CPTK_HAS_TRIVIAL_CONSTRUCTOR: |
13406 | type1 = strip_array_types (type1);
13407 | return (trivial_type_p (type1) |
13408 | || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1))); |
13409 | |
13410 | case CPTK_HAS_TRIVIAL_COPY: |
13411 | /* ??? The standard seems to be missing the "or array of such a class |
13412 | type" wording for this trait. */ |
13413 | type1 = strip_array_types (type1);
13414 | return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE |
13415 | || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_COPY_CTOR (type1))); |
13416 | |
13417 | case CPTK_HAS_TRIVIAL_DESTRUCTOR: |
13418 | type1 = strip_array_types (type1);
13419 | if (CLASS_TYPE_P (type1) && type_build_dtor_call (type1)) |
13420 | { |
13421 | deferring_access_check_sentinel dacs (dk_no_check); |
13422 | cp_unevaluated un; |
13423 | tree fn = get_dtor (type1, tf_none); |
13424 | if (!fn && !seen_error ()) |
13425 | warning (0, "checking %qs for type %qT with a destructor that " |
13426 | "cannot be called", "__has_trivial_destructor", type1); |
13427 | } |
13428 | return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE |
13429 | || (CLASS_TYPE_P (type1) |
13430 | && TYPE_HAS_TRIVIAL_DESTRUCTOR (type1))); |
13431 | |
13432 | case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS: |
13433 | return type_has_unique_obj_representations (type1); |
13434 | |
13435 | case CPTK_HAS_VIRTUAL_DESTRUCTOR: |
13436 | return type_has_virtual_destructor (type1); |
13437 | |
13438 | case CPTK_IS_ABSTRACT: |
13439 | return ABSTRACT_CLASS_TYPE_P (type1); |
13440 | |
13441 | case CPTK_IS_AGGREGATE: |
13442 | return CP_AGGREGATE_TYPE_P (type1); |
13443 | |
13444 | case CPTK_IS_ARRAY: |
13445 | return (type_code1 == ARRAY_TYPE |
13446 | /* We don't want to report T[0] as being an array type. |
13447 | This is for compatibility with an implementation of |
13448 | std::is_array by template argument deduction, because |
13449 | compute_array_index_type_loc rejects a zero-size array |
13450 | in SFINAE context. */ |
13451 | && !(TYPE_SIZE (type1) && integer_zerop (TYPE_SIZE (type1)))); |
13452 | |
13453 | case CPTK_IS_ASSIGNABLE: |
13454 | return is_xible (MODIFY_EXPR, type1, type2); |
13455 | |
13456 | case CPTK_IS_BASE_OF: |
13457 | return (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2) |
13458 | && (same_type_ignoring_top_level_qualifiers_p (type1, type2) |
13459 | || DERIVED_FROM_P (type1, type2))); |
13460 | |
13461 | case CPTK_IS_BOUNDED_ARRAY: |
13462 | return (type_code1 == ARRAY_TYPE |
13463 | && TYPE_DOMAIN (type1) |
13464 | /* We don't want to report T[0] as being a bounded array type. |
13465 | This is for compatibility with an implementation of |
13466 | std::is_bounded_array by template argument deduction, because |
13467 | compute_array_index_type_loc rejects a zero-size array |
13468 | in SFINAE context. */ |
13469 | && !(TYPE_SIZE (type1) && integer_zerop (TYPE_SIZE (type1)))); |
13470 | |
13471 | case CPTK_IS_CLASS: |
13472 | return NON_UNION_CLASS_TYPE_P (type1); |
13473 | |
13474 | case CPTK_IS_CONST: |
13475 | return CP_TYPE_CONST_P (type1); |
13476 | |
13477 | case CPTK_IS_CONSTRUCTIBLE: |
13478 | return is_xible (INIT_EXPR, type1, type2); |
13479 | |
13480 | case CPTK_IS_CONVERTIBLE: |
13481 | return is_convertible (type1, type2); |
13482 | |
13483 | case CPTK_IS_DESTRUCTIBLE: |
13484 | return is_xible (BIT_NOT_EXPR, type1, NULL_TREE); |
13485 | |
13486 | case CPTK_IS_EMPTY: |
13487 | return NON_UNION_CLASS_TYPE_P (type1) && CLASSTYPE_EMPTY_P (type1); |
13488 | |
13489 | case CPTK_IS_ENUM: |
13490 | return type_code1 == ENUMERAL_TYPE; |
13491 | |
13492 | case CPTK_IS_FINAL: |
13493 | return CLASS_TYPE_P (type1) && CLASSTYPE_FINAL (type1); |
13494 | |
13495 | case CPTK_IS_FUNCTION: |
13496 | return type_code1 == FUNCTION_TYPE; |
13497 | |
13498 | case CPTK_IS_INVOCABLE: |
13499 | return !error_operand_p (build_invoke (type1, type2, tf_none));
13500 | |
13501 | case CPTK_IS_LAYOUT_COMPATIBLE: |
13502 | return layout_compatible_type_p (type1, type2); |
13503 | |
13504 | case CPTK_IS_LITERAL_TYPE: |
13505 | return literal_type_p (type1); |
13506 | |
13507 | case CPTK_IS_MEMBER_FUNCTION_POINTER: |
13508 | return TYPE_PTRMEMFUNC_P (type1); |
13509 | |
13510 | case CPTK_IS_MEMBER_OBJECT_POINTER: |
13511 | return TYPE_PTRDATAMEM_P (type1); |
13512 | |
13513 | case CPTK_IS_MEMBER_POINTER: |
13514 | return TYPE_PTRMEM_P (type1); |
13515 | |
13516 | case CPTK_IS_NOTHROW_ASSIGNABLE: |
13517 | return is_nothrow_xible (MODIFY_EXPR, type1, type2); |
13518 | |
13519 | case CPTK_IS_NOTHROW_CONSTRUCTIBLE: |
13520 | return is_nothrow_xible (INIT_EXPR, type1, type2); |
13521 | |
13522 | case CPTK_IS_NOTHROW_CONVERTIBLE: |
13523 | return is_nothrow_convertible (type1, type2); |
13524 | |
13525 | case CPTK_IS_NOTHROW_DESTRUCTIBLE: |
13526 | return is_nothrow_xible (BIT_NOT_EXPR, type1, NULL_TREE); |
13527 | |
13528 | case CPTK_IS_NOTHROW_INVOCABLE: |
13529 | return expr_noexcept_p (build_invoke (type1, type2, tf_none), tf_none); |
13530 | |
13531 | case CPTK_IS_OBJECT: |
13532 | return object_type_p (type1);
13533 | |
13534 | case CPTK_IS_POINTER_INTERCONVERTIBLE_BASE_OF: |
13535 | return pointer_interconvertible_base_of_p (type1, type2);
13536 | |
13537 | case CPTK_IS_POD: |
13538 | return pod_type_p (type1); |
13539 | |
13540 | case CPTK_IS_POINTER: |
13541 | return TYPE_PTR_P (type1); |
13542 | |
13543 | case CPTK_IS_POLYMORPHIC: |
13544 | return CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1); |
13545 | |
13546 | case CPTK_IS_REFERENCE: |
13547 | return type_code1 == REFERENCE_TYPE; |
13548 | |
13549 | case CPTK_IS_SAME: |
13550 | return same_type_p (type1, type2); |
13551 | |
13552 | case CPTK_IS_SCOPED_ENUM: |
13553 | return SCOPED_ENUM_P (type1); |
13554 | |
13555 | case CPTK_IS_STD_LAYOUT: |
13556 | return std_layout_type_p (type1); |
13557 | |
13558 | case CPTK_IS_TRIVIAL: |
13559 | return trivial_type_p (type1); |
13560 | |
13561 | case CPTK_IS_TRIVIALLY_ASSIGNABLE: |
13562 | return is_trivially_xible (MODIFY_EXPR, type1, type2); |
13563 | |
13564 | case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE: |
13565 | return is_trivially_xible (INIT_EXPR, type1, type2); |
13566 | |
13567 | case CPTK_IS_TRIVIALLY_COPYABLE: |
13568 | return trivially_copyable_p (type1); |
13569 | |
13570 | case CPTK_IS_TRIVIALLY_DESTRUCTIBLE: |
13571 | return is_trivially_xible (BIT_NOT_EXPR, type1, NULL_TREE); |
13572 | |
13573 | case CPTK_IS_UNBOUNDED_ARRAY: |
13574 | return array_of_unknown_bound_p (type1); |
13575 | |
13576 | case CPTK_IS_UNION: |
13577 | return type_code1 == UNION_TYPE; |
13578 | |
13579 | case CPTK_IS_VIRTUAL_BASE_OF: |
13580 | return (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2) |
13581 | && lookup_base (type2, type1, ba_require_virtual, |
13582 | NULL, tf_none) != NULL_TREE); |
13583 | |
13584 | case CPTK_IS_VOLATILE: |
13585 | return CP_TYPE_VOLATILE_P (type1); |
13586 | |
13587 | case CPTK_REF_CONSTRUCTS_FROM_TEMPORARY: |
13588 | return ref_xes_from_temporary (type1, type2, /*direct_init=*/true); |
13589 | |
13590 | case CPTK_REF_CONVERTS_FROM_TEMPORARY: |
13591 | return ref_xes_from_temporary (type1, type2, /*direct_init=*/false); |
13592 | |
13593 | case CPTK_IS_DEDUCIBLE: |
13594 | return type_targs_deducible_from (type1, type2); |
13595 | |
13596 | /* __array_rank is handled in finish_trait_expr. */ |
13597 | case CPTK_RANK: |
13598 | gcc_unreachable (); |
13599 | |
13600 | #define DEFTRAIT_TYPE(CODE, NAME, ARITY) \ |
13601 | case CPTK_##CODE: |
13602 | #include "cp-trait.def" |
13603 | #undef DEFTRAIT_TYPE |
13604 | /* Type-yielding traits are handled in finish_trait_type. */ |
13605 | break; |
13606 | } |
13607 | |
13608 | gcc_unreachable (); |
13609 | } |
13610 | |
13611 | /* Returns true if TYPE meets the requirements for the specified KIND, |
13612 | false otherwise. |
13613 | |
13614 | When KIND == 1, TYPE must be an array of unknown bound, |
13615 | or (possibly cv-qualified) void, or a complete type. |
13616 | |
13617 | When KIND == 2, TYPE must be a complete type, or array of complete type, |
13618 | or (possibly cv-qualified) void. |
13619 | |
13620 | When KIND == 3: |
13621 | If TYPE is a non-union class type, it must be complete. |
13622 | |
13623 | When KIND == 4: |
13624 | If TYPE is a class type, it must be complete. */ |
13625 | |
13626 | static bool |
13627 | check_trait_type (tree type, int kind = 1) |
13628 | { |
13629 | if (type == NULL_TREE) |
13630 | return true; |
13631 | |
13632 | if (TREE_CODE (type) == TREE_VEC) |
13633 | { |
13634 | for (tree arg : tree_vec_range (type)) |
13635 | if (!check_trait_type (arg, kind))
13636 | return false; |
13637 | return true; |
13638 | } |
13639 | |
13640 | if (kind == 1 && TREE_CODE (type) == ARRAY_TYPE && !TYPE_DOMAIN (type)) |
13641 | return true; // Array of unknown bound. Don't care about completeness. |
13642 | |
13643 | if (kind == 3 && !NON_UNION_CLASS_TYPE_P (type)) |
13644 | return true; // Not a non-union class type. Don't care about completeness. |
13645 | |
13646 | if (kind == 4 && TREE_CODE (type) == ARRAY_TYPE) |
13647 | return true; // Not a class type. Don't care about completeness. |
13648 | |
13649 | if (VOID_TYPE_P (type)) |
13650 | return true; |
13651 | |
13652 | type = complete_type (strip_array_types (type)); |
13653 | if (!COMPLETE_TYPE_P (type) |
13654 | && cxx_incomplete_type_diagnostic (NULL_TREE, type, DK_PERMERROR)
13655 | && !flag_permissive) |
13656 | return false; |
13657 | return true; |
13658 | } |
13659 | |
13660 | /* True iff the conversion (if any) would be a direct reference |
13661 | binding, not requiring complete types. This is LWG2939. */ |
13662 | |
13663 | static bool |
13664 | same_type_ref_bind_p (cp_trait_kind kind, tree type1, tree type2) |
13665 | { |
13666 | tree from, to; |
13667 | switch (kind) |
13668 | { |
13669 | /* These put the target type first. */ |
13670 | case CPTK_IS_CONSTRUCTIBLE: |
13671 | case CPTK_IS_NOTHROW_CONSTRUCTIBLE: |
13672 | case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE: |
13673 | case CPTK_IS_INVOCABLE: |
13674 | case CPTK_IS_NOTHROW_INVOCABLE: |
13675 | case CPTK_REF_CONSTRUCTS_FROM_TEMPORARY: |
13676 | case CPTK_REF_CONVERTS_FROM_TEMPORARY: |
13677 | to = type1; |
13678 | from = type2; |
13679 | break; |
13680 | |
13681 | /* These put it second. */ |
13682 | case CPTK_IS_CONVERTIBLE: |
13683 | case CPTK_IS_NOTHROW_CONVERTIBLE: |
13684 | to = type2; |
13685 | from = type1; |
13686 | break; |
13687 | |
13688 | default: |
13689 | gcc_unreachable (); |
13690 | } |
13691 | |
13692 | if (TREE_CODE (to) != REFERENCE_TYPE || !from) |
13693 | return false; |
13694 | if (TREE_CODE (from) == TREE_VEC && TREE_VEC_LENGTH (from) == 1) |
13695 | from = TREE_VEC_ELT (from, 0); |
13696 | return (TYPE_P (from) |
13697 | && (same_type_ignoring_top_level_qualifiers_p |
13698 | (non_reference (to), non_reference (from)))); |
13699 | } |
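
/* Editorial sketch (not part of the original sources) of the LWG2939 case
   recognized above: a direct reference binding between the same type must
   not require that type to be complete.

     struct Incomplete;   // never defined
     static_assert (std::is_constructible_v<Incomplete &, Incomplete &>, "");

   Without the same_type_ref_bind_p short-circuit in finish_trait_expr,
   check_trait_type would demand a complete type and reject the trait.  */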
13700 | |
13701 | /* [defns.referenceable] True iff TYPE is a referenceable type. */ |
13702 | |
13703 | static bool |
13704 | referenceable_type_p (const_tree type) |
13705 | { |
13706 | return (TYPE_REF_P (type) |
13707 | || object_type_p (type) |
13708 | || (FUNC_OR_METHOD_TYPE_P (type) |
13709 | && (type_memfn_quals (type) == TYPE_UNQUALIFIED |
13710 | && type_memfn_rqual (type) == REF_QUAL_NONE))); |
13711 | } |
13712 | |
13713 | /* Process a trait expression. */ |
13714 | |
13715 | tree |
13716 | finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2) |
13717 | { |
13718 | if (type1 == error_mark_node |
13719 | || type2 == error_mark_node) |
13720 | return error_mark_node; |
13721 | |
13722 | if (processing_template_decl) |
13723 | { |
13724 | tree trait_expr = make_node (TRAIT_EXPR); |
13725 | if (kind == CPTK_RANK) |
13726 | TREE_TYPE (trait_expr) = size_type_node; |
13727 | else |
13728 | TREE_TYPE (trait_expr) = boolean_type_node; |
13729 | TRAIT_EXPR_TYPE1 (trait_expr) = type1; |
13730 | TRAIT_EXPR_TYPE2 (trait_expr) = type2; |
13731 | TRAIT_EXPR_KIND (trait_expr) = kind; |
13732 | TRAIT_EXPR_LOCATION (trait_expr) = loc; |
13733 | return trait_expr; |
13734 | } |
13735 | |
13736 | switch (kind) |
13737 | { |
13738 | case CPTK_HAS_NOTHROW_ASSIGN: |
13739 | case CPTK_HAS_TRIVIAL_ASSIGN: |
13740 | case CPTK_HAS_NOTHROW_CONSTRUCTOR: |
13741 | case CPTK_HAS_TRIVIAL_CONSTRUCTOR: |
13742 | case CPTK_HAS_NOTHROW_COPY: |
13743 | case CPTK_HAS_TRIVIAL_COPY: |
13744 | case CPTK_HAS_TRIVIAL_DESTRUCTOR: |
13745 | case CPTK_IS_DESTRUCTIBLE: |
13746 | case CPTK_IS_NOTHROW_DESTRUCTIBLE: |
13747 | case CPTK_IS_TRIVIALLY_DESTRUCTIBLE: |
13748 | if (!check_trait_type (type1))
13749 | return error_mark_node; |
13750 | break; |
13751 | |
13752 | case CPTK_IS_LITERAL_TYPE: |
13753 | case CPTK_IS_POD: |
13754 | case CPTK_IS_STD_LAYOUT: |
13755 | case CPTK_IS_TRIVIAL: |
13756 | case CPTK_IS_TRIVIALLY_COPYABLE: |
13757 | case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS: |
13758 | if (!check_trait_type (type1, /* kind = */ 2))
13759 | return error_mark_node; |
13760 | break; |
13761 | |
13762 | case CPTK_IS_ABSTRACT: |
13763 | case CPTK_IS_EMPTY: |
13764 | case CPTK_IS_POLYMORPHIC: |
13765 | case CPTK_HAS_VIRTUAL_DESTRUCTOR: |
13766 | if (!check_trait_type (type1, /* kind = */ 3))
13767 | return error_mark_node; |
13768 | break; |
13769 | |
13770 | /* N.B. std::is_aggregate is kind=2 but we don't need a complete element |
13771 | type to know whether an array is an aggregate, so use kind=4 here. */ |
13772 | case CPTK_IS_AGGREGATE: |
13773 | case CPTK_IS_FINAL: |
13774 | if (!check_trait_type (type1, /* kind = */ 4))
13775 | return error_mark_node; |
13776 | break; |
13777 | |
13778 | case CPTK_IS_CONSTRUCTIBLE: |
13779 | case CPTK_IS_CONVERTIBLE: |
13780 | case CPTK_IS_INVOCABLE: |
13781 | case CPTK_IS_NOTHROW_CONSTRUCTIBLE: |
13782 | case CPTK_IS_NOTHROW_CONVERTIBLE: |
13783 | case CPTK_IS_NOTHROW_INVOCABLE: |
13784 | case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE: |
13785 | case CPTK_REF_CONSTRUCTS_FROM_TEMPORARY: |
13786 | case CPTK_REF_CONVERTS_FROM_TEMPORARY: |
13787 | /* Don't check completeness for direct reference binding. */; |
13788 | if (same_type_ref_bind_p (kind, type1, type2)) |
13789 | break; |
13790 | gcc_fallthrough (); |
13791 | |
13792 | case CPTK_IS_ASSIGNABLE: |
13793 | case CPTK_IS_NOTHROW_ASSIGNABLE: |
13794 | case CPTK_IS_TRIVIALLY_ASSIGNABLE: |
13795 | if (!check_trait_type (type1)
13796 | || !check_trait_type (type2))
13797 | return error_mark_node; |
13798 | break; |
13799 | |
13800 | case CPTK_IS_BASE_OF: |
13801 | case CPTK_IS_POINTER_INTERCONVERTIBLE_BASE_OF: |
13802 | if (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2) |
13803 | && !same_type_ignoring_top_level_qualifiers_p (type1, type2) |
13804 | && !complete_type_or_else (type2, NULL_TREE)) |
13805 | /* We already issued an error. */ |
13806 | return error_mark_node; |
13807 | break; |
13808 | |
13809 | case CPTK_IS_VIRTUAL_BASE_OF: |
13810 | if (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2) |
13811 | && !complete_type_or_else (type2, NULL_TREE)) |
13812 | /* We already issued an error. */ |
13813 | return error_mark_node; |
13814 | break; |
13815 | |
13816 | case CPTK_IS_ARRAY: |
13817 | case CPTK_IS_BOUNDED_ARRAY: |
13818 | case CPTK_IS_CLASS: |
13819 | case CPTK_IS_CONST: |
13820 | case CPTK_IS_ENUM: |
13821 | case CPTK_IS_FUNCTION: |
13822 | case CPTK_IS_MEMBER_FUNCTION_POINTER: |
13823 | case CPTK_IS_MEMBER_OBJECT_POINTER: |
13824 | case CPTK_IS_MEMBER_POINTER: |
13825 | case CPTK_IS_OBJECT: |
13826 | case CPTK_IS_POINTER: |
13827 | case CPTK_IS_REFERENCE: |
13828 | case CPTK_IS_SAME: |
13829 | case CPTK_IS_SCOPED_ENUM: |
13830 | case CPTK_IS_UNBOUNDED_ARRAY: |
13831 | case CPTK_IS_UNION: |
13832 | case CPTK_IS_VOLATILE: |
13833 | case CPTK_RANK: |
13834 | break; |
13835 | |
13836 | case CPTK_IS_LAYOUT_COMPATIBLE: |
13837 | if (!array_of_unknown_bound_p (type1) |
13838 | && TREE_CODE (type1) != VOID_TYPE |
13839 | && !complete_type_or_else (type1, NULL_TREE)) |
13840 | /* We already issued an error. */ |
13841 | return error_mark_node; |
13842 | if (!array_of_unknown_bound_p (type2) |
13843 | && TREE_CODE (type2) != VOID_TYPE |
13844 | && !complete_type_or_else (type2, NULL_TREE)) |
13845 | /* We already issued an error. */ |
13846 | return error_mark_node; |
13847 | break; |
13848 | |
13849 | case CPTK_IS_DEDUCIBLE: |
13850 | if (!DECL_TYPE_TEMPLATE_P (type1)) |
13851 | { |
13852 | error ("%qD is not a class or alias template", type1); |
13853 | return error_mark_node; |
13854 | } |
13855 | break; |
13856 | |
13857 | #define DEFTRAIT_TYPE(CODE, NAME, ARITY) \ |
13858 | case CPTK_##CODE: |
13859 | #include "cp-trait.def" |
13860 | #undef DEFTRAIT_TYPE |
13861 | /* Type-yielding traits are handled in finish_trait_type. */ |
13862 | gcc_unreachable (); |
13863 | } |
13864 | |
13865 | tree val; |
13866 | if (kind == CPTK_RANK) |
13867 | { |
13868 | size_t rank = 0; |
13869 | for (; TREE_CODE (type1) == ARRAY_TYPE; type1 = TREE_TYPE (type1)) |
13870 | ++rank; |
13871 | val = build_int_cst (size_type_node, rank); |
13872 | } |
13873 | else |
13874 | val = (trait_expr_value (kind, type1, type2) |
13875 | ? boolean_true_node : boolean_false_node); |
13876 | |
13877 | return maybe_wrap_with_location (val, loc); |
13878 | } |
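
/* Editorial sketch (not part of the original sources), assuming the
   __array_rank spelling mentioned in the CPTK_RANK comment above:

     static_assert (__array_rank (int) == 0, "");
     static_assert (__array_rank (int[2][3][4]) == 3, "");

   The rank is simply the number of top-level array dimensions counted by
   the loop handling CPTK_RANK.  */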
13879 | |
13880 | /* Process a trait type. */ |
13881 | |
13882 | tree |
13883 | finish_trait_type (cp_trait_kind kind, tree type1, tree type2, |
13884 | tsubst_flags_t complain) |
13885 | { |
13886 | if (type1 == error_mark_node |
13887 | || type2 == error_mark_node) |
13888 | return error_mark_node; |
13889 | |
13890 | if (processing_template_decl) |
13891 | { |
13892 | tree type = cxx_make_type (TRAIT_TYPE); |
13893 | TRAIT_TYPE_TYPE1 (type) = type1; |
13894 | TRAIT_TYPE_TYPE2 (type) = type2; |
13895 | TRAIT_TYPE_KIND_RAW (type) = build_int_cstu (integer_type_node, kind); |
13896 | /* These traits are intended to be used in the definition of the ::type |
13897 | member of the corresponding standard library type trait and aren't |
13898 | mangleable (and thus won't appear directly in template signatures), |
13899 | so structural equality should suffice. */ |
13900 | SET_TYPE_STRUCTURAL_EQUALITY (type); |
13901 | return type; |
13902 | } |
13903 | |
13904 | switch (kind) |
13905 | { |
13906 | case CPTK_ADD_LVALUE_REFERENCE: |
13907 | /* [meta.trans.ref]. */ |
13908 | if (referenceable_type_p (type1))
13909 | return cp_build_reference_type (type1, /*rval=*/false); |
13910 | return type1; |
13911 | |
13912 | case CPTK_ADD_POINTER: |
13913 | /* [meta.trans.ptr]. */ |
13914 | if (VOID_TYPE_P (type1) || referenceable_type_p (type1))
13915 | { |
13916 | if (TYPE_REF_P (type1)) |
13917 | type1 = TREE_TYPE (type1); |
13918 | return build_pointer_type (type1); |
13919 | } |
13920 | return type1; |
13921 | |
13922 | case CPTK_ADD_RVALUE_REFERENCE: |
13923 | /* [meta.trans.ref]. */ |
13924 | if (referenceable_type_p (type1))
13925 | return cp_build_reference_type (type1, /*rval=*/true); |
13926 | return type1; |
13927 | |
13928 | case CPTK_DECAY: |
13929 | if (TYPE_REF_P (type1)) |
13930 | type1 = TREE_TYPE (type1); |
13931 | |
13932 | if (TREE_CODE (type1) == ARRAY_TYPE) |
13933 | return finish_trait_type (CPTK_ADD_POINTER, TREE_TYPE (type1), type2,
13934 | complain);
13935 | else if (TREE_CODE (type1) == FUNCTION_TYPE)
13936 | return finish_trait_type (CPTK_ADD_POINTER, type1, type2, complain);
13937 | else |
13938 | return cv_unqualified (type1); |
13939 | |
13940 | case CPTK_REMOVE_ALL_EXTENTS: |
13941 | return strip_array_types (type1);
13942 | |
13943 | case CPTK_REMOVE_CV: |
13944 | return cv_unqualified (type1); |
13945 | |
13946 | case CPTK_REMOVE_CVREF: |
13947 | if (TYPE_REF_P (type1)) |
13948 | type1 = TREE_TYPE (type1); |
13949 | return cv_unqualified (type1); |
13950 | |
13951 | case CPTK_REMOVE_EXTENT: |
13952 | if (TREE_CODE (type1) == ARRAY_TYPE) |
13953 | type1 = TREE_TYPE (type1); |
13954 | return type1; |
13955 | |
13956 | case CPTK_REMOVE_POINTER: |
13957 | if (TYPE_PTR_P (type1)) |
13958 | type1 = TREE_TYPE (type1); |
13959 | return type1; |
13960 | |
13961 | case CPTK_REMOVE_REFERENCE: |
13962 | if (TYPE_REF_P (type1)) |
13963 | type1 = TREE_TYPE (type1); |
13964 | return type1; |
13965 | |
13966 | case CPTK_TYPE_PACK_ELEMENT: |
13967 | return finish_type_pack_element (type1, type2, complain);
13968 | |
13969 | case CPTK_UNDERLYING_TYPE: |
13970 | return finish_underlying_type (type1);
13971 | |
13972 | #define DEFTRAIT_EXPR(CODE, NAME, ARITY) \ |
13973 | case CPTK_##CODE: |
13974 | #include "cp-trait.def" |
13975 | #undef DEFTRAIT_EXPR |
13976 | /* Expression-yielding traits are handled in finish_trait_expr. */ |
13977 | case CPTK_BASES: |
13978 | case CPTK_DIRECT_BASES: |
13979 | /* BASES and DIRECT_BASES are handled in finish_bases. */ |
13980 | break; |
13981 | } |
13982 | |
13983 | gcc_unreachable (); |
13984 | } |
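
/* Editorial sketch (not part of the original sources), assuming the GCC
   spellings __remove_cvref, __add_pointer and __decay for the type-yielding
   builtins dispatched above:

     static_assert (__is_same (__remove_cvref (const int &), int), "");
     static_assert (__is_same (__add_pointer (int &), int *), "");
     static_assert (__is_same (__decay (int[4]), int *), "");

   libstdc++ can use such builtins to define the ::type members of the
   corresponding standard traits, which is why the results only need
   structural equality and no mangling.  */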
13985 | |
13986 | /* Do-nothing variants of functions to handle pragma FLOAT_CONST_DECIMAL64, |
13987 | which is ignored for C++. */ |
13988 | |
13989 | void |
13990 | set_float_const_decimal64 (void) |
13991 | { |
13992 | } |
13993 | |
13994 | void |
13995 | clear_float_const_decimal64 (void) |
13996 | { |
13997 | } |
13998 | |
13999 | bool |
14000 | float_const_decimal64_p (void) |
14001 | { |
14002 | return 0; |
14003 | } |
14004 | |
14005 | |
14006 | /* Return true if T designates the implied `this' parameter. */ |
14007 | |
14008 | bool |
14009 | is_this_parameter (tree t) |
14010 | { |
14011 | if (!DECL_P (t) || DECL_NAME (t) != this_identifier) |
14012 | return false; |
14013 | gcc_assert (TREE_CODE (t) == PARM_DECL |
14014 | || (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t)) |
14015 | || (cp_binding_oracle && VAR_P (t))); |
14016 | return true; |
14017 | } |
14018 | |
14019 | /* As above, or a C++23 explicit object parameter. */ |
14020 | |
14021 | bool |
14022 | is_object_parameter (tree t) |
14023 | { |
14024 | if (is_this_parameter (t)) |
14025 | return true; |
14026 | if (TREE_CODE (t) != PARM_DECL) |
14027 | return false; |
14028 | tree ctx = DECL_CONTEXT (t); |
14029 | return (ctx && DECL_XOBJ_MEMBER_FUNCTION_P (ctx) |
14030 | && t == DECL_ARGUMENTS (ctx)); |
14031 | } |
14032 | |
14033 | /* Insert the deduced return type for an auto function. */ |
14034 | |
14035 | void |
14036 | apply_deduced_return_type (tree fco, tree return_type) |
14037 | { |
14038 | tree result; |
14039 | |
14040 | if (return_type == error_mark_node) |
14041 | return; |
14042 | |
14043 | if (DECL_CONV_FN_P (fco)) |
14044 | DECL_NAME (fco) = make_conv_op_name (return_type); |
14045 | |
14046 | TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco)); |
14047 | |
14048 | maybe_update_postconditions (fco); |
14049 | |
14050 | /* Apply the type to the result object. */ |
14051 | |
14052 | result = DECL_RESULT (fco); |
14053 | if (result == NULL_TREE) |
14054 | return; |
14055 | if (TREE_TYPE (result) == return_type) |
14056 | return; |
14057 | |
14058 | if (!processing_template_decl && !VOID_TYPE_P (return_type) |
14059 | && !complete_type_or_else (return_type, NULL_TREE)) |
14060 | return; |
14061 | |
14062 | /* We already have a DECL_RESULT from start_preparsed_function. |
14063 | Now we need to redo the work it and allocate_struct_function |
14064 | did to reflect the new type. */ |
14065 | result = build_decl (DECL_SOURCE_LOCATION (result), RESULT_DECL, NULL_TREE, |
14066 | TYPE_MAIN_VARIANT (return_type)); |
14067 | DECL_ARTIFICIAL (result) = 1; |
14068 | DECL_IGNORED_P (result) = 1; |
14069 | cp_apply_type_quals_to_decl (cp_type_quals (return_type), |
14070 | result); |
14071 | DECL_RESULT (fco) = result; |
14072 | |
14073 | if (!processing_template_decl) |
14074 | if (function *fun = DECL_STRUCT_FUNCTION (fco)) |
14075 | { |
14076 | bool aggr = aggregate_value_p (result, fco); |
14077 | #ifdef PCC_STATIC_STRUCT_RETURN |
14078 | fun->returns_pcc_struct = aggr; |
14079 | #endif |
14080 | fun->returns_struct = aggr; |
14081 | } |
14082 | } |
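
/* Editorial sketch (not part of the original sources; the function name is
   invented): once the body of an auto function has been seen, the deduced
   type replaces the placeholder in both TREE_TYPE (fco) and DECL_RESULT as
   done above.

     auto f () { return 42; }   // return type deduced as int
     static_assert (__is_same (decltype (f ()), int), "");  */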
14083 | |
14084 | /* Build a unary fold expression of EXPR over OP. DIR selects either a
14085 | right unary fold (UNARY_RIGHT_FOLD_EXPR) or a left unary fold. */
14086 | |
14087 | static tree |
14088 | finish_unary_fold_expr (location_t loc, tree expr, int op, tree_code dir) |
14089 | { |
14090 | /* Build a pack expansion (assuming expr has pack type). */ |
14091 | if (!uses_parameter_packs (expr)) |
14092 | { |
14093 | error_at (location_of (expr), "operand of fold expression has no " |
14094 | "unexpanded parameter packs"); |
14095 | return error_mark_node; |
14096 | } |
14097 | tree pack = make_pack_expansion (expr); |
14098 | |
14099 | /* Build the fold expression. */ |
14100 | tree code = build_int_cstu (integer_type_node, abs (op));
14101 | tree fold = build_min_nt_loc (loc, dir, code, pack); |
14102 | FOLD_EXPR_MODIFY_P (fold) = (op < 0); |
14103 | TREE_TYPE (fold) = build_dependent_operator_type (NULL_TREE, |
14104 | FOLD_EXPR_OP (fold), |
14105 | FOLD_EXPR_MODIFY_P (fold)); |
14106 | return fold; |
14107 | } |
14108 | |
14109 | tree |
14110 | finish_left_unary_fold_expr (location_t loc, tree expr, int op) |
14111 | { |
14112 | return finish_unary_fold_expr (loc, expr, op, UNARY_LEFT_FOLD_EXPR);
14113 | } |
14114 | |
14115 | tree |
14116 | finish_right_unary_fold_expr (location_t loc, tree expr, int op) |
14117 | { |
14118 | return finish_unary_fold_expr (loc, expr, op, UNARY_RIGHT_FOLD_EXPR);
14119 | } |
14120 | |
14121 | /* Build a binary fold expression over EXPR1 and EXPR2. The |
14122 | associativity of the fold is determined by EXPR1 and EXPR2 (whichever |
14123 | has an unexpanded parameter pack). */ |
14124 | |
14125 | static tree |
14126 | finish_binary_fold_expr (location_t loc, tree pack, tree init, |
14127 | int op, tree_code dir) |
14128 | { |
14129 | pack = make_pack_expansion (pack); |
14130 | tree code = build_int_cstu (integer_type_node, abs (op));
14131 | tree fold = build_min_nt_loc (loc, dir, code, pack, init); |
14132 | FOLD_EXPR_MODIFY_P (fold) = (op < 0); |
14133 | TREE_TYPE (fold) = build_dependent_operator_type (NULL_TREE, |
14134 | FOLD_EXPR_OP (fold), |
14135 | FOLD_EXPR_MODIFY_P (fold)); |
14136 | return fold; |
14137 | } |
14138 | |
14139 | tree |
14140 | finish_binary_fold_expr (location_t loc, tree expr1, tree expr2, int op) |
14141 | { |
14142 | // Determine which expr has an unexpanded parameter pack and |
14143 | // set the pack and initial term. |
14144 | bool pack1 = uses_parameter_packs (expr1); |
14145 | bool pack2 = uses_parameter_packs (expr2); |
14146 | if (pack1 && !pack2) |
14147 | return finish_binary_fold_expr (loc, expr1, expr2, op, BINARY_RIGHT_FOLD_EXPR);
14148 | else if (pack2 && !pack1) |
14149 | return finish_binary_fold_expr (loc, pack: expr2, init: expr1, op, dir: BINARY_LEFT_FOLD_EXPR); |
14150 | else |
14151 | { |
14152 | if (pack1) |
14153 | error ("both arguments in binary fold have unexpanded parameter packs"); |
14154 | else |
14155 | error ("no unexpanded parameter packs in binary fold"); |
14156 | } |
14157 | return error_mark_node; |
14158 | } |
14159 | |
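/* Illustrative sketch (simplified) of the binary forms dispatched
   above:

     template <typename... Ts>
     int sum_l (Ts... ts) { return (0 + ... + ts); }   // binary left fold:
                                                       //  pack on the right
     template <typename... Ts>
     int sum_r (Ts... ts) { return (ts + ... + 0); }   // binary right fold:
                                                       //  pack on the left

   Whichever operand contains the unexpanded pack becomes PACK and the
   other becomes INIT; if both or neither contain a pack, the errors
   above are issued instead.  */
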
/* Finish __builtin_launder (arg).  */

tree
finish_builtin_launder (location_t loc, tree arg, tsubst_flags_t complain)
{
  tree orig_arg = arg;
  if (!type_dependent_expression_p (arg))
    arg = decay_conversion (arg, complain);
  if (error_operand_p (arg))
    return error_mark_node;
  if (!type_dependent_expression_p (arg) && !TYPE_PTROB_P (TREE_TYPE (arg)))
    {
      error_at (loc, "type %qT of argument to %<__builtin_launder%> "
                "is not a pointer to object type", TREE_TYPE (arg));
      return error_mark_node;
    }
  if (processing_template_decl)
    arg = orig_arg;
  return build_call_expr_internal_loc (loc, IFN_LAUNDER,
                                       TREE_TYPE (arg), 1, arg);
}

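/* Illustrative usage sketch (simplified; __builtin_launder is the
   builtin behind std::launder):

     int i = 42;
     int *p = __builtin_launder (&i);     // OK: pointer to object type
     // __builtin_launder ((void *) &i);  // rejected by the check above:
     //                                   // void is not an object type

   For type-dependent arguments inside templates the checks above are
   deferred until instantiation.  */
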
/* Finish __builtin_convertvector (arg, type).  */

tree
cp_build_vec_convert (tree arg, location_t loc, tree type,
                      tsubst_flags_t complain)
{
  if (error_operand_p (type))
    return error_mark_node;
  if (error_operand_p (arg))
    return error_mark_node;

  tree ret = NULL_TREE;
  if (!type_dependent_expression_p (arg) && !dependent_type_p (type))
    ret = c_build_vec_convert (cp_expr_loc_or_input_loc (arg),
                               decay_conversion (arg, complain),
                               loc, type, (complain & tf_error) != 0);

  if (!processing_template_decl)
    return ret;

  return build_call_expr_internal_loc (loc, IFN_VEC_CONVERT, type, 1, arg);
}

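/* Illustrative usage sketch (simplified):

     typedef int v4si __attribute__ ((vector_size (16)));
     typedef float v4sf __attribute__ ((vector_size (16)));

     v4sf
     to_float (v4si x)
     {
       return __builtin_convertvector (x, v4sf);   // element-wise int -> float
     }

   The element-count and element-type checking itself is done by
   c_build_vec_convert in the C-family code; the wrapper above mainly
   defers that check while the argument or destination type is still
   dependent in a template.  */
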
/* Finish __builtin_bit_cast (type, arg).  */

tree
cp_build_bit_cast (location_t loc, tree type, tree arg,
                   tsubst_flags_t complain)
{
  if (error_operand_p (type))
    return error_mark_node;
  if (!dependent_type_p (type))
    {
      if (!complete_type_or_maybe_complain (type, NULL_TREE, complain))
        return error_mark_node;
      if (TREE_CODE (type) == ARRAY_TYPE)
        {
          /* std::bit_cast for destination ARRAY_TYPE is not possible,
             as functions may not return an array, so don't bother trying
             to support this (and then deal with VLAs etc.).  */
          error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
                    "is an array type", type);
          return error_mark_node;
        }
      if (!trivially_copyable_p (type))
        {
          error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
                    "is not trivially copyable", type);
          return error_mark_node;
        }
    }

  if (error_operand_p (arg))
    return error_mark_node;

  if (!type_dependent_expression_p (arg))
    {
      if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
        {
          /* Don't perform array-to-pointer conversion.  */
          arg = mark_rvalue_use (arg, loc, true);
          if (!complete_type_or_maybe_complain (TREE_TYPE (arg), arg, complain))
            return error_mark_node;
        }
      else
        arg = decay_conversion (arg, complain);

      if (error_operand_p (arg))
        return error_mark_node;

      if (!trivially_copyable_p (TREE_TYPE (arg)))
        {
          error_at (cp_expr_loc_or_loc (arg, loc),
                    "%<__builtin_bit_cast%> source type %qT "
                    "is not trivially copyable", TREE_TYPE (arg));
          return error_mark_node;
        }
      if (!dependent_type_p (type)
          && !cp_tree_equal (TYPE_SIZE_UNIT (type),
                             TYPE_SIZE_UNIT (TREE_TYPE (arg))))
        {
          error_at (loc, "%<__builtin_bit_cast%> source size %qE "
                    "not equal to destination type size %qE",
                    TYPE_SIZE_UNIT (TREE_TYPE (arg)),
                    TYPE_SIZE_UNIT (type));
          return error_mark_node;
        }
    }

  tree ret = build_min (BIT_CAST_EXPR, type, arg);
  SET_EXPR_LOCATION (ret, loc);

  if (!processing_template_decl && CLASS_TYPE_P (type))
    ret = get_target_expr (ret, complain);

  return ret;
}

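/* Illustrative usage sketch (simplified; __builtin_bit_cast is the
   builtin behind std::bit_cast).  On targets where float and unsigned
   are both 4 bytes:

     float f = 1.0f;
     unsigned u = __builtin_bit_cast (unsigned, f);   // OK: same size,
                                                      // both trivially copyable
     // __builtin_bit_cast (double, f);  // rejected above:
     //                                  // sizeof (double) != sizeof (float)

   Array destination types, non-trivially-copyable types on either side
   and size mismatches are all diagnosed above; the BIT_CAST_EXPR built
   here is evaluated later.  */
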
/* Diagnose invalid #pragma GCC unroll argument and adjust
   it if needed.  */

tree
cp_check_pragma_unroll (location_t loc, tree unroll)
{
  HOST_WIDE_INT lunroll = 0;
  if (type_dependent_expression_p (unroll))
    ;
  else if (!INTEGRAL_TYPE_P (TREE_TYPE (unroll))
           || (!value_dependent_expression_p (unroll)
               && (!tree_fits_shwi_p (unroll)
                   || (lunroll = tree_to_shwi (unroll)) < 0
                   || lunroll >= USHRT_MAX)))
    {
      error_at (loc, "%<#pragma GCC unroll%> requires an"
                " assignment-expression that evaluates to a non-negative"
                " integral constant less than %u", USHRT_MAX);
      unroll = integer_one_node;
    }
  else if (TREE_CODE (unroll) == INTEGER_CST)
    {
      unroll = fold_convert (integer_type_node, unroll);
      if (integer_zerop (unroll))
        unroll = integer_one_node;
    }
  return unroll;
}

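/* Illustrative usage sketch (simplified):

     void
     f (int *a)
     {
     #pragma GCC unroll 4
       for (int i = 0; i < 64; i++)
         a[i] = 0;
     }

   An argument of 0 is treated like 1, negative or overly large values
   are diagnosed, and a dependent argument in a template is passed
   through unchanged here.  */
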
#include "gt-cp-semantics.h"