1 | /* Nested function decomposition for GIMPLE. |
2 | Copyright (C) 2004-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify |
7 | it under the terms of the GNU General Public License as published by |
8 | the Free Software Foundation; either version 3, or (at your option) |
9 | any later version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
14 | GNU General Public License for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | #include "config.h" |
21 | #include "system.h" |
22 | #include "coretypes.h" |
23 | #include "backend.h" |
24 | #include "target.h" |
25 | #include "rtl.h" |
26 | #include "tree.h" |
27 | #include "gimple.h" |
28 | #include "memmodel.h" |
29 | #include "tm_p.h" |
30 | #include "stringpool.h" |
31 | #include "cgraph.h" |
32 | #include "fold-const.h" |
33 | #include "stor-layout.h" |
34 | #include "dumpfile.h" |
35 | #include "tree-inline.h" |
36 | #include "gimplify.h" |
37 | #include "gimple-iterator.h" |
38 | #include "gimple-walk.h" |
39 | #include "tree-cfg.h" |
40 | #include "explow.h" |
41 | #include "langhooks.h" |
42 | #include "gimple-low.h" |
43 | #include "gomp-constants.h" |
44 | #include "diagnostic.h" |
45 | #include "alloc-pool.h" |
46 | #include "tree-nested.h" |
47 | #include "symbol-summary.h" |
48 | #include "symtab-thunks.h" |
49 | |
/* Summary of nested functions.  Lazily allocated by
   nested_function_info::get_create and freed again by
   nested_function_info::release; NULL while no summary exists.  */
static function_summary <nested_function_info *>
	*nested_function_sum = NULL;
53 | |
54 | /* Return nested_function_info, if available. */ |
55 | nested_function_info * |
56 | nested_function_info::get (cgraph_node *node) |
57 | { |
58 | if (!nested_function_sum) |
59 | return NULL; |
60 | return nested_function_sum->get (node); |
61 | } |
62 | |
63 | /* Return nested_function_info possibly creating new one. */ |
64 | nested_function_info * |
65 | nested_function_info::get_create (cgraph_node *node) |
66 | { |
67 | if (!nested_function_sum) |
68 | { |
69 | nested_function_sum = new function_summary <nested_function_info *> |
70 | (symtab); |
71 | nested_function_sum->disable_insertion_hook (); |
72 | } |
73 | return nested_function_sum->get_create (node); |
74 | } |
75 | |
76 | /* cgraph_node is no longer nested function; update cgraph accordingly. */ |
77 | void |
78 | unnest_function (cgraph_node *node) |
79 | { |
80 | nested_function_info *info = nested_function_info::get (node); |
81 | cgraph_node **node2 = &nested_function_info::get |
82 | (node: nested_function_origin (node))->nested; |
83 | |
84 | gcc_checking_assert (info->origin); |
85 | while (*node2 != node) |
86 | node2 = &nested_function_info::get (node: *node2)->next_nested; |
87 | *node2 = info->next_nested; |
88 | info->next_nested = NULL; |
89 | info->origin = NULL; |
90 | nested_function_sum->remove (node); |
91 | } |
92 | |
/* Destructor: unlink function from nested function lists.  */
nested_function_info::~nested_function_info ()
{
  cgraph_node *next;
  /* Detach every child nested function: clear its back-pointer to us
     and break the sibling chain.  */
  for (cgraph_node *n = nested; n; n = next)
    {
      nested_function_info *info = nested_function_info::get (node: n);
      next = info->next_nested;
      info->origin = NULL;
      info->next_nested = NULL;
    }
  nested = NULL;
  /* If this function is itself nested, remove it from the parent's
     singly linked list of nested functions.  */
  if (origin)
    {
      cgraph_node **node2
	 = &nested_function_info::get (node: origin)->nested;

      nested_function_info *info;
      /* Walk the sibling chain until the link pointing at this info is
	 found; also stop if an entry has no info (info == NULL), which
	 can happen while summaries are being torn down — TODO confirm.  */
      while ((info = nested_function_info::get (node: *node2)) != this && info)
	node2 = &info->next_nested;
      *node2 = next_nested;
    }
}
116 | |
117 | /* Free nested function info summaries. */ |
118 | void |
119 | nested_function_info::release () |
120 | { |
121 | if (nested_function_sum) |
122 | delete (nested_function_sum); |
123 | nested_function_sum = NULL; |
124 | } |
125 | |
126 | /* If NODE is nested function, record it. */ |
127 | void |
128 | maybe_record_nested_function (cgraph_node *node) |
129 | { |
130 | /* All nested functions gets lowered during the construction of symtab. */ |
131 | if (symtab->state > CONSTRUCTION) |
132 | return; |
133 | if (DECL_CONTEXT (node->decl) |
134 | && TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL) |
135 | { |
136 | cgraph_node *origin = cgraph_node::get_create (DECL_CONTEXT (node->decl)); |
137 | nested_function_info *info = nested_function_info::get_create (node); |
138 | nested_function_info *origin_info |
139 | = nested_function_info::get_create (node: origin); |
140 | |
141 | info->origin = origin; |
142 | info->next_nested = origin_info->nested; |
143 | origin_info->nested = node; |
144 | } |
145 | } |
146 | |
147 | /* The object of this pass is to lower the representation of a set of nested |
148 | functions in order to expose all of the gory details of the various |
149 | nonlocal references. We want to do this sooner rather than later, in |
150 | order to give us more freedom in emitting all of the functions in question. |
151 | |
152 | Back in olden times, when gcc was young, we developed an insanely |
153 | complicated scheme whereby variables which were referenced nonlocally |
154 | were forced to live in the stack of the declaring function, and then |
155 | the nested functions magically discovered where these variables were |
156 | placed. In order for this scheme to function properly, it required |
157 | that the outer function be partially expanded, then we switch to |
158 | compiling the inner function, and once done with those we switch back |
159 | to compiling the outer function. Such delicate ordering requirements |
   make it difficult to do whole translation unit optimizations
161 | involving such functions. |
162 | |
163 | The implementation here is much more direct. Everything that can be |
164 | referenced by an inner function is a member of an explicitly created |
165 | structure herein called the "nonlocal frame struct". The incoming |
166 | static chain for a nested function is a pointer to this struct in |
167 | the parent. In this way, we settle on known offsets from a known |
168 | base, and so are decoupled from the logic that places objects in the |
169 | function's stack frame. More importantly, we don't have to wait for |
170 | that to happen -- since the compilation of the inner function is no |
171 | longer tied to a real stack frame, the nonlocal frame struct can be |
172 | allocated anywhere. Which means that the outer function is now |
173 | inlinable. |
174 | |
175 | Theory of operation here is very simple. Iterate over all the |
176 | statements in all the functions (depth first) several times, |
177 | allocating structures and fields on demand. In general we want to |
178 | examine inner functions first, so that we can avoid making changes |
179 | to outer functions which are unnecessary. |
180 | |
181 | The order of the passes matters a bit, in that later passes will be |
182 | skipped if it is discovered that the functions don't actually interact |
183 | at all. That is, they're nested in the lexical sense but could have |
184 | been written as independent functions without change. */ |
185 | |
186 | |
/* Per-function state for the nested-function lowering pass.  One of
   these exists for every function in the nesting tree.  */
struct nesting_info
{
  /* Nesting-tree links: enclosing function, first function nested
     directly inside this one, and next sibling at the same depth.  */
  struct nesting_info *outer;
  struct nesting_info *inner;
  struct nesting_info *next;

  /* Maps a non-locally referenced DECL to its FIELD_DECL in the
     non-local frame struct (see lookup_field_for_decl).  */
  hash_map<tree, tree> *field_map;
  /* Maps a nested FUNCTION_DECL to a TREE_LIST whose TREE_PURPOSE is
     its trampoline field and TREE_VALUE its descriptor field (see
     lookup_tramp_for_decl / lookup_descr_for_decl).  */
  hash_map<tree, tree> *var_map;
  /* Set of operand slots already rewritten — presumably MEM_REF
     operands; not used in this part of the file, verify at uses.  */
  hash_set<tree *> *mem_refs;
  /* Bitmap of DECL uids whose expansion is suppressed — see uses
     later in the file.  */
  bitmap suppress_expansion;

  /* The FUNCTION_DECL this record describes.  */
  tree context;
  /* Chain of temporaries created while rewriting this function, to be
     declared in its body later (see create_tmp_var_for).  */
  tree new_local_var_chain;
  /* Chain of debug-only replacement variables — see uses later.  */
  tree debug_var_chain;
  /* Lazily created frame machinery: the frame RECORD_TYPE, its
     VAR_DECL instance, the field holding the enclosing frame pointer,
     the static-chain PARM_DECL, and the nonlocal-goto save buffer
     field.  All NULL until first requested.  */
  tree frame_type;
  tree frame_decl;
  tree chain_field;
  tree chain_decl;
  tree nl_goto_field;

  /* True if CONTEXT is a thunk.  */
  bool thunk_p;
  /* True if some PARM_DECL got a field in the frame struct.  */
  bool any_parm_remapped;
  /* True if a trampoline / descriptor field was created here.  */
  bool any_tramp_created;
  bool any_descr_created;
  /* Flags recording where a static chain was added — see uses.  */
  char static_chain_added;
};
213 | |
214 | |
215 | /* Iterate over the nesting tree, starting with ROOT, depth first. */ |
216 | |
217 | static inline struct nesting_info * |
218 | iter_nestinfo_start (struct nesting_info *root) |
219 | { |
220 | while (root->inner) |
221 | root = root->inner; |
222 | return root; |
223 | } |
224 | |
225 | static inline struct nesting_info * |
226 | iter_nestinfo_next (struct nesting_info *node) |
227 | { |
228 | if (node->next) |
229 | return iter_nestinfo_start (root: node->next); |
230 | return node->outer; |
231 | } |
232 | |
/* Visit each nesting_info in the tree rooted at ROOT, depth first,
   binding it to I.  */
#define FOR_EACH_NEST_INFO(I, ROOT) \
  for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))

/* Obstack used for the bitmaps in the struct above.  */
static struct bitmap_obstack nesting_info_bitmap_obstack;
238 | |
239 | |
240 | /* We're working in so many different function contexts simultaneously, |
241 | that create_tmp_var is dangerous. Prevent mishap. */ |
242 | #define create_tmp_var cant_use_create_tmp_var_here_dummy |
243 | |
244 | /* Like create_tmp_var, except record the variable for registration at |
245 | the given nesting level. */ |
246 | |
247 | static tree |
248 | create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix) |
249 | { |
250 | tree tmp_var; |
251 | |
252 | /* If the type is of variable size or a type which must be created by the |
253 | frontend, something is wrong. Note that we explicitly allow |
254 | incomplete types here, since we create them ourselves here. */ |
255 | gcc_assert (!TREE_ADDRESSABLE (type)); |
256 | gcc_assert (!TYPE_SIZE_UNIT (type) |
257 | || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST); |
258 | |
259 | tmp_var = create_tmp_var_raw (type, prefix); |
260 | DECL_CONTEXT (tmp_var) = info->context; |
261 | DECL_CHAIN (tmp_var) = info->new_local_var_chain; |
262 | DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1; |
263 | |
264 | info->new_local_var_chain = tmp_var; |
265 | |
266 | return tmp_var; |
267 | } |
268 | |
269 | /* Like build_simple_mem_ref, but set TREE_THIS_NOTRAP on the result. */ |
270 | |
271 | static tree |
272 | build_simple_mem_ref_notrap (tree ptr) |
273 | { |
274 | tree t = build_simple_mem_ref (ptr); |
275 | TREE_THIS_NOTRAP (t) = 1; |
276 | return t; |
277 | } |
278 | |
279 | /* Take the address of EXP to be used within function CONTEXT. |
280 | Mark it for addressability as necessary. */ |
281 | |
282 | tree |
283 | build_addr (tree exp) |
284 | { |
285 | mark_addressable (exp); |
286 | return build_fold_addr_expr (exp); |
287 | } |
288 | |
/* Insert FIELD into TYPE, sorted by alignment requirements.  */

void
insert_field_into_struct (tree type, tree field)
{
  tree *p;

  DECL_CONTEXT (field) = type;

  /* Insert FIELD before the first existing field whose alignment does
     not exceed FIELD's, keeping the list ordered by decreasing
     alignment.  */
  for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
    if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
      break;

  DECL_CHAIN (field) = *p;
  *p = field;

  /* Set correct alignment for frame struct type.  */
  if (TYPE_ALIGN (type) < DECL_ALIGN (field))
    SET_TYPE_ALIGN (type, DECL_ALIGN (field));
}
309 | |
310 | /* Build or return the RECORD_TYPE that describes the frame state that is |
311 | shared between INFO->CONTEXT and its nested functions. This record will |
312 | not be complete until finalize_nesting_tree; up until that point we'll |
313 | be adding fields as necessary. |
314 | |
315 | We also build the DECL that represents this frame in the function. */ |
316 | |
317 | static tree |
318 | get_frame_type (struct nesting_info *info) |
319 | { |
320 | tree type = info->frame_type; |
321 | if (!type) |
322 | { |
323 | char *name; |
324 | |
325 | type = make_node (RECORD_TYPE); |
326 | |
327 | name = concat ("FRAME." , |
328 | IDENTIFIER_POINTER (DECL_NAME (info->context)), |
329 | NULL); |
330 | TYPE_NAME (type) = get_identifier (name); |
331 | free (ptr: name); |
332 | |
333 | info->frame_type = type; |
334 | |
335 | /* Do not put info->frame_decl on info->new_local_var_chain, |
336 | so that we can declare it in the lexical blocks, which |
337 | makes sure virtual regs that end up appearing in its RTL |
338 | expression get substituted in instantiate_virtual_regs. */ |
339 | info->frame_decl = create_tmp_var_raw (type, "FRAME" ); |
340 | DECL_CONTEXT (info->frame_decl) = info->context; |
341 | DECL_NONLOCAL_FRAME (info->frame_decl) = 1; |
342 | DECL_SEEN_IN_BIND_EXPR_P (info->frame_decl) = 1; |
343 | |
344 | /* ??? Always make it addressable for now, since it is meant to |
345 | be pointed to by the static chain pointer. This pessimizes |
346 | when it turns out that no static chains are needed because |
347 | the nested functions referencing non-local variables are not |
348 | reachable, but the true pessimization is to create the non- |
349 | local frame structure in the first place. */ |
350 | TREE_ADDRESSABLE (info->frame_decl) = 1; |
351 | } |
352 | |
353 | return type; |
354 | } |
355 | |
356 | /* Return true if DECL should be referenced by pointer in the non-local frame |
357 | structure. */ |
358 | |
359 | static bool |
360 | use_pointer_in_frame (tree decl) |
361 | { |
362 | if (TREE_CODE (decl) == PARM_DECL) |
363 | { |
364 | /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable- |
365 | sized DECLs, and inefficient to copy large aggregates. Don't bother |
366 | moving anything but scalar parameters. */ |
367 | return AGGREGATE_TYPE_P (TREE_TYPE (decl)); |
368 | } |
369 | else |
370 | { |
371 | /* Variable-sized DECLs can only come from OMP clauses at this point |
372 | since the gimplifier has already turned the regular variables into |
373 | pointers. Do the same as the gimplifier. */ |
374 | return !DECL_SIZE (decl) || TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST; |
375 | } |
376 | } |
377 | |
/* Given DECL, a non-locally accessed variable, find or create a field
   in the non-local frame structure for the given nesting context.
   With NO_INSERT, return the existing field or NULL_TREE; with INSERT,
   create the field on demand.  */

static tree
lookup_field_for_decl (struct nesting_info *info, tree decl,
		       enum insert_option insert)
{
  gcc_checking_assert (decl_function_context (decl) == info->context);

  if (insert == NO_INSERT)
    {
      tree *slot = info->field_map->get (k: decl);
      return slot ? *slot : NULL_TREE;
    }

  tree *slot = &info->field_map->get_or_insert (k: decl);
  if (!*slot)
    {
      tree type = get_frame_type (info);
      tree field = make_node (FIELD_DECL);
      DECL_NAME (field) = DECL_NAME (decl);

      if (use_pointer_in_frame (decl))
	{
	  /* The frame stores only a pointer to DECL; see
	     use_pointer_in_frame for the rationale.  */
	  TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	  SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	  DECL_NONADDRESSABLE_P (field) = 1;
	}
      else
	{
	  /* The frame stores DECL itself; mirror DECL's attributes
	     onto the new field.  */
	  TREE_TYPE (field) = TREE_TYPE (decl);
	  DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
	  SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	  DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	  DECL_IGNORED_P (field) = DECL_IGNORED_P (decl);
	  DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
	  TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	  copy_warning (field, decl);

	  /* Declare the transformation and adjust the original DECL.  For a
	     variable or for a parameter when not optimizing, we make it point
	     to the field in the frame directly.  For a parameter, we don't do
	     it when optimizing because the variable tracking pass will already
	     do the job.  */
	  if (VAR_P (decl) || !optimize)
	    {
	      tree x
		= build3 (COMPONENT_REF, TREE_TYPE (field), info->frame_decl,
			  field, NULL_TREE);

	      /* If the next declaration is a PARM_DECL pointing to the DECL,
		 we need to adjust its VALUE_EXPR directly, since chains of
		 VALUE_EXPRs run afoul of garbage collection.  This occurs
		 in Ada for Out parameters that aren't copied in.  */
	      tree next = DECL_CHAIN (decl);
	      if (next
		  && TREE_CODE (next) == PARM_DECL
		  && DECL_HAS_VALUE_EXPR_P (next)
		  && DECL_VALUE_EXPR (next) == decl)
		SET_DECL_VALUE_EXPR (next, x);

	      SET_DECL_VALUE_EXPR (decl, x);
	      DECL_HAS_VALUE_EXPR_P (decl) = 1;
	    }
	}

      insert_field_into_struct (type, field);
      *slot = field;

      /* Remember that a parameter now lives (also) in the frame.  */
      if (TREE_CODE (decl) == PARM_DECL)
	info->any_parm_remapped = true;
    }

  return *slot;
}
453 | |
454 | /* Build or return the variable that holds the static chain within |
455 | INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */ |
456 | |
457 | static tree |
458 | get_chain_decl (struct nesting_info *info) |
459 | { |
460 | tree decl = info->chain_decl; |
461 | |
462 | if (!decl) |
463 | { |
464 | tree type; |
465 | |
466 | type = get_frame_type (info: info->outer); |
467 | type = build_pointer_type (type); |
468 | |
469 | /* Note that this variable is *not* entered into any BIND_EXPR; |
470 | the construction of this variable is handled specially in |
471 | expand_function_start and initialize_inlined_parameters. |
472 | Note also that it's represented as a parameter. This is more |
473 | close to the truth, since the initial value does come from |
474 | the caller. */ |
475 | decl = build_decl (DECL_SOURCE_LOCATION (info->context), |
476 | PARM_DECL, create_tmp_var_name ("CHAIN" ), type); |
477 | DECL_ARTIFICIAL (decl) = 1; |
478 | DECL_IGNORED_P (decl) = 1; |
479 | TREE_USED (decl) = 1; |
480 | DECL_CONTEXT (decl) = info->context; |
481 | DECL_ARG_TYPE (decl) = type; |
482 | |
483 | /* Tell tree-inline.cc that we never write to this variable, so |
484 | it can copy-prop the replacement value immediately. */ |
485 | TREE_READONLY (decl) = 1; |
486 | |
487 | info->chain_decl = decl; |
488 | |
489 | if (dump_file |
490 | && (dump_flags & TDF_DETAILS) |
491 | && !DECL_STATIC_CHAIN (info->context)) |
492 | fprintf (stream: dump_file, format: "Setting static-chain for %s\n" , |
493 | lang_hooks.decl_printable_name (info->context, 2)); |
494 | |
495 | DECL_STATIC_CHAIN (info->context) = 1; |
496 | } |
497 | return decl; |
498 | } |
499 | |
500 | /* Build or return the field within the non-local frame state that holds |
501 | the static chain for INFO->CONTEXT. This is the way to walk back up |
502 | multiple nesting levels. */ |
503 | |
504 | static tree |
505 | get_chain_field (struct nesting_info *info) |
506 | { |
507 | tree field = info->chain_field; |
508 | |
509 | if (!field) |
510 | { |
511 | tree type = build_pointer_type (get_frame_type (info: info->outer)); |
512 | |
513 | field = make_node (FIELD_DECL); |
514 | DECL_NAME (field) = get_identifier ("__chain" ); |
515 | TREE_TYPE (field) = type; |
516 | SET_DECL_ALIGN (field, TYPE_ALIGN (type)); |
517 | DECL_NONADDRESSABLE_P (field) = 1; |
518 | |
519 | insert_field_into_struct (type: get_frame_type (info), field); |
520 | |
521 | info->chain_field = field; |
522 | |
523 | if (dump_file |
524 | && (dump_flags & TDF_DETAILS) |
525 | && !DECL_STATIC_CHAIN (info->context)) |
526 | fprintf (stream: dump_file, format: "Setting static-chain for %s\n" , |
527 | lang_hooks.decl_printable_name (info->context, 2)); |
528 | |
529 | DECL_STATIC_CHAIN (info->context) = 1; |
530 | } |
531 | return field; |
532 | } |
533 | |
534 | /* Initialize a new temporary with the GIMPLE_CALL STMT. */ |
535 | |
536 | static tree |
537 | init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi, |
538 | gcall *call) |
539 | { |
540 | tree t; |
541 | |
542 | t = create_tmp_var_for (info, type: gimple_call_return_type (gs: call), NULL); |
543 | gimple_call_set_lhs (gs: call, lhs: t); |
544 | if (! gsi_end_p (i: *gsi)) |
545 | gimple_set_location (g: call, location: gimple_location (g: gsi_stmt (i: *gsi))); |
546 | gsi_insert_before (gsi, call, GSI_SAME_STMT); |
547 | |
548 | return t; |
549 | } |
550 | |
551 | |
552 | /* Copy EXP into a temporary. Allocate the temporary in the context of |
553 | INFO and insert the initialization statement before GSI. */ |
554 | |
555 | static tree |
556 | init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi) |
557 | { |
558 | tree t; |
559 | gimple *stmt; |
560 | |
561 | t = create_tmp_var_for (info, TREE_TYPE (exp), NULL); |
562 | stmt = gimple_build_assign (t, exp); |
563 | if (! gsi_end_p (i: *gsi)) |
564 | gimple_set_location (g: stmt, location: gimple_location (g: gsi_stmt (i: *gsi))); |
565 | gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT); |
566 | |
567 | return t; |
568 | } |
569 | |
570 | |
571 | /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */ |
572 | |
573 | static tree |
574 | gsi_gimplify_val (struct nesting_info *info, tree exp, |
575 | gimple_stmt_iterator *gsi) |
576 | { |
577 | if (is_gimple_val (exp)) |
578 | return exp; |
579 | else |
580 | return init_tmp_var (info, exp, gsi); |
581 | } |
582 | |
583 | /* Similarly, but copy from the temporary and insert the statement |
584 | after the iterator. */ |
585 | |
586 | static tree |
587 | save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi) |
588 | { |
589 | tree t; |
590 | gimple *stmt; |
591 | |
592 | t = create_tmp_var_for (info, TREE_TYPE (exp), NULL); |
593 | stmt = gimple_build_assign (exp, t); |
594 | if (! gsi_end_p (i: *gsi)) |
595 | gimple_set_location (g: stmt, location: gimple_location (g: gsi_stmt (i: *gsi))); |
596 | gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT); |
597 | |
598 | return t; |
599 | } |
600 | |
/* Build or return the type used to represent a nested function trampoline.  */

static GTY(()) tree trampoline_type;

static tree
get_trampoline_type (struct nesting_info *info)
{
  unsigned align, size;
  tree t;

  /* The type is target-dependent but the same for every function, so
     build it at most once.  */
  if (trampoline_type)
    return trampoline_type;

  /* When trampolines are created off-stack then the only thing we need in the
     local frame is a single pointer.  */
  if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
    {
      trampoline_type = build_pointer_type (void_type_node);
      return trampoline_type;
    }

  align = TRAMPOLINE_ALIGNMENT;
  size = TRAMPOLINE_SIZE;

  /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
     then allocate extra space so that we can do dynamic alignment.  */
  if (align > STACK_BOUNDARY)
    {
      size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
      align = STACK_BOUNDARY;
    }

  /* Represent the trampoline as a SIZE-byte char array wrapped in a
     RECORD_TYPE named __builtin_trampoline.  */
  t = build_index_type (size_int (size - 1));
  t = build_array_type (char_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data" ), t);
  SET_DECL_ALIGN (t, align);
  DECL_USER_ALIGN (t) = 1;

  trampoline_type = make_node (RECORD_TYPE);
  TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline" );
  TYPE_FIELDS (trampoline_type) = t;
  layout_type (trampoline_type);
  DECL_CONTEXT (t) = trampoline_type;

  return trampoline_type;
}
648 | |
/* Build or return the type used to represent a nested function descriptor.  */

static GTY(()) tree descriptor_type;

static tree
get_descriptor_type (struct nesting_info *info)
{
  /* The base alignment is that of a function.  */
  const unsigned align = FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY);
  tree t;

  /* Shared by all functions; build at most once.  */
  if (descriptor_type)
    return descriptor_type;

  /* A descriptor is a two-element array of pointers wrapped in a
     RECORD_TYPE named __builtin_descriptor.  */
  t = build_index_type (integer_one_node);
  t = build_array_type (ptr_type_node, t);
  t = build_decl (DECL_SOURCE_LOCATION (info->context),
		  FIELD_DECL, get_identifier ("__data" ), t);
  SET_DECL_ALIGN (t, MAX (TYPE_ALIGN (ptr_type_node), align));
  DECL_USER_ALIGN (t) = 1;

  descriptor_type = make_node (RECORD_TYPE);
  TYPE_NAME (descriptor_type) = get_identifier ("__builtin_descriptor" );
  TYPE_FIELDS (descriptor_type) = t;
  layout_type (descriptor_type);
  DECL_CONTEXT (t) = descriptor_type;

  return descriptor_type;
}
678 | |
679 | /* Given DECL, a nested function, find or create an element in the |
680 | var map for this function. */ |
681 | |
682 | static tree |
683 | lookup_element_for_decl (struct nesting_info *info, tree decl, |
684 | enum insert_option insert) |
685 | { |
686 | if (insert == NO_INSERT) |
687 | { |
688 | tree *slot = info->var_map->get (k: decl); |
689 | return slot ? *slot : NULL_TREE; |
690 | } |
691 | |
692 | tree *slot = &info->var_map->get_or_insert (k: decl); |
693 | if (!*slot) |
694 | *slot = build_tree_list (NULL_TREE, NULL_TREE); |
695 | |
696 | return (tree) *slot; |
697 | } |
698 | |
699 | /* Given DECL, a nested function, create a field in the non-local |
700 | frame structure for this function. */ |
701 | |
702 | static tree |
703 | create_field_for_decl (struct nesting_info *info, tree decl, tree type) |
704 | { |
705 | tree field = make_node (FIELD_DECL); |
706 | DECL_NAME (field) = DECL_NAME (decl); |
707 | TREE_TYPE (field) = type; |
708 | TREE_ADDRESSABLE (field) = 1; |
709 | insert_field_into_struct (type: get_frame_type (info), field); |
710 | return field; |
711 | } |
712 | |
713 | /* Given DECL, a nested function, find or create a field in the non-local |
714 | frame structure for a trampoline for this function. */ |
715 | |
716 | static tree |
717 | lookup_tramp_for_decl (struct nesting_info *info, tree decl, |
718 | enum insert_option insert) |
719 | { |
720 | tree elt, field; |
721 | |
722 | elt = lookup_element_for_decl (info, decl, insert); |
723 | if (!elt) |
724 | return NULL_TREE; |
725 | |
726 | field = TREE_PURPOSE (elt); |
727 | |
728 | if (!field && insert == INSERT) |
729 | { |
730 | field = create_field_for_decl (info, decl, type: get_trampoline_type (info)); |
731 | TREE_PURPOSE (elt) = field; |
732 | info->any_tramp_created = true; |
733 | } |
734 | |
735 | return field; |
736 | } |
737 | |
738 | /* Given DECL, a nested function, find or create a field in the non-local |
739 | frame structure for a descriptor for this function. */ |
740 | |
741 | static tree |
742 | lookup_descr_for_decl (struct nesting_info *info, tree decl, |
743 | enum insert_option insert) |
744 | { |
745 | tree elt, field; |
746 | |
747 | elt = lookup_element_for_decl (info, decl, insert); |
748 | if (!elt) |
749 | return NULL_TREE; |
750 | |
751 | field = TREE_VALUE (elt); |
752 | |
753 | if (!field && insert == INSERT) |
754 | { |
755 | field = create_field_for_decl (info, decl, type: get_descriptor_type (info)); |
756 | TREE_VALUE (elt) = field; |
757 | info->any_descr_created = true; |
758 | } |
759 | |
760 | return field; |
761 | } |
762 | |
/* Build or return the field within the non-local frame state that holds
   the non-local goto "jmp_buf".  The buffer itself is maintained by the
   rtl middle-end as dynamic stack space is allocated.  */

static tree
get_nl_goto_field (struct nesting_info *info)
{
  tree field = info->nl_goto_field;
  if (!field)
    {
      unsigned size;
      tree type;

      /* For __builtin_nonlocal_goto, we need N words.  The first is the
	 frame pointer, the rest is for the target's stack pointer save
	 area.  The number of words is controlled by STACK_SAVEAREA_MODE;
	 not the best interface, but it'll do for now.  */
      if (Pmode == ptr_mode)
	type = ptr_type_node;
      else
	type = lang_hooks.types.type_for_mode (Pmode, 1);

      /* Compute the number of pointer-sized words in the save area,
	 then add one for the frame pointer.  */
      scalar_int_mode mode
	= as_a <scalar_int_mode> (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
      size = GET_MODE_SIZE (mode);
      size = size / GET_MODE_SIZE (Pmode);
      size = size + 1;

      type = build_array_type
	(type, build_index_type (size_int (size)));

      field = make_node (FIELD_DECL);
      DECL_NAME (field) = get_identifier ("__nl_goto_buf" );
      TREE_TYPE (field) = type;
      SET_DECL_ALIGN (field, TYPE_ALIGN (type));
      /* The runtime takes the buffer's address to fill it in.  */
      TREE_ADDRESSABLE (field) = 1;

      insert_field_into_struct (type: get_frame_type (info), field);

      info->nl_goto_field = field;
    }

  return field;
}
807 | |
808 | /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */ |
809 | |
810 | static void |
811 | walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op, |
812 | struct nesting_info *info, gimple_seq *pseq) |
813 | { |
814 | struct walk_stmt_info wi; |
815 | |
816 | memset (s: &wi, c: 0, n: sizeof (wi)); |
817 | wi.info = info; |
818 | wi.val_only = true; |
819 | walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi); |
820 | } |
821 | |
822 | |
823 | /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */ |
824 | |
825 | static inline void |
826 | walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op, |
827 | struct nesting_info *info) |
828 | { |
829 | gimple_seq body = gimple_body (info->context); |
830 | walk_body (callback_stmt, callback_op, info, pseq: &body); |
831 | gimple_set_body (info->context, body); |
832 | } |
833 | |
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */

static void
walk_gimple_omp_for (gomp_for *for_stmt,
		     walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
		     struct nesting_info *info)
{
  struct walk_stmt_info wi;
  gimple_seq seq;
  tree t;
  size_t i;

  /* Rewrite the pre-body sequence in place first.  */
  walk_body (callback_stmt, callback_op, info, pseq: gimple_omp_for_pre_body_ptr (gs: for_stmt));

  /* Walk the loop-control operands; statements generated while
     rewriting them accumulate in SEQ through WI.GSI.  */
  seq = NULL;
  memset (s: &wi, c: 0, n: sizeof (wi));
  wi.info = info;
  wi.gsi = gsi_last (seq);

  for (i = 0; i < gimple_omp_for_collapse (gs: for_stmt); i++)
    {
      /* The index is an lvalue, so walk it with val_only clear.  */
      wi.val_only = false;
      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
		 &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
		 &wi, NULL);

      /* The increment is a binary expression: operand 0 is the index
	 (lvalue), operand 1 the step (rvalue).  */
      t = gimple_omp_for_incr (gs: for_stmt, i);
      gcc_assert (BINARY_CLASS_P (t));
      wi.val_only = false;
      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
      wi.val_only = true;
      wi.is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
    }

  /* If rewriting the operands emitted new statements, append them to
     the pre-body, tagged with the loop's location.  */
  seq = gsi_seq (i: wi.gsi);
  if (!gimple_seq_empty_p (s: seq))
    {
      gimple_seq pre_body = gimple_omp_for_pre_body (gs: for_stmt);
      annotate_all_with_location (seq, gimple_location (g: for_stmt));
      gimple_seq_add_seq (&pre_body, seq);
      gimple_omp_for_set_pre_body (gs: for_stmt, pre_body);
    }
}
886 | |
887 | /* Similarly for ROOT and all functions nested underneath, depth first. */ |
888 | |
889 | static void |
890 | walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op, |
891 | struct nesting_info *root) |
892 | { |
893 | struct nesting_info *n; |
894 | FOR_EACH_NEST_INFO (n, root) |
895 | walk_function (callback_stmt, callback_op, info: n); |
896 | } |
897 | |
898 | |
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
901 | function. So if any are variably-sized, they will get remapped when the |
902 | enclosing function is inlined. But that remapping would also have to be |
903 | done in the types of the PARM_DECLs of the nested function, meaning the |
904 | argument types of that function will disagree with the arguments in the |
905 | calls to that function. So we'd either have to make a copy of the nested |
906 | function corresponding to each time the enclosing function was inlined or |
907 | add a VIEW_CONVERT_EXPR to each such operand for each call to the nested |
908 | function. The former is not practical. The latter would still require |
909 | detecting this case to know when to add the conversions. So, for now at |
910 | least, we don't inline such an enclosing function. |
911 | |
912 | We have to do that check recursively, so here return indicating whether |
913 | FNDECL has such a nested function. ORIG_FN is the function we were |
914 | trying to inline to use for checking whether any argument is variably |
915 | modified by anything in it. |
916 | |
917 | It would be better to do this in tree-inline.cc so that we could give |
918 | the appropriate warning for why a function can't be inlined, but that's |
919 | too late since the nesting structure has already been flattened and |
920 | adding a flag just to record this fact seems a waste of a flag. */ |
921 | |
922 | static bool |
923 | check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl) |
924 | { |
925 | struct cgraph_node *cgn = cgraph_node::get (decl: fndecl); |
926 | tree arg; |
927 | |
928 | for (cgn = first_nested_function (node: cgn); cgn; |
929 | cgn = next_nested_function (node: cgn)) |
930 | { |
931 | for (arg = DECL_ARGUMENTS (cgn->decl); arg; arg = DECL_CHAIN (arg)) |
932 | if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl)) |
933 | return true; |
934 | |
935 | if (check_for_nested_with_variably_modified (fndecl: cgn->decl, |
936 | orig_fndecl)) |
937 | return true; |
938 | } |
939 | |
940 | return false; |
941 | } |
942 | |
943 | /* Construct our local datastructure describing the function nesting |
944 | tree rooted by CGN. */ |
945 | |
946 | static struct nesting_info * |
947 | create_nesting_tree (struct cgraph_node *cgn) |
948 | { |
949 | struct nesting_info *info = XCNEW (struct nesting_info); |
950 | info->field_map = new hash_map<tree, tree>; |
951 | info->var_map = new hash_map<tree, tree>; |
952 | info->mem_refs = new hash_set<tree *>; |
953 | info->suppress_expansion = BITMAP_ALLOC (obstack: &nesting_info_bitmap_obstack); |
954 | info->context = cgn->decl; |
955 | info->thunk_p = cgn->thunk; |
956 | |
957 | for (cgn = first_nested_function (node: cgn); cgn; |
958 | cgn = next_nested_function (node: cgn)) |
959 | { |
960 | struct nesting_info *sub = create_nesting_tree (cgn); |
961 | sub->outer = info; |
962 | sub->next = info->inner; |
963 | info->inner = sub; |
964 | } |
965 | |
966 | /* See discussion at check_for_nested_with_variably_modified for a |
967 | discussion of why this has to be here. */ |
968 | if (check_for_nested_with_variably_modified (fndecl: info->context, orig_fndecl: info->context)) |
969 | DECL_UNINLINABLE (info->context) = true; |
970 | |
971 | return info; |
972 | } |
973 | |
974 | /* Return an expression computing the static chain for TARGET_CONTEXT |
975 | from INFO->CONTEXT. Insert any necessary computations before TSI. */ |
976 | |
977 | static tree |
978 | get_static_chain (struct nesting_info *info, tree target_context, |
979 | gimple_stmt_iterator *gsi) |
980 | { |
981 | struct nesting_info *i; |
982 | tree x; |
983 | |
984 | if (info->context == target_context) |
985 | { |
986 | x = build_addr (exp: info->frame_decl); |
987 | info->static_chain_added |= 1; |
988 | } |
989 | else |
990 | { |
991 | x = get_chain_decl (info); |
992 | info->static_chain_added |= 2; |
993 | |
994 | for (i = info->outer; i->context != target_context; i = i->outer) |
995 | { |
996 | tree field = get_chain_field (info: i); |
997 | |
998 | x = build_simple_mem_ref_notrap (ptr: x); |
999 | x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE); |
1000 | x = init_tmp_var (info, exp: x, gsi); |
1001 | } |
1002 | } |
1003 | |
1004 | return x; |
1005 | } |
1006 | |
1007 | |
1008 | /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local |
1009 | frame as seen from INFO->CONTEXT. Insert any necessary computations |
1010 | before GSI. */ |
1011 | |
1012 | static tree |
1013 | get_frame_field (struct nesting_info *info, tree target_context, |
1014 | tree field, gimple_stmt_iterator *gsi) |
1015 | { |
1016 | struct nesting_info *i; |
1017 | tree x; |
1018 | |
1019 | if (info->context == target_context) |
1020 | { |
1021 | /* Make sure frame_decl gets created. */ |
1022 | (void) get_frame_type (info); |
1023 | x = info->frame_decl; |
1024 | info->static_chain_added |= 1; |
1025 | } |
1026 | else |
1027 | { |
1028 | x = get_chain_decl (info); |
1029 | info->static_chain_added |= 2; |
1030 | |
1031 | for (i = info->outer; i->context != target_context; i = i->outer) |
1032 | { |
1033 | tree field = get_chain_field (info: i); |
1034 | |
1035 | x = build_simple_mem_ref_notrap (ptr: x); |
1036 | x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE); |
1037 | x = init_tmp_var (info, exp: x, gsi); |
1038 | } |
1039 | |
1040 | x = build_simple_mem_ref_notrap (ptr: x); |
1041 | } |
1042 | |
1043 | x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE); |
1044 | TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (field); |
1045 | return x; |
1046 | } |
1047 | |
1048 | static void note_nonlocal_vla_type (struct nesting_info *info, tree type); |
1049 | |
/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
   in the nested function with DECL_VALUE_EXPR set to reference the true
   variable in the parent function.  This is used both for debug info
   and in OMP lowering.  */

static tree
get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
{
  tree target_context;
  struct nesting_info *i;
  tree x, field, new_decl;

  /* Reuse a previously created debug decl for DECL, if one exists.  */
  tree *slot = &info->var_map->get_or_insert (k: decl);

  if (*slot)
    return *slot;

  target_context = decl_function_context (decl);

  /* A copy of the code in get_frame_field, but without the temporaries.  */
  if (info->context == target_context)
    {
      /* Make sure frame_decl gets created.  */
      (void) get_frame_type (info);
      x = info->frame_decl;
      i = info;
      info->static_chain_added |= 1;
    }
  else
    {
      /* Follow the static chain up to DECL's frame.  */
      x = get_chain_decl (info);
      info->static_chain_added |= 2;
      for (i = info->outer; i->context != target_context; i = i->outer)
	{
	  field = get_chain_field (info: i);
	  x = build_simple_mem_ref_notrap (ptr: x);
	  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
	}
      x = build_simple_mem_ref_notrap (ptr: x);
    }

  /* Select DECL's field in the target frame; decls stored by pointer
     need one more dereference.  */
  field = lookup_field_for_decl (info: i, decl, insert: INSERT);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
  if (use_pointer_in_frame (decl))
    x = build_simple_mem_ref_notrap (ptr: x);

  /* ??? We should be remapping types as well, surely.  */
  new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
			 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  DECL_CONTEXT (new_decl) = info->context;
  DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
  TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
  TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
  TREE_READONLY (new_decl) = TREE_READONLY (decl);
  TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
  DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
  if ((TREE_CODE (decl) == PARM_DECL
       || TREE_CODE (decl) == RESULT_DECL
       || VAR_P (decl))
      && DECL_BY_REFERENCE (decl))
    DECL_BY_REFERENCE (new_decl) = 1;

  /* The new decl evaluates to the frame-access expression built above.  */
  SET_DECL_VALUE_EXPR (new_decl, x);
  DECL_HAS_VALUE_EXPR_P (new_decl) = 1;

  /* Cache the decl and chain it for later declaration.  */
  *slot = new_decl;
  DECL_CHAIN (new_decl) = info->debug_var_chain;
  info->debug_var_chain = new_decl;

  /* Without optimization, also create debug decls for any nonlocal VLA
     bounds occurring in DECL's type.  */
  if (!optimize
      && info->context != target_context
      && variably_modified_type_p (TREE_TYPE (decl), NULL))
    note_nonlocal_vla_type (info, TREE_TYPE (decl));

  return new_decl;
}
1127 | |
1128 | |
/* Callback for walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  TP points at the
   operand being walked; DATA is the walk_stmt_info carrying the
   nesting_info.

   The rewrite will involve some number of structure accesses back up
   the static chain.  E.g. for a variable FOO up one nesting level it'll
   be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
   indirections apply to decls for which use_pointer_in_frame is true.  */

static tree
convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      {
	tree x, target_context = decl_function_context (t);

	/* Local references need no rewriting.  */
	if (info->context == target_context)
	  break;

	wi->changed = true;

	if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	  x = get_nonlocal_debug_decl (info, decl: t);
	else
	  {
	    struct nesting_info *i = info;
	    while (i && i->context != target_context)
	      i = i->outer;
	    /* If none of the outer contexts is the target context, this means
	       that the VAR or PARM_DECL is referenced in a wrong context.  */
	    if (!i)
	      internal_error ("%s from %s referenced in %s" ,
			      IDENTIFIER_POINTER (DECL_NAME (t)),
			      IDENTIFIER_POINTER (DECL_NAME (target_context)),
			      IDENTIFIER_POINTER (DECL_NAME (info->context)));

	    /* Build the frame access, with an extra dereference for
	       decls stored by pointer.  */
	    x = lookup_field_for_decl (info: i, decl: t, insert: INSERT);
	    x = get_frame_field (info, target_context, field: x, gsi: &wi->gsi);
	    if (use_pointer_in_frame (decl: t))
	      {
		x = init_tmp_var (info, exp: x, gsi: &wi->gsi);
		x = build_simple_mem_ref_notrap (ptr: x);
	      }
	  }

	/* In a context that wants a plain value, reduce the access to a
	   temporary (or a save temporary when it is being assigned).  */
	if (wi->val_only)
	  {
	    if (wi->is_lhs)
	      x = save_tmp_var (info, exp: x, gsi: &wi->gsi);
	    else
	      x = init_tmp_var (info, exp: x, gsi: &wi->gsi);
	  }

	*tp = x;
      }
      break;

    case LABEL_DECL:
      /* We're taking the address of a label from a parent function, but
	 this is not itself a non-local goto.  Mark the label such that it
	 will not be deleted, much as we would with a label address in
	 static storage.  */
      if (decl_function_context (t) != info->context)
	FORCED_LABEL (t) = 1;
      break;

    case ADDR_EXPR:
      {
	bool save_val_only = wi->val_only;

	/* Walk the address operand without forcing a value context.  */
	wi->val_only = false;
	wi->is_lhs = false;
	wi->changed = false;
	walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
	wi->val_only = true;

	if (wi->changed)
	  {
	    tree save_context;

	    /* If we changed anything, we might no longer be directly
	       referencing a decl.  */
	    save_context = current_function_decl;
	    current_function_decl = info->context;
	    recompute_tree_invariant_for_addr_expr (t);

	    /* If the callback converted the address argument in a context
	       where we only accept variables (and min_invariant, presumably),
	       then compute the address into a temporary.  */
	    if (save_val_only)
	      *tp = gsi_gimplify_val (info: (struct nesting_info *) wi->info,
				      exp: t, gsi: &wi->gsi);
	    current_function_decl = save_context;
	  }
      }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Walk the index plus the element size and offset operands.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
			 wi, NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
			 wi, NULL);
	    }
	}
      wi->val_only = false;
      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Recurse into any other expression in value context.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
1288 | |
1289 | static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *, |
1290 | struct walk_stmt_info *); |
1291 | |
/* Helper for convert_nonlocal_references, rewrite all references to VAR
   and PARM_DECLs that belong to outer functions.  PCLAUSES points at an
   OMP clause chain; WI carries the nesting_info.  Return true if the
   rewritten clauses need the static chain to be passed in.  */

static bool
convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  bool need_chain = false, need_stmts = false;
  tree clause, decl, *pdecl;
  int dummy;
  bitmap new_suppress;

  /* Work on a copy of the suppression bitmap; the caller restores the
     original after processing the construct's body.  */
  new_suppress = BITMAP_GGC_ALLOC ();
  bitmap_copy (new_suppress, info->suppress_expansion);

  /* First pass: rewrite decls and operand expressions in each clause.  */
  for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
    {
      pdecl = NULL;
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    need_stmts = true;
	  if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF)
	    {
	      /* Dig the underlying decl out of the MEM_REF address.  */
	      pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0);
	      if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	      if (INDIRECT_REF_P (*pdecl)
		  || TREE_CODE (*pdecl) == ADDR_EXPR)
		pdecl = &TREE_OPERAND (*pdecl, 0);
	    }
	  goto do_decl_clause;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  goto do_decl_clause;

	case OMP_CLAUSE_LINEAR:
	  if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause))
	    need_stmts = true;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  convert_nonlocal_reference_op (tp: &OMP_CLAUSE_LINEAR_STEP (clause),
					 walk_subtrees: &dummy, data: wi);
	  goto do_decl_clause;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_ENTER:
	case OMP_CLAUSE_LINK:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_HAS_DEVICE_ADDR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_DETACH:
	do_decl_clause:
	  /* Common path: replace a nonlocal clause decl with a debug
	     decl and suppress its expansion inside the body.  */
	  if (pdecl == NULL)
	    pdecl = &OMP_CLAUSE_DECL (clause);
	  decl = *pdecl;
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED)
		OMP_CLAUSE_SHARED_READONLY (clause) = 0;
	      bitmap_set_bit (new_suppress, DECL_UID (decl));
	      *pdecl = get_nonlocal_debug_decl (info, decl);
	      if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
		need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_SCHEDULE:
	  if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
	    break;
	  /* FALLTHRU */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_SELF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DOACROSS:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_FILTER:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	  /* Several OpenACC clauses have optional arguments.  Check if they
	     are present.  */
	  if (OMP_CLAUSE_OPERAND (clause, 0))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (tp: &OMP_CLAUSE_OPERAND (clause, 0),
					     walk_subtrees: &dummy, data: wi);
	    }

	  /* The gang clause accepts two arguments.  */
	  if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG
	      && OMP_CLAUSE_GANG_STATIC_EXPR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(tp: &OMP_CLAUSE_GANG_STATIC_EXPR (clause), walk_subtrees: &dummy, data: wi);
	    }
	  break;

	case OMP_CLAUSE_DIST_SCHEDULE:
	  if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL)
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (tp: &OMP_CLAUSE_OPERAND (clause, 0),
					     walk_subtrees: &dummy, data: wi);
	    }
	  break;

	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	  if (OMP_CLAUSE_SIZE (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op (tp: &OMP_CLAUSE_SIZE (clause),
					     walk_subtrees: &dummy, data: wi);
	    }
	  if (DECL_P (OMP_CLAUSE_DECL (clause)))
	    goto do_decl_clause;
	  wi->val_only = true;
	  wi->is_lhs = false;
	  walk_tree (&OMP_CLAUSE_DECL (clause), convert_nonlocal_reference_op,
		     wi, NULL);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(tp: &OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), walk_subtrees: &dummy, data: wi);
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_NONTEMPORAL:
	do_decl_clause_no_supp:
	  /* Like do_decl_clause, but don't add any suppression.  */
	  decl = OMP_CLAUSE_DECL (clause);
	  if (VAR_P (decl)
	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
	    break;
	  if (decl_function_context (decl) != info->context)
	    {
	      OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
	      need_chain = true;
	    }
	  break;

	case OMP_CLAUSE_ALLOCATE:
	  if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause))
	    {
	      wi->val_only = true;
	      wi->is_lhs = false;
	      convert_nonlocal_reference_op
		(tp: &OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), walk_subtrees: &dummy, data: wi);
	    }
	  goto do_decl_clause_no_supp;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE__CONDTEMP_:
	case OMP_CLAUSE__SCANTEMP_:
	  /* Argument-less clauses: nothing to rewrite.  */
	  break;

	  /* The following clause belongs to the OpenACC cache directive, which
	     is discarded during gimplification.  */
	case OMP_CLAUSE__CACHE_:
	  /* The following clauses are only allowed in the OpenMP declare simd
	     directive, so not seen here.  */
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	  /* The following clauses are only allowed on OpenMP cancel and
	     cancellation point directives, which at this point have already
	     been lowered into a function call.  */
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	  /* The following clauses are only added during OMP lowering; nested
	     function decomposition happens before that.  */
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__SIMT_:
	  /* The following clauses are only allowed on OpenACC 'routine'
	     directives, not seen here.  */
	case OMP_CLAUSE_NOHOST:
	  /* Anything else.  */
	default:
	  gcc_unreachable ();
	}
    }

  info->suppress_expansion = new_suppress;

  /* Second pass: walk the embedded GIMPLE sequences of reduction,
     lastprivate and linear clauses, declaring any new temporaries
     created while walking them.  */
  if (need_stmts)
    for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
      switch (OMP_CLAUSE_CODE (clause))
	{
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
	    {
	      /* Temporarily move the placeholders into this context while
		 walking the init and merge sequences.  */
	      tree old_context
		= DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= info->context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = info->context;
	      tree save_local_var_chain = info->new_local_var_chain;
	      info->new_local_var_chain = NULL;
	      gimple_seq *seq = &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause);
	      walk_body (callback_stmt: convert_nonlocal_reference_stmt,
			 callback_op: convert_nonlocal_reference_op, info, pseq: seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (s: *seq), false);
	      info->new_local_var_chain = NULL;
	      seq = &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause);
	      walk_body (callback_stmt: convert_nonlocal_reference_stmt,
			 callback_op: convert_nonlocal_reference_op, info, pseq: seq);
	      if (info->new_local_var_chain)
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (s: *seq), false);
	      info->new_local_var_chain = save_local_var_chain;
	      DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
		= old_context;
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause))
		  = old_context;
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  {
	    tree save_local_var_chain = info->new_local_var_chain;
	    info->new_local_var_chain = NULL;
	    gimple_seq *seq;
	    if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE)
	      seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause);
	    else
	      seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause);
	    walk_body (callback_stmt: convert_nonlocal_reference_stmt,
		       callback_op: convert_nonlocal_reference_op, info, pseq: seq);
	    if (info->new_local_var_chain)
	      {
		/* New locals need a GIMPLE_BIND to be declared in; wrap
		   the sequence if it doesn't already start with one.  */
		gimple *g = gimple_seq_first_stmt (s: *seq);
		if (gimple_code (g) != GIMPLE_BIND)
		  {
		    g = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
		    *seq = NULL;
		    gimple_seq_add_stmt_without_update (seq, g);
		  }
		declare_vars (info->new_local_var_chain,
			      gimple_seq_first_stmt (s: *seq), false);
	      }
	    info->new_local_var_chain = save_local_var_chain;
	  }
	  break;

	default:
	  break;
	}

  return need_chain;
}
1611 | |
1612 | /* Create nonlocal debug decls for nonlocal VLA array bounds. */ |
1613 | |
1614 | static void |
1615 | note_nonlocal_vla_type (struct nesting_info *info, tree type) |
1616 | { |
1617 | while (POINTER_TYPE_P (type) && !TYPE_NAME (type)) |
1618 | type = TREE_TYPE (type); |
1619 | |
1620 | if (TYPE_NAME (type) |
1621 | && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL |
1622 | && DECL_ORIGINAL_TYPE (TYPE_NAME (type))) |
1623 | type = DECL_ORIGINAL_TYPE (TYPE_NAME (type)); |
1624 | |
1625 | while (POINTER_TYPE_P (type) |
1626 | || VECTOR_TYPE_P (type) |
1627 | || TREE_CODE (type) == FUNCTION_TYPE |
1628 | || TREE_CODE (type) == METHOD_TYPE) |
1629 | type = TREE_TYPE (type); |
1630 | |
1631 | if (TREE_CODE (type) == ARRAY_TYPE) |
1632 | { |
1633 | tree domain, t; |
1634 | |
1635 | note_nonlocal_vla_type (info, TREE_TYPE (type)); |
1636 | domain = TYPE_DOMAIN (type); |
1637 | if (domain) |
1638 | { |
1639 | t = TYPE_MIN_VALUE (domain); |
1640 | if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
1641 | && decl_function_context (t) != info->context) |
1642 | get_nonlocal_debug_decl (info, decl: t); |
1643 | t = TYPE_MAX_VALUE (domain); |
1644 | if (t && (VAR_P (t) || TREE_CODE (t) == PARM_DECL) |
1645 | && decl_function_context (t) != info->context) |
1646 | get_nonlocal_debug_decl (info, decl: t); |
1647 | } |
1648 | } |
1649 | } |
1650 | |
1651 | /* Callback for walk_gimple_stmt. Rewrite all references to VAR and |
1652 | PARM_DECLs that belong to outer functions. This handles statements |
1653 | that are not handled via the standard recursion done in |
1654 | walk_gimple_stmt. STMT is the statement to examine, DATA is as in |
1655 | convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the |
1656 | operands of STMT have been handled by this function. */ |
1657 | |
1658 | static tree |
1659 | convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p, |
1660 | struct walk_stmt_info *wi) |
1661 | { |
1662 | struct nesting_info *info = (struct nesting_info *) wi->info; |
1663 | tree save_local_var_chain; |
1664 | bitmap save_suppress; |
1665 | gimple *stmt = gsi_stmt (i: *gsi); |
1666 | |
1667 | switch (gimple_code (g: stmt)) |
1668 | { |
1669 | case GIMPLE_GOTO: |
1670 | /* Don't walk non-local gotos for now. */ |
1671 | if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL) |
1672 | { |
1673 | wi->val_only = true; |
1674 | wi->is_lhs = false; |
1675 | *handled_ops_p = false; |
1676 | return NULL_TREE; |
1677 | } |
1678 | break; |
1679 | |
1680 | case GIMPLE_OMP_TEAMS: |
1681 | if (!gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt))) |
1682 | { |
1683 | save_suppress = info->suppress_expansion; |
1684 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_teams_clauses_ptr (gs: stmt), |
1685 | wi); |
1686 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, |
1687 | callback_op: convert_nonlocal_reference_op, info, |
1688 | pseq: gimple_omp_body_ptr (gs: stmt)); |
1689 | info->suppress_expansion = save_suppress; |
1690 | break; |
1691 | } |
1692 | /* FALLTHRU */ |
1693 | |
1694 | case GIMPLE_OMP_PARALLEL: |
1695 | case GIMPLE_OMP_TASK: |
1696 | save_suppress = info->suppress_expansion; |
1697 | if (convert_nonlocal_omp_clauses (pclauses: gimple_omp_taskreg_clauses_ptr (gs: stmt), |
1698 | wi)) |
1699 | { |
1700 | tree c, decl; |
1701 | decl = get_chain_decl (info); |
1702 | c = build_omp_clause (gimple_location (g: stmt), |
1703 | OMP_CLAUSE_FIRSTPRIVATE); |
1704 | OMP_CLAUSE_DECL (c) = decl; |
1705 | OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt); |
1706 | gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c); |
1707 | } |
1708 | |
1709 | save_local_var_chain = info->new_local_var_chain; |
1710 | info->new_local_var_chain = NULL; |
1711 | |
1712 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1713 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1714 | |
1715 | if (info->new_local_var_chain) |
1716 | declare_vars (info->new_local_var_chain, |
1717 | gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)), |
1718 | false); |
1719 | info->new_local_var_chain = save_local_var_chain; |
1720 | info->suppress_expansion = save_suppress; |
1721 | break; |
1722 | |
1723 | case GIMPLE_OMP_FOR: |
1724 | save_suppress = info->suppress_expansion; |
1725 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_for_clauses_ptr (gs: stmt), wi); |
1726 | walk_gimple_omp_for (for_stmt: as_a <gomp_for *> (p: stmt), |
1727 | callback_stmt: convert_nonlocal_reference_stmt, |
1728 | callback_op: convert_nonlocal_reference_op, info); |
1729 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, |
1730 | callback_op: convert_nonlocal_reference_op, info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1731 | info->suppress_expansion = save_suppress; |
1732 | break; |
1733 | |
1734 | case GIMPLE_OMP_SECTIONS: |
1735 | save_suppress = info->suppress_expansion; |
1736 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_sections_clauses_ptr (gs: stmt), wi); |
1737 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1738 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1739 | info->suppress_expansion = save_suppress; |
1740 | break; |
1741 | |
1742 | case GIMPLE_OMP_SINGLE: |
1743 | save_suppress = info->suppress_expansion; |
1744 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_single_clauses_ptr (gs: stmt), wi); |
1745 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1746 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1747 | info->suppress_expansion = save_suppress; |
1748 | break; |
1749 | |
1750 | case GIMPLE_OMP_SCOPE: |
1751 | save_suppress = info->suppress_expansion; |
1752 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_scope_clauses_ptr (gs: stmt), wi); |
1753 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1754 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1755 | info->suppress_expansion = save_suppress; |
1756 | break; |
1757 | |
1758 | case GIMPLE_OMP_TASKGROUP: |
1759 | save_suppress = info->suppress_expansion; |
1760 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_taskgroup_clauses_ptr (gs: stmt), wi); |
1761 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1762 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1763 | info->suppress_expansion = save_suppress; |
1764 | break; |
1765 | |
1766 | case GIMPLE_OMP_TARGET: |
1767 | if (!is_gimple_omp_offloaded (stmt)) |
1768 | { |
1769 | save_suppress = info->suppress_expansion; |
1770 | convert_nonlocal_omp_clauses (pclauses: gimple_omp_target_clauses_ptr (gs: stmt), |
1771 | wi); |
1772 | info->suppress_expansion = save_suppress; |
1773 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, |
1774 | callback_op: convert_nonlocal_reference_op, info, |
1775 | pseq: gimple_omp_body_ptr (gs: stmt)); |
1776 | break; |
1777 | } |
1778 | save_suppress = info->suppress_expansion; |
1779 | if (convert_nonlocal_omp_clauses (pclauses: gimple_omp_target_clauses_ptr (gs: stmt), |
1780 | wi)) |
1781 | { |
1782 | tree c, decl; |
1783 | decl = get_chain_decl (info); |
1784 | c = build_omp_clause (gimple_location (g: stmt), OMP_CLAUSE_MAP); |
1785 | OMP_CLAUSE_DECL (c) = decl; |
1786 | OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO); |
1787 | OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl); |
1788 | OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (gs: stmt); |
1789 | gimple_omp_target_set_clauses (omp_target_stmt: as_a <gomp_target *> (p: stmt), clauses: c); |
1790 | } |
1791 | |
1792 | save_local_var_chain = info->new_local_var_chain; |
1793 | info->new_local_var_chain = NULL; |
1794 | |
1795 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1796 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1797 | |
1798 | if (info->new_local_var_chain) |
1799 | declare_vars (info->new_local_var_chain, |
1800 | gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)), |
1801 | false); |
1802 | info->new_local_var_chain = save_local_var_chain; |
1803 | info->suppress_expansion = save_suppress; |
1804 | break; |
1805 | |
1806 | case GIMPLE_OMP_SECTION: |
1807 | case GIMPLE_OMP_STRUCTURED_BLOCK: |
1808 | case GIMPLE_OMP_MASTER: |
1809 | case GIMPLE_OMP_MASKED: |
1810 | case GIMPLE_OMP_ORDERED: |
1811 | case GIMPLE_OMP_SCAN: |
1812 | walk_body (callback_stmt: convert_nonlocal_reference_stmt, callback_op: convert_nonlocal_reference_op, |
1813 | info, pseq: gimple_omp_body_ptr (gs: stmt)); |
1814 | break; |
1815 | |
1816 | case GIMPLE_BIND: |
1817 | { |
1818 | gbind *bind_stmt = as_a <gbind *> (p: stmt); |
1819 | |
1820 | for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var)) |
1821 | if (TREE_CODE (var) == NAMELIST_DECL) |
1822 | { |
1823 | /* Adjust decls mentioned in NAMELIST_DECL. */ |
1824 | tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var); |
1825 | tree decl; |
1826 | unsigned int i; |
1827 | |
1828 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl) |
1829 | { |
1830 | if (VAR_P (decl) |
1831 | && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))) |
1832 | continue; |
1833 | if (decl_function_context (decl) != info->context) |
1834 | CONSTRUCTOR_ELT (decls, i)->value |
1835 | = get_nonlocal_debug_decl (info, decl); |
1836 | } |
1837 | } |
1838 | |
1839 | *handled_ops_p = false; |
1840 | return NULL_TREE; |
1841 | } |
1842 | case GIMPLE_COND: |
1843 | wi->val_only = true; |
1844 | wi->is_lhs = false; |
1845 | *handled_ops_p = false; |
1846 | return NULL_TREE; |
1847 | |
1848 | case GIMPLE_ASSIGN: |
1849 | if (gimple_clobber_p (s: stmt)) |
1850 | { |
1851 | tree lhs = gimple_assign_lhs (gs: stmt); |
1852 | if (DECL_P (lhs) |
1853 | && !(TREE_STATIC (lhs) || DECL_EXTERNAL (lhs)) |
1854 | && decl_function_context (lhs) != info->context) |
1855 | { |
1856 | gsi_replace (gsi, gimple_build_nop (), true); |
1857 | break; |
1858 | } |
1859 | } |
1860 | *handled_ops_p = false; |
1861 | return NULL_TREE; |
1862 | |
1863 | default: |
1864 | /* For every other statement that we are not interested in |
1865 | handling here, let the walker traverse the operands. */ |
1866 | *handled_ops_p = false; |
1867 | return NULL_TREE; |
1868 | } |
1869 | |
1870 | /* We have handled all of STMT operands, no need to traverse the operands. */ |
1871 | *handled_ops_p = true; |
1872 | return NULL_TREE; |
1873 | } |
1874 | |
1875 | |
1876 | /* A subroutine of convert_local_reference. Create a local variable |
1877 | in the parent function with DECL_VALUE_EXPR set to reference the |
1878 | field in FRAME. This is used both for debug info and in OMP |
1879 | lowering. */ |
1880 | |
1881 | static tree |
1882 | get_local_debug_decl (struct nesting_info *info, tree decl, tree field) |
1883 | { |
1884 | tree x, new_decl; |
1885 | |
1886 | tree *slot = &info->var_map->get_or_insert (k: decl); |
1887 | if (*slot) |
1888 | return *slot; |
1889 | |
1890 | /* Make sure frame_decl gets created. */ |
1891 | (void) get_frame_type (info); |
1892 | x = info->frame_decl; |
1893 | x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE); |
1894 | |
1895 | new_decl = build_decl (DECL_SOURCE_LOCATION (decl), |
1896 | VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl)); |
1897 | DECL_CONTEXT (new_decl) = info->context; |
1898 | DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl); |
1899 | DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl); |
1900 | TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl); |
1901 | TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl); |
1902 | TREE_READONLY (new_decl) = TREE_READONLY (decl); |
1903 | TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl); |
1904 | DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1; |
1905 | if ((TREE_CODE (decl) == PARM_DECL |
1906 | || TREE_CODE (decl) == RESULT_DECL |
1907 | || VAR_P (decl)) |
1908 | && DECL_BY_REFERENCE (decl)) |
1909 | DECL_BY_REFERENCE (new_decl) = 1; |
1910 | |
1911 | SET_DECL_VALUE_EXPR (new_decl, x); |
1912 | DECL_HAS_VALUE_EXPR_P (new_decl) = 1; |
1913 | *slot = new_decl; |
1914 | |
1915 | DECL_CHAIN (new_decl) = info->debug_var_chain; |
1916 | info->debug_var_chain = new_decl; |
1917 | |
1918 | /* Do not emit debug info twice. */ |
1919 | DECL_IGNORED_P (decl) = 1; |
1920 | |
1921 | return new_decl; |
1922 | } |
1923 | |
1924 | |
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.
   TP points at the tree being visited; DATA is the walk_stmt_info whose
   INFO field is the nesting_info of the frame-owning function.  */

static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);

static tree
convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree t = *tp, field, x;
  bool save_val_only;

  /* By default each node is handled fully here; individual cases
     re-enable subtree walking where that is the right thing.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* Non-automatic variables are never processed.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	break;
      /* FALLTHRU */

    case PARM_DECL:
      if (t != info->frame_decl && decl_function_context (t) == info->context)
	{
	  /* If we copied a pointer to the frame, then the original decl
	     is used unchanged in the parent function.  */
	  if (use_pointer_in_frame (decl: t))
	    break;

	  /* No need to transform anything if no child references the
	     variable.  */
	  field = lookup_field_for_decl (info, decl: t, insert: NO_INSERT);
	  if (!field)
	    break;
	  wi->changed = true;

	  /* Decls in the suppression set (e.g. handled by an enclosing
	     OMP clause) are replaced by a debug decl with a
	     DECL_VALUE_EXPR rather than by an explicit frame access.  */
	  if (bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
	    x = get_local_debug_decl (info, decl: t, field);
	  else
	    x = get_frame_field (info, target_context: info->context, field, gsi: &wi->gsi);

	  /* In a value-only context, materialize the frame access in a
	     temporary: one to be stored to for an LHS, one initialized
	     from the field otherwise.  */
	  if (wi->val_only)
	    {
	      if (wi->is_lhs)
		x = save_tmp_var (info, exp: x, gsi: &wi->gsi);
	      else
		x = init_tmp_var (info, exp: x, gsi: &wi->gsi);
	    }

	  *tp = x;
	}
      break;

    case ADDR_EXPR:
      /* Walk the operand in address (non-value) context and watch
	 wi->changed to learn whether anything was rewritten.  */
      save_val_only = wi->val_only;
      wi->val_only = false;
      wi->is_lhs = false;
      wi->changed = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;

      /* If we converted anything ... */
      if (wi->changed)
	{
	  tree save_context;

	  /* Then the frame decl is now addressable.  */
	  TREE_ADDRESSABLE (info->frame_decl) = 1;

	  save_context = current_function_decl;
	  current_function_decl = info->context;
	  recompute_tree_invariant_for_addr_expr (t);

	  /* If we are in a context where we only accept values, then
	     compute the address into a temporary.  */
	  if (save_val_only)
	    *tp = gsi_gimplify_val (info: (struct nesting_info *) wi->info,
				    exp: t, gsi: &wi->gsi);
	  current_function_decl = save_context;
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
      /* Go down this entire nest and just look at the final prefix and
	 anything that describes the references.  Otherwise, we lose track
	 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value.  */
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
	{
	  if (TREE_CODE (t) == COMPONENT_REF)
	    walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
		       NULL);
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      /* Index, element size and low bound are converted in value
		 context.  */
	      walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
			 NULL);
	      walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
			 NULL);
	    }
	}
      /* Finally convert the base of the reference chain, in non-value
	 context since it remains part of the reference.  */
      wi->val_only = false;
      walk_tree (tp, convert_local_reference_op, wi, NULL);
      wi->val_only = save_val_only;
      break;

    case MEM_REF:
      save_val_only = wi->val_only;
      wi->val_only = true;
      wi->is_lhs = false;
      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
		 wi, NULL);
      /* We need to re-fold the MEM_REF as component references as
	 part of a ADDR_EXPR address are not allowed.  But we cannot
	 fold here, as the chain record type is not yet finalized.
	 Remember the location for a later fold-up pass.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	  && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
	info->mem_refs->add (k: tp);
      wi->val_only = save_val_only;
      break;

    case VIEW_CONVERT_EXPR:
      /* Just request to look at the subtrees, leaving val_only and lhs
	 untouched.  This might actually be for !val_only + lhs, in which
	 case we don't want to force a replacement by a temporary.  */
      *walk_subtrees = 1;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	{
	  /* Any other expression: traverse operands in RHS value
	     context.  */
	  *walk_subtrees = 1;
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;
    }

  return NULL_TREE;
}
2076 | |
2077 | static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *, |
2078 | struct walk_stmt_info *); |
2079 | |
2080 | /* Helper for convert_local_reference. Convert all the references in |
2081 | the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */ |
2082 | |
2083 | static bool |
2084 | convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi) |
2085 | { |
2086 | struct nesting_info *const info = (struct nesting_info *) wi->info; |
2087 | bool need_frame = false, need_stmts = false; |
2088 | tree clause, decl, *pdecl; |
2089 | int dummy; |
2090 | bitmap new_suppress; |
2091 | |
2092 | new_suppress = BITMAP_GGC_ALLOC (); |
2093 | bitmap_copy (new_suppress, info->suppress_expansion); |
2094 | |
2095 | for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause)) |
2096 | { |
2097 | pdecl = NULL; |
2098 | switch (OMP_CLAUSE_CODE (clause)) |
2099 | { |
2100 | case OMP_CLAUSE_REDUCTION: |
2101 | case OMP_CLAUSE_IN_REDUCTION: |
2102 | case OMP_CLAUSE_TASK_REDUCTION: |
2103 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause)) |
2104 | need_stmts = true; |
2105 | if (TREE_CODE (OMP_CLAUSE_DECL (clause)) == MEM_REF) |
2106 | { |
2107 | pdecl = &TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0); |
2108 | if (TREE_CODE (*pdecl) == POINTER_PLUS_EXPR) |
2109 | pdecl = &TREE_OPERAND (*pdecl, 0); |
2110 | if (INDIRECT_REF_P (*pdecl) |
2111 | || TREE_CODE (*pdecl) == ADDR_EXPR) |
2112 | pdecl = &TREE_OPERAND (*pdecl, 0); |
2113 | } |
2114 | goto do_decl_clause; |
2115 | |
2116 | case OMP_CLAUSE_LASTPRIVATE: |
2117 | if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause)) |
2118 | need_stmts = true; |
2119 | goto do_decl_clause; |
2120 | |
2121 | case OMP_CLAUSE_LINEAR: |
2122 | if (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause)) |
2123 | need_stmts = true; |
2124 | wi->val_only = true; |
2125 | wi->is_lhs = false; |
2126 | convert_local_reference_op (tp: &OMP_CLAUSE_LINEAR_STEP (clause), walk_subtrees: &dummy, |
2127 | data: wi); |
2128 | goto do_decl_clause; |
2129 | |
2130 | case OMP_CLAUSE_PRIVATE: |
2131 | case OMP_CLAUSE_FIRSTPRIVATE: |
2132 | case OMP_CLAUSE_COPYPRIVATE: |
2133 | case OMP_CLAUSE_SHARED: |
2134 | case OMP_CLAUSE_ENTER: |
2135 | case OMP_CLAUSE_LINK: |
2136 | case OMP_CLAUSE_USE_DEVICE_PTR: |
2137 | case OMP_CLAUSE_USE_DEVICE_ADDR: |
2138 | case OMP_CLAUSE_HAS_DEVICE_ADDR: |
2139 | case OMP_CLAUSE_IS_DEVICE_PTR: |
2140 | case OMP_CLAUSE_DETACH: |
2141 | do_decl_clause: |
2142 | if (pdecl == NULL) |
2143 | pdecl = &OMP_CLAUSE_DECL (clause); |
2144 | decl = *pdecl; |
2145 | if (VAR_P (decl) |
2146 | && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))) |
2147 | break; |
2148 | if (decl_function_context (decl) == info->context |
2149 | && !use_pointer_in_frame (decl)) |
2150 | { |
2151 | tree field = lookup_field_for_decl (info, decl, insert: NO_INSERT); |
2152 | if (field) |
2153 | { |
2154 | if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_SHARED) |
2155 | OMP_CLAUSE_SHARED_READONLY (clause) = 0; |
2156 | bitmap_set_bit (new_suppress, DECL_UID (decl)); |
2157 | *pdecl = get_local_debug_decl (info, decl, field); |
2158 | need_frame = true; |
2159 | } |
2160 | } |
2161 | break; |
2162 | |
2163 | case OMP_CLAUSE_SCHEDULE: |
2164 | if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL) |
2165 | break; |
2166 | /* FALLTHRU */ |
2167 | case OMP_CLAUSE_FINAL: |
2168 | case OMP_CLAUSE_IF: |
2169 | case OMP_CLAUSE_SELF: |
2170 | case OMP_CLAUSE_NUM_THREADS: |
2171 | case OMP_CLAUSE_DEPEND: |
2172 | case OMP_CLAUSE_DOACROSS: |
2173 | case OMP_CLAUSE_DEVICE: |
2174 | case OMP_CLAUSE_NUM_TEAMS: |
2175 | case OMP_CLAUSE_THREAD_LIMIT: |
2176 | case OMP_CLAUSE_SAFELEN: |
2177 | case OMP_CLAUSE_SIMDLEN: |
2178 | case OMP_CLAUSE_PRIORITY: |
2179 | case OMP_CLAUSE_GRAINSIZE: |
2180 | case OMP_CLAUSE_NUM_TASKS: |
2181 | case OMP_CLAUSE_HINT: |
2182 | case OMP_CLAUSE_FILTER: |
2183 | case OMP_CLAUSE_NUM_GANGS: |
2184 | case OMP_CLAUSE_NUM_WORKERS: |
2185 | case OMP_CLAUSE_VECTOR_LENGTH: |
2186 | case OMP_CLAUSE_GANG: |
2187 | case OMP_CLAUSE_WORKER: |
2188 | case OMP_CLAUSE_VECTOR: |
2189 | case OMP_CLAUSE_ASYNC: |
2190 | case OMP_CLAUSE_WAIT: |
2191 | /* Several OpenACC clauses have optional arguments. Check if they |
2192 | are present. */ |
2193 | if (OMP_CLAUSE_OPERAND (clause, 0)) |
2194 | { |
2195 | wi->val_only = true; |
2196 | wi->is_lhs = false; |
2197 | convert_local_reference_op (tp: &OMP_CLAUSE_OPERAND (clause, 0), |
2198 | walk_subtrees: &dummy, data: wi); |
2199 | } |
2200 | |
2201 | /* The gang clause accepts two arguments. */ |
2202 | if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_GANG |
2203 | && OMP_CLAUSE_GANG_STATIC_EXPR (clause)) |
2204 | { |
2205 | wi->val_only = true; |
2206 | wi->is_lhs = false; |
2207 | convert_nonlocal_reference_op |
2208 | (tp: &OMP_CLAUSE_GANG_STATIC_EXPR (clause), walk_subtrees: &dummy, data: wi); |
2209 | } |
2210 | break; |
2211 | |
2212 | case OMP_CLAUSE_DIST_SCHEDULE: |
2213 | if (OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (clause) != NULL) |
2214 | { |
2215 | wi->val_only = true; |
2216 | wi->is_lhs = false; |
2217 | convert_local_reference_op (tp: &OMP_CLAUSE_OPERAND (clause, 0), |
2218 | walk_subtrees: &dummy, data: wi); |
2219 | } |
2220 | break; |
2221 | |
2222 | case OMP_CLAUSE_MAP: |
2223 | case OMP_CLAUSE_TO: |
2224 | case OMP_CLAUSE_FROM: |
2225 | if (OMP_CLAUSE_SIZE (clause)) |
2226 | { |
2227 | wi->val_only = true; |
2228 | wi->is_lhs = false; |
2229 | convert_local_reference_op (tp: &OMP_CLAUSE_SIZE (clause), |
2230 | walk_subtrees: &dummy, data: wi); |
2231 | } |
2232 | if (DECL_P (OMP_CLAUSE_DECL (clause))) |
2233 | goto do_decl_clause; |
2234 | wi->val_only = true; |
2235 | wi->is_lhs = false; |
2236 | walk_tree (&OMP_CLAUSE_DECL (clause), convert_local_reference_op, |
2237 | wi, NULL); |
2238 | break; |
2239 | |
2240 | case OMP_CLAUSE_ALIGNED: |
2241 | if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause)) |
2242 | { |
2243 | wi->val_only = true; |
2244 | wi->is_lhs = false; |
2245 | convert_local_reference_op |
2246 | (tp: &OMP_CLAUSE_ALIGNED_ALIGNMENT (clause), walk_subtrees: &dummy, data: wi); |
2247 | } |
2248 | /* FALLTHRU */ |
2249 | case OMP_CLAUSE_NONTEMPORAL: |
2250 | do_decl_clause_no_supp: |
2251 | /* Like do_decl_clause, but don't add any suppression. */ |
2252 | decl = OMP_CLAUSE_DECL (clause); |
2253 | if (VAR_P (decl) |
2254 | && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))) |
2255 | break; |
2256 | if (decl_function_context (decl) == info->context |
2257 | && !use_pointer_in_frame (decl)) |
2258 | { |
2259 | tree field = lookup_field_for_decl (info, decl, insert: NO_INSERT); |
2260 | if (field) |
2261 | { |
2262 | OMP_CLAUSE_DECL (clause) |
2263 | = get_local_debug_decl (info, decl, field); |
2264 | need_frame = true; |
2265 | } |
2266 | } |
2267 | break; |
2268 | |
2269 | case OMP_CLAUSE_ALLOCATE: |
2270 | if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause)) |
2271 | { |
2272 | wi->val_only = true; |
2273 | wi->is_lhs = false; |
2274 | convert_local_reference_op |
2275 | (tp: &OMP_CLAUSE_ALLOCATE_ALLOCATOR (clause), walk_subtrees: &dummy, data: wi); |
2276 | } |
2277 | goto do_decl_clause_no_supp; |
2278 | |
2279 | case OMP_CLAUSE_NOWAIT: |
2280 | case OMP_CLAUSE_ORDERED: |
2281 | case OMP_CLAUSE_DEFAULT: |
2282 | case OMP_CLAUSE_COPYIN: |
2283 | case OMP_CLAUSE_COLLAPSE: |
2284 | case OMP_CLAUSE_TILE: |
2285 | case OMP_CLAUSE_UNTIED: |
2286 | case OMP_CLAUSE_MERGEABLE: |
2287 | case OMP_CLAUSE_PROC_BIND: |
2288 | case OMP_CLAUSE_NOGROUP: |
2289 | case OMP_CLAUSE_THREADS: |
2290 | case OMP_CLAUSE_SIMD: |
2291 | case OMP_CLAUSE_DEFAULTMAP: |
2292 | case OMP_CLAUSE_ORDER: |
2293 | case OMP_CLAUSE_SEQ: |
2294 | case OMP_CLAUSE_INDEPENDENT: |
2295 | case OMP_CLAUSE_AUTO: |
2296 | case OMP_CLAUSE_IF_PRESENT: |
2297 | case OMP_CLAUSE_FINALIZE: |
2298 | case OMP_CLAUSE_BIND: |
2299 | case OMP_CLAUSE__CONDTEMP_: |
2300 | case OMP_CLAUSE__SCANTEMP_: |
2301 | break; |
2302 | |
2303 | /* The following clause belongs to the OpenACC cache directive, which |
2304 | is discarded during gimplification. */ |
2305 | case OMP_CLAUSE__CACHE_: |
2306 | /* The following clauses are only allowed in the OpenMP declare simd |
2307 | directive, so not seen here. */ |
2308 | case OMP_CLAUSE_UNIFORM: |
2309 | case OMP_CLAUSE_INBRANCH: |
2310 | case OMP_CLAUSE_NOTINBRANCH: |
2311 | /* The following clauses are only allowed on OpenMP cancel and |
2312 | cancellation point directives, which at this point have already |
2313 | been lowered into a function call. */ |
2314 | case OMP_CLAUSE_FOR: |
2315 | case OMP_CLAUSE_PARALLEL: |
2316 | case OMP_CLAUSE_SECTIONS: |
2317 | case OMP_CLAUSE_TASKGROUP: |
2318 | /* The following clauses are only added during OMP lowering; nested |
2319 | function decomposition happens before that. */ |
2320 | case OMP_CLAUSE__LOOPTEMP_: |
2321 | case OMP_CLAUSE__REDUCTEMP_: |
2322 | case OMP_CLAUSE__SIMDUID_: |
2323 | case OMP_CLAUSE__SIMT_: |
2324 | /* The following clauses are only allowed on OpenACC 'routine' |
2325 | directives, not seen here. */ |
2326 | case OMP_CLAUSE_NOHOST: |
2327 | /* Anything else. */ |
2328 | default: |
2329 | gcc_unreachable (); |
2330 | } |
2331 | } |
2332 | |
2333 | info->suppress_expansion = new_suppress; |
2334 | |
2335 | if (need_stmts) |
2336 | for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause)) |
2337 | switch (OMP_CLAUSE_CODE (clause)) |
2338 | { |
2339 | case OMP_CLAUSE_REDUCTION: |
2340 | case OMP_CLAUSE_IN_REDUCTION: |
2341 | case OMP_CLAUSE_TASK_REDUCTION: |
2342 | if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause)) |
2343 | { |
2344 | tree old_context |
2345 | = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause)); |
2346 | DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause)) |
2347 | = info->context; |
2348 | if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause)) |
2349 | DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause)) |
2350 | = info->context; |
2351 | walk_body (callback_stmt: convert_local_reference_stmt, |
2352 | callback_op: convert_local_reference_op, info, |
2353 | pseq: &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause)); |
2354 | walk_body (callback_stmt: convert_local_reference_stmt, |
2355 | callback_op: convert_local_reference_op, info, |
2356 | pseq: &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause)); |
2357 | DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause)) |
2358 | = old_context; |
2359 | if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause)) |
2360 | DECL_CONTEXT (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clause)) |
2361 | = old_context; |
2362 | } |
2363 | break; |
2364 | |
2365 | case OMP_CLAUSE_LASTPRIVATE: |
2366 | walk_body (callback_stmt: convert_local_reference_stmt, |
2367 | callback_op: convert_local_reference_op, info, |
2368 | pseq: &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause)); |
2369 | break; |
2370 | |
2371 | case OMP_CLAUSE_LINEAR: |
2372 | walk_body (callback_stmt: convert_local_reference_stmt, |
2373 | callback_op: convert_local_reference_op, info, |
2374 | pseq: &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (clause)); |
2375 | break; |
2376 | |
2377 | default: |
2378 | break; |
2379 | } |
2380 | |
2381 | return need_frame; |
2382 | } |
2383 | |
2384 | |
/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
   and PARM_DECLs that were referenced by inner nested functions.
   The rewrite will be a structure reference to the local frame variable.
   Statement-level counterpart of convert_local_reference_op: handles the
   OMP constructs (clauses plus nested bodies with their own suppression
   and local-var-chain scope) and a few special statement forms.  */

static tree
convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  tree save_local_var_chain;
  bitmap save_suppress;
  char save_static_chain_added;
  bool frame_decl_added;
  gimple *stmt = gsi_stmt (i: *gsi);

  switch (gimple_code (g: stmt))
    {
    case GIMPLE_OMP_TEAMS:
      /* Host teams are handled like the other simple constructs below;
	 non-host teams fall through to the taskreg handling.  */
      if (!gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt)))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (pclauses: gimple_omp_teams_clauses_ptr (gs: stmt), wi);
	  walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		     info, pseq: gimple_omp_body_ptr (gs: stmt));
	  info->suppress_expansion = save_suppress;
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* If any clause referenced the frame, share the frame object with
	 the outlined region via an OMP_CLAUSE_SHARED.  */
      if (convert_local_omp_clauses (pclauses: gimple_omp_taskreg_clauses_ptr (gs: stmt),
				     wi))
	{
	  tree c = build_omp_clause (gimple_location (g: stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
	  gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      /* Give the body its own local-var chain and static_chain_added
	 accounting; restore the outer values afterwards.  */
      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op, info,
		 pseq: gimple_omp_body_ptr (gs: stmt));

      /* The body itself may also have needed the frame; add the shared
	 clause now if the clause walk above didn't already.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (g: stmt),
				     OMP_CLAUSE_SHARED);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
	  info->static_chain_added |= 4;
	  gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
	}
      /* Temporaries created while walking the body belong at its head.  */
      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (pclauses: gimple_omp_for_clauses_ptr (gs: stmt), wi);
      /* The loop bounds/steps need their own walk in addition to the
	 body.  */
      walk_gimple_omp_for (for_stmt: as_a <gomp_for *> (p: stmt),
			   callback_stmt: convert_local_reference_stmt,
			   callback_op: convert_local_reference_op, info);
      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		 info, pseq: gimple_omp_body_ptr (gs: stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SECTIONS:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (pclauses: gimple_omp_sections_clauses_ptr (gs: stmt), wi);
      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		 info, pseq: gimple_omp_body_ptr (gs: stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SINGLE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (pclauses: gimple_omp_single_clauses_ptr (gs: stmt), wi);
      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		 info, pseq: gimple_omp_body_ptr (gs: stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_SCOPE:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (pclauses: gimple_omp_scope_clauses_ptr (gs: stmt), wi);
      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		 info, pseq: gimple_omp_body_ptr (gs: stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TASKGROUP:
      save_suppress = info->suppress_expansion;
      convert_local_omp_clauses (pclauses: gimple_omp_taskgroup_clauses_ptr (gs: stmt), wi);
      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		 info, pseq: gimple_omp_body_ptr (gs: stmt));
      info->suppress_expansion = save_suppress;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target regions execute on the host and need no
	 mapping of the frame.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  save_suppress = info->suppress_expansion;
	  convert_local_omp_clauses (pclauses: gimple_omp_target_clauses_ptr (gs: stmt), wi);
	  info->suppress_expansion = save_suppress;
	  walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		     info, pseq: gimple_omp_body_ptr (gs: stmt));
	  break;
	}
      save_suppress = info->suppress_expansion;
      frame_decl_added = false;
      /* Offloaded region: map the whole frame object to/from the device
	 if any clause needed it.  */
      if (convert_local_omp_clauses (pclauses: gimple_omp_target_clauses_ptr (gs: stmt), wi))
	{
	  tree c = build_omp_clause (gimple_location (g: stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (gs: stmt);
	  gimple_omp_target_set_clauses (omp_target_stmt: as_a <gomp_target *> (p: stmt), clauses: c);
	  info->static_chain_added |= 4;
	  frame_decl_added = true;
	}

      save_local_var_chain = info->new_local_var_chain;
      save_static_chain_added = info->static_chain_added;
      info->new_local_var_chain = NULL;
      info->static_chain_added = 0;

      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op, info,
		 pseq: gimple_omp_body_ptr (gs: stmt));

      /* As above, the body walk itself may have required the frame.  */
      if ((info->static_chain_added & 4) != 0 && !frame_decl_added)
	{
	  tree c = build_omp_clause (gimple_location (g: stmt), OMP_CLAUSE_MAP);
	  (void) get_frame_type (info);
	  OMP_CLAUSE_DECL (c) = info->frame_decl;
	  OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
	  OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (gs: stmt);
	  gimple_omp_target_set_clauses (omp_target_stmt: as_a <gomp_target *> (p: stmt), clauses: c);
	  info->static_chain_added |= 4;
	}

      if (info->new_local_var_chain)
	declare_vars (info->new_local_var_chain,
		      gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)), false);
      info->new_local_var_chain = save_local_var_chain;
      info->suppress_expansion = save_suppress;
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
      /* Clause-less (for our purposes) constructs: just walk the body.  */
      walk_body (callback_stmt: convert_local_reference_stmt, callback_op: convert_local_reference_op,
		 info, pseq: gimple_omp_body_ptr (gs: stmt));
      break;

    case GIMPLE_COND:
      /* Condition operands are plain rvalues.  */
      wi->val_only = true;
      wi->is_lhs = false;
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_ASSIGN:
      if (gimple_clobber_p (s: stmt))
	{
	  /* A clobber of a variable that now lives in the frame is
	     meaningless (the frame outlives it); drop it.  */
	  tree lhs = gimple_assign_lhs (gs: stmt);
	  if (DECL_P (lhs)
	      && decl_function_context (lhs) == info->context
	      && !use_pointer_in_frame (decl: lhs)
	      && lookup_field_for_decl (info, decl: lhs, insert: NO_INSERT))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      break;
	    }
	}
      *handled_ops_p = false;
      return NULL_TREE;

    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (bind_stmt: as_a <gbind *> (p: stmt));
	   var;
	   var = DECL_CHAIN (var))
	if (TREE_CODE (var) == NAMELIST_DECL)
	  {
	    /* Adjust decls mentioned in NAMELIST_DECL.  */
	    tree decls = NAMELIST_DECL_ASSOCIATED_DECL (var);
	    tree decl;
	    unsigned int i;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (decls), i, decl)
	      {
		if (VAR_P (decl)
		    && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
		  continue;
		if (decl_function_context (decl) == info->context
		    && !use_pointer_in_frame (decl))
		  {
		    tree field = lookup_field_for_decl (info, decl, insert: NO_INSERT);
		    if (field)
		      {
			CONSTRUCTOR_ELT (decls, i)->value
			  = get_local_debug_decl (info, decl, field);
		      }
		  }
	      }
	  }

      *handled_ops_p = false;
      return NULL_TREE;

    default:
      /* For every other statement that we are not interested in
	 handling here, let the walker traverse the operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Indicate that we have handled all the operands ourselves.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2628 | |
2629 | |
2630 | /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs |
2631 | that reference labels from outer functions. The rewrite will be a |
2632 | call to __builtin_nonlocal_goto. */ |
2633 | |
static tree
convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree label, new_label, target_context, x, field;
  gcall *call;
  gimple *stmt = gsi_stmt (i: *gsi);

  /* Only GIMPLE_GOTO statements are of interest here.  */
  if (gimple_code (g: stmt) != GIMPLE_GOTO)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A computed goto (destination not a LABEL_DECL) is left alone.  */
  label = gimple_goto_dest (gs: stmt);
  if (TREE_CODE (label) != LABEL_DECL)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* A goto to a label of the current function is an ordinary goto.  */
  target_context = decl_function_context (label);
  if (target_context == info->context)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* Find the nesting_info of the enclosing function that owns LABEL.  */
  for (i = info->outer; target_context != i->context; i = i->outer)
    continue;

  /* The original user label may also be used for a normal goto, therefore
     we must create a new label that will actually receive the abnormal
     control transfer.  This new label will be marked LABEL_NONLOCAL; this
     mark will trigger proper behavior in the cfg, as well as cause the
     (hairy target-specific) non-local goto receiver code to be generated
     when we expand rtl.  Enter this association into var_map so that we
     can insert the new label into the IL during a second pass.  */
  tree *slot = &i->var_map->get_or_insert (k: label);
  if (*slot == NULL)
    {
      new_label = create_artificial_label (UNKNOWN_LOCATION);
      DECL_NONLOCAL (new_label) = 1;
      *slot = new_label;
    }
  else
    new_label = *slot;

  /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
  field = get_nl_goto_field (info: i);
  x = get_frame_field (info, target_context, field, gsi);
  x = build_addr (exp: x);
  x = gsi_gimplify_val (info, exp: x, gsi);
  call = gimple_build_call (builtin_decl_implicit (fncode: BUILT_IN_NONLOCAL_GOTO),
			    2, build_addr (exp: new_label), x);
  gsi_replace (gsi, call, false);

  /* We have handled all of STMT's operands, no need to keep going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}
2696 | |
2697 | |
2698 | /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels |
2699 | are referenced via nonlocal goto from a nested function. The rewrite |
2700 | will involve installing a newly generated DECL_NONLOCAL label, and |
2701 | (potentially) a branch around the rtl gunk that is assumed to be |
2702 | attached to such a label. */ |
2703 | |
static tree
convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			  struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree label, new_label;
  gimple_stmt_iterator tmp_gsi;
  glabel *stmt = dyn_cast <glabel *> (p: gsi_stmt (i: *gsi));

  /* Only label statements are of interest.  */
  if (!stmt)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  label = gimple_label_label (gs: stmt);

  /* var_map maps a user label to the DECL_NONLOCAL twin created by
     convert_nl_goto_reference; a label not in the map is not the target
     of any non-local goto, so there is nothing to do.  */
  tree *slot = info->var_map->get (k: label);
  if (!slot)
    {
      *handled_ops_p = false;
      return NULL_TREE;
    }

  /* If there's any possibility that the previous statement falls through,
     then we must branch around the new non-local label.  */
  tmp_gsi = wi->gsi;
  gsi_prev (i: &tmp_gsi);
  if (gsi_end_p (i: tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (i: tmp_gsi)))
    {
      gimple *stmt = gimple_build_goto (dest: label);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
    }

  /* Install the non-local label immediately before the user label.  */
  new_label = (tree) *slot;
  stmt = gimple_build_label (label: new_label);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

  *handled_ops_p = true;
  return NULL_TREE;
}
2745 | |
2746 | |
/* Called via walk_function+walk_stmt, rewrite all references to addresses
   of nested functions that require the use of trampolines.  The rewrite
   will involve a reference to a trampoline generated for the occasion.  */
2750 | |
static tree
convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
  tree t = *tp, decl, target_context, x, builtin;
  bool descr;
  gcall *call;

  /* By default don't walk into subtrees; re-enabled below for nodes
     that are neither types nor decls.  */
  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Build
	   T.1 = &CHAIN->tramp;
	   T.2 = __builtin_adjust_trampoline (T.1);
	   T.3 = (func_type)T.2;
	 */

      decl = TREE_OPERAND (t, 0);
      if (TREE_CODE (decl) != FUNCTION_DECL)
	break;

      /* Only need to process nested functions.  */
      target_context = decl_function_context (decl);
      if (!target_context)
	break;

      /* If the nested function doesn't use a static chain, then
	 it doesn't need a trampoline.  */
      if (!DECL_STATIC_CHAIN (decl))
	break;

      /* If we don't want a trampoline, then don't build one.  */
      if (TREE_NO_TRAMPOLINE (t))
	break;

      /* Lookup the immediate parent of the callee, as that's where
	 we need to insert the trampoline.  */
      for (i = info; i->context != target_context; i = i->outer)
	continue;

      /* Decide whether to generate a descriptor or a trampoline. */
      descr = FUNC_ADDR_BY_DESCRIPTOR (t) && !flag_trampolines;

      /* Ensure the parent has a frame field for the trampoline or
	 descriptor of DECL.  */
      if (descr)
	x = lookup_descr_for_decl (info: i, decl, insert: INSERT);
      else
	x = lookup_tramp_for_decl (info: i, decl, insert: INSERT);

      /* Compute the address of the field holding the trampoline.  */
      x = get_frame_field (info, target_context, field: x, gsi: &wi->gsi);

      /* APB: We don't need to do the adjustment calls when using off-stack
	 trampolines, any such adjustment will be done when the off-stack
	 trampoline is created.  */
      if (!descr && flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP)
	x = gsi_gimplify_val (info, exp: x, gsi: &wi->gsi);
      else
	{
	  x = build_addr (exp: x);

	  x = gsi_gimplify_val (info, exp: x, gsi: &wi->gsi);

	  /* Do machine-specific ugliness.  Normally this will involve
	     computing extra alignment, but it can really be anything.  */
	  if (descr)
	    builtin = builtin_decl_implicit (fncode: BUILT_IN_ADJUST_DESCRIPTOR);
	  else
	    builtin = builtin_decl_implicit (fncode: BUILT_IN_ADJUST_TRAMPOLINE);
	  call = gimple_build_call (builtin, 1, x);
	  x = init_tmp_var_with_call (info, gsi: &wi->gsi, call);
	}

      /* Cast back to the proper function type.  */
      x = build1 (NOP_EXPR, TREE_TYPE (t), x);
      x = init_tmp_var (info, exp: x, gsi: &wi->gsi);

      /* Replace the original ADDR_EXPR with the adjusted temporary.  */
      *tp = x;
      break;

    default:
      if (!IS_TYPE_OR_DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}
2840 | |
2841 | |
/* Called via walk_function+walk_gimple_stmt, rewrite all references
   to addresses of nested functions that require the use of
   trampolines.  The rewrite will involve a reference to a trampoline
   generated for the occasion.  */
2846 | |
static tree
convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
			      struct walk_stmt_info *wi)
{
  struct nesting_info *info = (struct nesting_info *) wi->info;
  gimple *stmt = gsi_stmt (i: *gsi);

  switch (gimple_code (g: stmt))
    {
    case GIMPLE_CALL:
      {
	/* Only walk call arguments, lest we generate trampolines for
	   direct calls.  */
	unsigned long i, nargs = gimple_call_num_args (gs: stmt);
	for (i = 0; i < nargs; i++)
	  walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
		     wi, NULL);
	break;
      }

    case GIMPLE_OMP_TEAMS:
      /* Only host teams constructs get the parallel-style handling
	 below; other teams regions are left to the generic walk.  */
      if (!gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt)))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      goto do_parallel;

    case GIMPLE_OMP_TARGET:
      /* Likewise, only offloaded target regions need the clause
	 bookkeeping below.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  *handled_ops_p = false;
	  return NULL_TREE;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    do_parallel:
      {
	/* Walking the region body may create new temporaries and new
	   uses of FRAME/CHAIN; collect them separately so they can be
	   declared inside the region and reflected in its clauses.  */
	tree save_local_var_chain = info->new_local_var_chain;
	walk_gimple_op (stmt, convert_tramp_reference_op, wi);
	info->new_local_var_chain = NULL;
	char save_static_chain_added = info->static_chain_added;
	info->static_chain_added = 0;
	walk_body (callback_stmt: convert_tramp_reference_stmt, callback_op: convert_tramp_reference_op,
		   info, pseq: gimple_omp_body_ptr (gs: stmt));
	if (info->new_local_var_chain)
	  declare_vars (info->new_local_var_chain,
			gimple_seq_first_stmt (s: gimple_omp_body (gs: stmt)),
			false);
	/* Bit 0 of static_chain_added stands for FRAME, bit 1 for
	   CHAIN; add the matching clause for each bit that got set.  */
	for (int i = 0; i < 2; i++)
	  {
	    tree c, decl;
	    if ((info->static_chain_added & (1 << i)) == 0)
	      continue;
	    decl = i ? get_chain_decl (info) : info->frame_decl;
	    /* Don't add CHAIN.* or FRAME.* twice.  */
	    for (c = gimple_omp_taskreg_clauses (gs: stmt);
		 c;
		 c = OMP_CLAUSE_CHAIN (c))
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		  && OMP_CLAUSE_DECL (c) == decl)
		break;
	    if (c == NULL && gimple_code (g: stmt) != GIMPLE_OMP_TARGET)
	      {
		c = build_omp_clause (gimple_location (g: stmt),
				      i ? OMP_CLAUSE_FIRSTPRIVATE
				      : OMP_CLAUSE_SHARED);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
		gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
	      }
	    else if (c == NULL)
	      {
		/* Offloaded target regions use map clauses instead.  */
		c = build_omp_clause (gimple_location (g: stmt),
				      OMP_CLAUSE_MAP);
		OMP_CLAUSE_DECL (c) = decl;
		OMP_CLAUSE_SET_MAP_KIND (c,
					 i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
		OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (gs: stmt);
		gimple_omp_target_set_clauses (omp_target_stmt: as_a <gomp_target *> (p: stmt),
					       clauses: c);
	      }
	  }
	info->new_local_var_chain = save_local_var_chain;
	info->static_chain_added |= save_static_chain_added;
      }
      break;

    default:
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
2946 | |
2947 | |
2948 | |
2949 | /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs |
2950 | that reference nested functions to make sure that the static chain |
2951 | is set up properly for the call. */ |
2952 | |
static tree
convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
{
  struct nesting_info *const info = (struct nesting_info *) wi->info;
  tree decl, target_context;
  char save_static_chain_added;
  int i;
  gimple *stmt = gsi_stmt (i: *gsi);

  switch (gimple_code (g: stmt))
    {
    case GIMPLE_CALL:
      /* A call that already carries a static chain needs no fixup.  */
      if (gimple_call_chain (gs: stmt))
	break;
      /* Indirect calls have no fndecl; nothing to do for them here.  */
      decl = gimple_call_fndecl (gs: stmt);
      if (!decl)
	break;
      target_context = decl_function_context (decl);
      if (target_context && DECL_STATIC_CHAIN (decl))
	{
	  struct nesting_info *i = info;
	  while (i && i->context != target_context)
	    i = i->outer;
	  /* If none of the outer contexts is the target context, this means
	     that the function is called in a wrong context.  */
	  if (!i)
	    internal_error ("%s from %s called in %s" ,
			    IDENTIFIER_POINTER (DECL_NAME (decl)),
			    IDENTIFIER_POINTER (DECL_NAME (target_context)),
			    IDENTIFIER_POINTER (DECL_NAME (info->context)));

	  gimple_call_set_chain (call_stmt: as_a <gcall *> (p: stmt),
				 chain: get_static_chain (info, target_context,
						   gsi: &wi->gsi));
	  /* Record what the chain was built from: bit 0 means the local
	     FRAME object (callee is our direct child), bit 1 means the
	     incoming CHAIN pointer (callee belongs to an outer level).  */
	  info->static_chain_added |= (1 << (info->context != target_context));
	}
      break;

    case GIMPLE_OMP_TEAMS:
      /* Only host teams constructs share the parallel/task handling
	 below; for other teams regions just process the body.  */
      if (!gimple_omp_teams_host (omp_teams_stmt: as_a <gomp_teams *> (p: stmt)))
	{
	  walk_body (callback_stmt: convert_gimple_call, NULL, info,
		     pseq: gimple_omp_body_ptr (gs: stmt));
	  break;
	}
      /* FALLTHRU */

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      /* Calls inside the region body may set static_chain_added bits;
	 reflect each bit (0 = FRAME, 1 = CHAIN) as a SHARED or
	 FIRSTPRIVATE clause on the construct.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (callback_stmt: convert_gimple_call, NULL, info, pseq: gimple_omp_body_ptr (gs: stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_taskreg_clauses (gs: stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (g: stmt),
				    i ? OMP_CLAUSE_FIRSTPRIVATE
				    : OMP_CLAUSE_SHARED);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (gs: stmt);
	      gimple_omp_taskreg_set_clauses (gs: stmt, clauses: c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_TARGET:
      /* Non-offloaded target constructs only need their body walked.  */
      if (!is_gimple_omp_offloaded (stmt))
	{
	  walk_body (callback_stmt: convert_gimple_call, NULL, info, pseq: gimple_omp_body_ptr (gs: stmt));
	  break;
	}
      /* Offloaded regions need FRAME/CHAIN mapped rather than shared.  */
      save_static_chain_added = info->static_chain_added;
      info->static_chain_added = 0;
      walk_body (callback_stmt: convert_gimple_call, NULL, info, pseq: gimple_omp_body_ptr (gs: stmt));
      for (i = 0; i < 2; i++)
	{
	  tree c, decl;
	  if ((info->static_chain_added & (1 << i)) == 0)
	    continue;
	  decl = i ? get_chain_decl (info) : info->frame_decl;
	  /* Don't add CHAIN.* or FRAME.* twice.  */
	  for (c = gimple_omp_target_clauses (gs: stmt);
	       c;
	       c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& OMP_CLAUSE_DECL (c) == decl)
	      break;
	  if (c == NULL)
	    {
	      c = build_omp_clause (gimple_location (g: stmt), OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (c) = decl;
	      OMP_CLAUSE_SET_MAP_KIND (c, i ? GOMP_MAP_TO : GOMP_MAP_TOFROM);
	      OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (gs: stmt);
	      gimple_omp_target_set_clauses (omp_target_stmt: as_a <gomp_target *> (p: stmt),
					     clauses: c);
	    }
	}
      info->static_chain_added |= save_static_chain_added;
      break;

    case GIMPLE_OMP_FOR:
      walk_body (callback_stmt: convert_gimple_call, NULL, info,
		 pseq: gimple_omp_for_pre_body_ptr (gs: stmt));
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
      /* These constructs carry no clauses of interest; just recurse.  */
      walk_body (callback_stmt: convert_gimple_call, NULL, info, pseq: gimple_omp_body_ptr (gs: stmt));
      break;

    default:
      /* Keep looking for other operands.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

  *handled_ops_p = true;
  return NULL_TREE;
}
3096 | |
3097 | /* Walk the nesting tree starting with ROOT. Convert all trampolines and |
3098 | call expressions. At the same time, determine if a nested function |
3099 | actually uses its static chain; if not, remember that. */ |
3100 | |
static void
convert_all_function_calls (struct nesting_info *root)
{
  unsigned int chain_count = 0, old_chain_count, iter_count;
  struct nesting_info *n;

  /* First, optimistically clear static_chain for all decls that haven't
     used the static chain already for variable access.  But always create
     it if not optimizing.  This makes it possible to reconstruct the static
     nesting tree at run time and thus to resolve up-level references from
     within the debugger.  */
  FOR_EACH_NEST_INFO (n, root)
    {
      /* Thunks are synchronized with their target in the loop below.  */
      if (n->thunk_p)
	continue;
      tree decl = n->context;
      if (!optimize)
	{
	  if (n->inner)
	    (void) get_frame_type (info: n);
	  if (n->outer)
	    (void) get_chain_decl (info: n);
	}
      else if (!n->outer || (!n->chain_decl && !n->chain_field))
	{
	  DECL_STATIC_CHAIN (decl) = 0;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file, format: "Guessing no static-chain for %s\n" ,
		     lang_hooks.decl_printable_name (decl, 2));
	}
      else
	DECL_STATIC_CHAIN (decl) = 1;
      chain_count += DECL_STATIC_CHAIN (decl);
    }

  /* A thunk needs a static chain iff the function it forwards to does.  */
  FOR_EACH_NEST_INFO (n, root)
    if (n->thunk_p)
      {
	tree decl = n->context;
	tree alias = thunk_info::get (node: cgraph_node::get (decl))->alias;
	DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
      }

  /* Walk the functions and perform transformations.  Note that these
     transformations can induce new uses of the static chain, which in turn
     require re-examining all users of the decl.  */
  /* ??? It would make sense to try to use the call graph to speed this up,
     but the call graph hasn't really been built yet.  Even if it did, we
     would still need to iterate in this loop since address-of references
     wouldn't show up in the callgraph anyway.  */
  iter_count = 0;
  do
    {
      /* Iterate until the number of functions using a static chain
	 reaches a fixpoint.  */
      old_chain_count = chain_count;
      chain_count = 0;
      iter_count++;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fputc (c: '\n', stream: dump_file);

      FOR_EACH_NEST_INFO (n, root)
	{
	  if (n->thunk_p)
	    continue;
	  tree decl = n->context;
	  walk_function (callback_stmt: convert_tramp_reference_stmt,
			 callback_op: convert_tramp_reference_op, info: n);
	  walk_function (callback_stmt: convert_gimple_call, NULL, info: n);
	  chain_count += DECL_STATIC_CHAIN (decl);
	}

      /* Re-synchronize thunks with their targets after each pass.  */
      FOR_EACH_NEST_INFO (n, root)
	if (n->thunk_p)
	  {
	    tree decl = n->context;
	    tree alias = thunk_info::get (node: cgraph_node::get (decl))->alias;
	    DECL_STATIC_CHAIN (decl) = DECL_STATIC_CHAIN (alias);
	  }
    }
  while (chain_count != old_chain_count);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (stream: dump_file, format: "convert_all_function_calls iterations: %u\n\n" ,
	     iter_count);
}
3186 | |
/* Remapping context used while copying trees for debug_var_chain:
   extends copy_body_data with the nesting level whose var_map drives
   the remapping (consulted by nesting_copy_decl).  */
struct nesting_copy_body_data
{
  copy_body_data cb;		/* Base data for the tree-inline machinery.  */
  struct nesting_info *root;	/* Nesting level whose var_map is used.  */
};
3192 | |
3193 | /* A helper subroutine for debug_var_chain type remapping. */ |
3194 | |
3195 | static tree |
3196 | nesting_copy_decl (tree decl, copy_body_data *id) |
3197 | { |
3198 | struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id; |
3199 | tree *slot = nid->root->var_map->get (k: decl); |
3200 | |
3201 | if (slot) |
3202 | return (tree) *slot; |
3203 | |
3204 | if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl)) |
3205 | { |
3206 | tree new_decl = copy_decl_no_change (decl, id); |
3207 | DECL_ORIGINAL_TYPE (new_decl) |
3208 | = remap_type (DECL_ORIGINAL_TYPE (decl), id); |
3209 | return new_decl; |
3210 | } |
3211 | |
3212 | if (VAR_P (decl) |
3213 | || TREE_CODE (decl) == PARM_DECL |
3214 | || TREE_CODE (decl) == RESULT_DECL) |
3215 | return decl; |
3216 | |
3217 | return copy_decl_no_change (decl, id); |
3218 | } |
3219 | |
3220 | /* A helper function for remap_vla_decls. See if *TP contains |
3221 | some remapped variables. */ |
3222 | |
3223 | static tree |
3224 | contains_remapped_vars (tree *tp, int *walk_subtrees, void *data) |
3225 | { |
3226 | struct nesting_info *root = (struct nesting_info *) data; |
3227 | tree t = *tp; |
3228 | |
3229 | if (DECL_P (t)) |
3230 | { |
3231 | *walk_subtrees = 0; |
3232 | tree *slot = root->var_map->get (k: t); |
3233 | |
3234 | if (slot) |
3235 | return *slot; |
3236 | } |
3237 | return NULL; |
3238 | } |
3239 | |
3240 | /* Remap VLA decls in BLOCK and subblocks if remapped variables are |
3241 | involved. */ |
3242 | |
static void
remap_vla_decls (tree block, struct nesting_info *root)
{
  tree var, subblock, val, type;
  struct nesting_copy_body_data id;

  /* Process inner lexical scopes first.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = BLOCK_CHAIN (subblock))
    remap_vla_decls (block: subblock, root);

  /* First pass: check whether any VLA in this block needs remapping at
     all, so we can return before setting up the copy machinery.  VAR is
     left non-NULL at the first variable that needs work.  */
  for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	/* Only variably-modified variables whose value expression is an
	   indirection through another variable are of interest.  */
	if (! (INDIRECT_REF_P (val)
	       && VAR_P (TREE_OPERAND (val, 0))
	       && variably_modified_type_p (type, NULL)))
	  continue;

	if (root->var_map->get (TREE_OPERAND (val, 0))
	    || walk_tree (&type, contains_remapped_vars, root, NULL))
	  break;
      }

  if (var == NULL_TREE)
    return;

  memset (s: &id, c: 0, n: sizeof (id));
  id.cb.copy_decl = nesting_copy_decl;
  id.cb.decl_map = new hash_map<tree, tree>;
  id.root = root;

  /* Second pass: remap the type and value expression of each affected
     variable, starting from the first one found above.  */
  for (; var; var = DECL_CHAIN (var))
    if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var))
      {
	struct nesting_info *i;
	tree newt, context;

	val = DECL_VALUE_EXPR (var);
	type = TREE_TYPE (var);

	if (! (INDIRECT_REF_P (val)
	       && VAR_P (TREE_OPERAND (val, 0))
	       && variably_modified_type_p (type, NULL)))
	  continue;

	tree *slot = root->var_map->get (TREE_OPERAND (val, 0));
	if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
	  continue;

	/* Find the nesting level the variable belongs to; skip it if it
	   is not in ROOT's chain of enclosing functions.  */
	context = decl_function_context (var);
	for (i = root; i; i = i->outer)
	  if (i->context == context)
	    break;

	if (i == NULL)
	  continue;

	/* Fully expand value expressions.  This avoids having debug variables
	   only referenced from them and that can be swept during GC.  */
	if (slot)
	  {
	    tree t = (tree) *slot;
	    gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
	    val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
	  }

	id.cb.src_fn = i->context;
	id.cb.dst_fn = i->context;
	id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);

	/* Remap the (variably-modified) type itself.  */
	TREE_TYPE (var) = newt = remap_type (type, id: &id.cb);
	/* Strip matching unnamed pointer layers from both the old and
	   the new type to reach the named type underneath.  */
	while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
	  {
	    newt = TREE_TYPE (newt);
	    type = TREE_TYPE (type);
	  }
	if (TYPE_NAME (newt)
	    && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
	    && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
	    && newt != type
	    && TYPE_NAME (newt) == TYPE_NAME (type))
	  TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), id: &id.cb);

	walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
	if (val != DECL_VALUE_EXPR (var))
	  SET_DECL_VALUE_EXPR (var, val);
      }

  delete id.cb.decl_map;
}
3337 | |
3338 | /* Fixup VLA decls in BLOCK and subblocks if remapped variables are |
3339 | involved. */ |
3340 | |
3341 | static void |
3342 | fixup_vla_decls (tree block) |
3343 | { |
3344 | for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var)) |
3345 | if (VAR_P (var) && DECL_HAS_VALUE_EXPR_P (var)) |
3346 | { |
3347 | tree val = DECL_VALUE_EXPR (var); |
3348 | |
3349 | if (! (INDIRECT_REF_P (val) |
3350 | && VAR_P (TREE_OPERAND (val, 0)) |
3351 | && DECL_HAS_VALUE_EXPR_P (TREE_OPERAND (val, 0)))) |
3352 | continue; |
3353 | |
3354 | /* Fully expand value expressions. This avoids having debug variables |
3355 | only referenced from them and that can be swept during GC. */ |
3356 | val = build1 (INDIRECT_REF, TREE_TYPE (val), |
3357 | DECL_VALUE_EXPR (TREE_OPERAND (val, 0))); |
3358 | SET_DECL_VALUE_EXPR (var, val); |
3359 | } |
3360 | |
3361 | for (tree sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub)) |
3362 | fixup_vla_decls (block: sub); |
3363 | } |
3364 | |
3365 | /* Fold the MEM_REF *E. */ |
3366 | bool |
3367 | fold_mem_refs (tree *const &e, void *data ATTRIBUTE_UNUSED) |
3368 | { |
3369 | tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e); |
3370 | *ref_p = fold (*ref_p); |
3371 | return true; |
3372 | } |
3373 | |
3374 | /* Given DECL, a nested function, build an initialization call for FIELD, |
3375 | the trampoline or descriptor for DECL, using FUNC as the function. */ |
3376 | |
3377 | static gcall * |
3378 | build_init_call_stmt (struct nesting_info *info, tree decl, tree field, |
3379 | tree func) |
3380 | { |
3381 | tree arg1, arg2, arg3, x; |
3382 | |
3383 | gcc_assert (DECL_STATIC_CHAIN (decl)); |
3384 | arg3 = build_addr (exp: info->frame_decl); |
3385 | |
3386 | arg2 = build_addr (exp: decl); |
3387 | |
3388 | x = build3 (COMPONENT_REF, TREE_TYPE (field), |
3389 | info->frame_decl, field, NULL_TREE); |
3390 | arg1 = build_addr (exp: x); |
3391 | |
3392 | return gimple_build_call (func, 3, arg1, arg2, arg3); |
3393 | } |
3394 | |
3395 | /* Do "everything else" to clean up or complete state collected by the various |
3396 | walking passes -- create a field to hold the frame base address, lay out the |
3397 | types and decls, generate code to initialize the frame decl, store critical |
3398 | expressions in the struct function for rtl to find. */ |
3399 | |
3400 | static void |
3401 | finalize_nesting_tree_1 (struct nesting_info *root) |
3402 | { |
3403 | gimple_seq cleanup_list = NULL; |
3404 | gimple_seq stmt_list = NULL; |
3405 | gimple *stmt; |
3406 | tree context = root->context; |
3407 | struct function *sf; |
3408 | |
3409 | if (root->thunk_p) |
3410 | return; |
3411 | |
3412 | /* If we created a non-local frame type or decl, we need to lay them |
3413 | out at this time. */ |
3414 | if (root->frame_type) |
3415 | { |
3416 | /* Debugging information needs to compute the frame base address of the |
3417 | parent frame out of the static chain from the nested frame. |
3418 | |
3419 | The static chain is the address of the FRAME record, so one could |
3420 | imagine it would be possible to compute the frame base address just |
3421 | adding a constant offset to this address. Unfortunately, this is not |
3422 | possible: if the FRAME object has alignment constraints that are |
3423 | stronger than the stack, then the offset between the frame base and |
3424 | the FRAME object will be dynamic. |
3425 | |
3426 | What we do instead is to append a field to the FRAME object that holds |
3427 | the frame base address: then debug info just has to fetch this |
3428 | field. */ |
3429 | |
3430 | /* Debugging information will refer to the CFA as the frame base |
3431 | address: we will do the same here. */ |
3432 | const tree frame_addr_fndecl |
3433 | = builtin_decl_explicit (fncode: BUILT_IN_DWARF_CFA); |
3434 | |
3435 | /* Create a field in the FRAME record to hold the frame base address for |
3436 | this stack frame. Since it will be used only by the debugger, put it |
3437 | at the end of the record in order not to shift all other offsets. */ |
3438 | tree fb_decl = make_node (FIELD_DECL); |
3439 | |
3440 | DECL_NAME (fb_decl) = get_identifier ("FRAME_BASE.PARENT" ); |
3441 | TREE_TYPE (fb_decl) = ptr_type_node; |
3442 | TREE_ADDRESSABLE (fb_decl) = 1; |
3443 | DECL_CONTEXT (fb_decl) = root->frame_type; |
3444 | TYPE_FIELDS (root->frame_type) = chainon (TYPE_FIELDS (root->frame_type), |
3445 | fb_decl); |
3446 | |
3447 | /* In some cases the frame type will trigger the -Wpadded warning. |
3448 | This is not helpful; suppress it. */ |
3449 | int save_warn_padded = warn_padded; |
3450 | warn_padded = 0; |
3451 | layout_type (root->frame_type); |
3452 | warn_padded = save_warn_padded; |
3453 | layout_decl (root->frame_decl, 0); |
3454 | |
3455 | /* Initialize the frame base address field. If the builtin we need is |
3456 | not available, set it to NULL so that debugging information does not |
3457 | reference junk. */ |
3458 | tree fb_ref = build3 (COMPONENT_REF, TREE_TYPE (fb_decl), |
3459 | root->frame_decl, fb_decl, NULL_TREE); |
3460 | tree fb_tmp; |
3461 | |
3462 | if (frame_addr_fndecl != NULL_TREE) |
3463 | { |
3464 | gcall *fb_gimple = gimple_build_call (frame_addr_fndecl, 1, |
3465 | integer_zero_node); |
3466 | gimple_stmt_iterator gsi = gsi_last (seq&: stmt_list); |
3467 | |
3468 | fb_tmp = init_tmp_var_with_call (info: root, gsi: &gsi, call: fb_gimple); |
3469 | } |
3470 | else |
3471 | fb_tmp = build_int_cst (TREE_TYPE (fb_ref), 0); |
3472 | gimple_seq_add_stmt (&stmt_list, |
3473 | gimple_build_assign (fb_ref, fb_tmp)); |
3474 | |
3475 | declare_vars (root->frame_decl, |
3476 | gimple_seq_first_stmt (s: gimple_body (context)), true); |
3477 | } |
3478 | |
3479 | /* If any parameters were referenced non-locally, then we need to insert |
3480 | a copy or a pointer. */ |
3481 | if (root->any_parm_remapped) |
3482 | { |
3483 | tree p; |
3484 | for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p)) |
3485 | { |
3486 | tree field, x, y; |
3487 | |
3488 | field = lookup_field_for_decl (info: root, decl: p, insert: NO_INSERT); |
3489 | if (!field) |
3490 | continue; |
3491 | |
3492 | if (use_pointer_in_frame (decl: p)) |
3493 | x = build_addr (exp: p); |
3494 | else |
3495 | x = p; |
3496 | |
3497 | /* If the assignment is from a non-register the stmt is |
3498 | not valid gimple. Make it so by using a temporary instead. */ |
3499 | if (!is_gimple_reg (x) |
3500 | && is_gimple_reg_type (TREE_TYPE (x))) |
3501 | { |
3502 | gimple_stmt_iterator gsi = gsi_last (seq&: stmt_list); |
3503 | x = init_tmp_var (info: root, exp: x, gsi: &gsi); |
3504 | } |
3505 | |
3506 | y = build3 (COMPONENT_REF, TREE_TYPE (field), |
3507 | root->frame_decl, field, NULL_TREE); |
3508 | stmt = gimple_build_assign (y, x); |
3509 | gimple_seq_add_stmt (&stmt_list, stmt); |
3510 | } |
3511 | } |
3512 | |
3513 | /* If a chain_field was created, then it needs to be initialized |
3514 | from chain_decl. */ |
3515 | if (root->chain_field) |
3516 | { |
3517 | tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field), |
3518 | root->frame_decl, root->chain_field, NULL_TREE); |
3519 | stmt = gimple_build_assign (x, get_chain_decl (info: root)); |
3520 | gimple_seq_add_stmt (&stmt_list, stmt); |
3521 | } |
3522 | |
3523 | /* If trampolines were created, then we need to initialize them. */ |
3524 | if (root->any_tramp_created) |
3525 | { |
3526 | struct nesting_info *i; |
3527 | for (i = root->inner; i ; i = i->next) |
3528 | { |
3529 | tree field, x; |
3530 | |
3531 | field = lookup_tramp_for_decl (info: root, decl: i->context, insert: NO_INSERT); |
3532 | if (!field) |
3533 | continue; |
3534 | |
3535 | if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP) |
3536 | { |
3537 | /* We pass a whole bunch of arguments to the builtin function that |
3538 | creates the off-stack trampoline, these are |
3539 | 1. The nested function chain value (that must be passed to the |
3540 | nested function so it can find the function arguments). |
3541 | 2. A pointer to the nested function implementation, |
3542 | 3. The address in the local stack frame where we should write |
3543 | the address of the trampoline. |
3544 | |
3545 | When this code was originally written I just kind of threw |
3546 | everything at the builtin, figuring I'd work out what was |
3547 | actually needed later, I think, the stack pointer could |
3548 | certainly be dropped, arguments #2 and #4 are based off the |
3549 | stack pointer anyway, so #1 doesn't seem to add much value. */ |
3550 | tree arg1, arg2, arg3; |
3551 | |
3552 | gcc_assert (DECL_STATIC_CHAIN (i->context)); |
3553 | arg1 = build_addr (exp: root->frame_decl); |
3554 | arg2 = build_addr (exp: i->context); |
3555 | |
3556 | x = build3 (COMPONENT_REF, TREE_TYPE (field), |
3557 | root->frame_decl, field, NULL_TREE); |
3558 | arg3 = build_addr (exp: x); |
3559 | |
3560 | x = builtin_decl_implicit (fncode: BUILT_IN_NESTED_PTR_CREATED); |
3561 | stmt = gimple_build_call (x, 3, arg1, arg2, arg3); |
3562 | gimple_seq_add_stmt (&stmt_list, stmt); |
3563 | |
3564 | /* This call to delete the nested function trampoline is added to |
3565 | the cleanup list, and called when we exit the current scope. */ |
3566 | x = builtin_decl_implicit (fncode: BUILT_IN_NESTED_PTR_DELETED); |
3567 | stmt = gimple_build_call (x, 0); |
3568 | gimple_seq_add_stmt (&cleanup_list, stmt); |
3569 | } |
3570 | else |
3571 | { |
3572 | /* Original code to initialise the on stack trampoline. */ |
3573 | x = builtin_decl_implicit (fncode: BUILT_IN_INIT_TRAMPOLINE); |
3574 | stmt = build_init_call_stmt (info: root, decl: i->context, field, func: x); |
3575 | gimple_seq_add_stmt (&stmt_list, stmt); |
3576 | } |
3577 | } |
3578 | } |
3579 | |
3580 | /* If descriptors were created, then we need to initialize them. */ |
3581 | if (root->any_descr_created) |
3582 | { |
3583 | struct nesting_info *i; |
3584 | for (i = root->inner; i ; i = i->next) |
3585 | { |
3586 | tree field, x; |
3587 | |
3588 | field = lookup_descr_for_decl (info: root, decl: i->context, insert: NO_INSERT); |
3589 | if (!field) |
3590 | continue; |
3591 | |
3592 | x = builtin_decl_implicit (fncode: BUILT_IN_INIT_DESCRIPTOR); |
3593 | stmt = build_init_call_stmt (info: root, decl: i->context, field, func: x); |
3594 | gimple_seq_add_stmt (&stmt_list, stmt); |
3595 | } |
3596 | } |
3597 | |
3598 | /* If we created initialization statements, insert them. */ |
3599 | if (stmt_list) |
3600 | { |
3601 | if (flag_trampoline_impl == TRAMPOLINE_IMPL_HEAP) |
3602 | { |
3603 | /* Handle off-stack trampolines. */ |
3604 | gbind *bind; |
3605 | annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context)); |
3606 | annotate_all_with_location (cleanup_list, DECL_SOURCE_LOCATION (context)); |
3607 | bind = gimple_seq_first_stmt_as_a_bind (s: gimple_body (context)); |
3608 | gimple_seq_add_seq (&stmt_list, gimple_bind_body (gs: bind)); |
3609 | |
3610 | gimple_seq xxx_list = NULL; |
3611 | |
3612 | if (cleanup_list != NULL) |
3613 | { |
3614 | /* Maybe we shouldn't be creating this try/finally if -fno-exceptions is |
3615 | in use. If this is the case, then maybe we should, instead, be |
3616 | inserting the cleanup code onto every path out of this function? Not |
3617 | yet figured out how we would do this. */ |
3618 | gtry *t = gimple_build_try (stmt_list, cleanup_list, GIMPLE_TRY_FINALLY); |
3619 | gimple_seq_add_stmt (&xxx_list, t); |
3620 | } |
3621 | else |
3622 | xxx_list = stmt_list; |
3623 | |
3624 | gimple_bind_set_body (bind_stmt: bind, seq: xxx_list); |
3625 | } |
3626 | else |
3627 | { |
3628 | /* The traditional, on stack trampolines. */ |
3629 | gbind *bind; |
3630 | annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context)); |
3631 | bind = gimple_seq_first_stmt_as_a_bind (s: gimple_body (context)); |
3632 | gimple_seq_add_seq (&stmt_list, gimple_bind_body (gs: bind)); |
3633 | gimple_bind_set_body (bind_stmt: bind, seq: stmt_list); |
3634 | } |
3635 | } |
3636 | |
3637 | /* If a chain_decl was created, then it needs to be registered with |
3638 | struct function so that it gets initialized from the static chain |
3639 | register at the beginning of the function. */ |
3640 | sf = DECL_STRUCT_FUNCTION (root->context); |
3641 | sf->static_chain_decl = root->chain_decl; |
3642 | |
3643 | /* Similarly for the non-local goto save area. */ |
3644 | if (root->nl_goto_field) |
3645 | { |
3646 | sf->nonlocal_goto_save_area |
3647 | = get_frame_field (info: root, target_context: context, field: root->nl_goto_field, NULL); |
3648 | sf->has_nonlocal_label = 1; |
3649 | } |
3650 | |
3651 | /* Make sure all new local variables get inserted into the |
3652 | proper BIND_EXPR. */ |
3653 | if (root->new_local_var_chain) |
3654 | declare_vars (root->new_local_var_chain, |
3655 | gimple_seq_first_stmt (s: gimple_body (root->context)), |
3656 | false); |
3657 | |
3658 | if (root->debug_var_chain) |
3659 | { |
3660 | tree debug_var; |
3661 | gbind *scope; |
3662 | |
3663 | remap_vla_decls (DECL_INITIAL (root->context), root); |
3664 | |
3665 | for (debug_var = root->debug_var_chain; debug_var; |
3666 | debug_var = DECL_CHAIN (debug_var)) |
3667 | if (variably_modified_type_p (TREE_TYPE (debug_var), NULL)) |
3668 | break; |
3669 | |
3670 | /* If there are any debug decls with variable length types, |
3671 | remap those types using other debug_var_chain variables. */ |
3672 | if (debug_var) |
3673 | { |
3674 | struct nesting_copy_body_data id; |
3675 | |
3676 | memset (s: &id, c: 0, n: sizeof (id)); |
3677 | id.cb.copy_decl = nesting_copy_decl; |
3678 | id.cb.decl_map = new hash_map<tree, tree>; |
3679 | id.root = root; |
3680 | |
3681 | for (; debug_var; debug_var = DECL_CHAIN (debug_var)) |
3682 | if (variably_modified_type_p (TREE_TYPE (debug_var), NULL)) |
3683 | { |
3684 | tree type = TREE_TYPE (debug_var); |
3685 | tree newt, t = type; |
3686 | struct nesting_info *i; |
3687 | |
3688 | for (i = root; i; i = i->outer) |
3689 | if (variably_modified_type_p (type, i->context)) |
3690 | break; |
3691 | |
3692 | if (i == NULL) |
3693 | continue; |
3694 | |
3695 | id.cb.src_fn = i->context; |
3696 | id.cb.dst_fn = i->context; |
3697 | id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context); |
3698 | |
3699 | TREE_TYPE (debug_var) = newt = remap_type (type, id: &id.cb); |
3700 | while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt)) |
3701 | { |
3702 | newt = TREE_TYPE (newt); |
3703 | t = TREE_TYPE (t); |
3704 | } |
3705 | if (TYPE_NAME (newt) |
3706 | && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL |
3707 | && DECL_ORIGINAL_TYPE (TYPE_NAME (newt)) |
3708 | && newt != t |
3709 | && TYPE_NAME (newt) == TYPE_NAME (t)) |
3710 | TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), id: &id.cb); |
3711 | } |
3712 | |
3713 | delete id.cb.decl_map; |
3714 | } |
3715 | |
3716 | scope = gimple_seq_first_stmt_as_a_bind (s: gimple_body (root->context)); |
3717 | if (gimple_bind_block (bind_stmt: scope)) |
3718 | declare_vars (root->debug_var_chain, scope, true); |
3719 | else |
3720 | BLOCK_VARS (DECL_INITIAL (root->context)) |
3721 | = chainon (BLOCK_VARS (DECL_INITIAL (root->context)), |
3722 | root->debug_var_chain); |
3723 | } |
3724 | else |
3725 | fixup_vla_decls (DECL_INITIAL (root->context)); |
3726 | |
3727 | /* Fold the rewritten MEM_REF trees. */ |
3728 | root->mem_refs->traverse<void *, fold_mem_refs> (NULL); |
3729 | |
3730 | /* Dump the translated tree function. */ |
3731 | if (dump_file) |
3732 | { |
3733 | fputs (s: "\n\n" , stream: dump_file); |
3734 | dump_function_to_file (root->context, dump_file, dump_flags); |
3735 | } |
3736 | } |
3737 | |
3738 | static void |
3739 | finalize_nesting_tree (struct nesting_info *root) |
3740 | { |
3741 | struct nesting_info *n; |
3742 | FOR_EACH_NEST_INFO (n, root) |
3743 | finalize_nesting_tree_1 (root: n); |
3744 | } |
3745 | |
3746 | /* Unnest the nodes and pass them to cgraph. */ |
3747 | |
3748 | static void |
3749 | unnest_nesting_tree_1 (struct nesting_info *root) |
3750 | { |
3751 | struct cgraph_node *node = cgraph_node::get (decl: root->context); |
3752 | |
3753 | /* For nested functions update the cgraph to reflect unnesting. |
3754 | We also delay finalizing of these functions up to this point. */ |
3755 | if (nested_function_info::get (node)->origin) |
3756 | { |
3757 | unnest_function (node); |
3758 | if (!root->thunk_p) |
3759 | cgraph_node::finalize_function (root->context, true); |
3760 | } |
3761 | } |
3762 | |
3763 | static void |
3764 | unnest_nesting_tree (struct nesting_info *root) |
3765 | { |
3766 | struct nesting_info *n; |
3767 | FOR_EACH_NEST_INFO (n, root) |
3768 | unnest_nesting_tree_1 (root: n); |
3769 | } |
3770 | |
3771 | /* Free the data structures allocated during this pass. */ |
3772 | |
3773 | static void |
3774 | free_nesting_tree (struct nesting_info *root) |
3775 | { |
3776 | struct nesting_info *node, *next; |
3777 | |
3778 | node = iter_nestinfo_start (root); |
3779 | do |
3780 | { |
3781 | next = iter_nestinfo_next (node); |
3782 | delete node->var_map; |
3783 | delete node->field_map; |
3784 | delete node->mem_refs; |
3785 | free (ptr: node); |
3786 | node = next; |
3787 | } |
3788 | while (node); |
3789 | } |
3790 | |
3791 | /* Gimplify a function and all its nested functions. */ |
3792 | static void |
3793 | gimplify_all_functions (struct cgraph_node *root) |
3794 | { |
3795 | struct cgraph_node *iter; |
3796 | if (!gimple_body (root->decl)) |
3797 | gimplify_function_tree (root->decl); |
3798 | for (iter = first_nested_function (node: root); iter; |
3799 | iter = next_nested_function (node: iter)) |
3800 | if (!iter->thunk) |
3801 | gimplify_all_functions (root: iter); |
3802 | } |
3803 | |
3804 | /* Main entry point for this pass. Process FNDECL and all of its nested |
3805 | subroutines and turn them into something less tightly bound. */ |
3806 | |
3807 | void |
3808 | lower_nested_functions (tree fndecl) |
3809 | { |
3810 | struct cgraph_node *cgn; |
3811 | struct nesting_info *root; |
3812 | |
3813 | /* If there are no nested functions, there's nothing to do. */ |
3814 | cgn = cgraph_node::get (decl: fndecl); |
3815 | if (!first_nested_function (node: cgn)) |
3816 | return; |
3817 | |
3818 | gimplify_all_functions (root: cgn); |
3819 | |
3820 | set_dump_file (dump_begin (TDI_nested, &dump_flags)); |
3821 | if (dump_file) |
3822 | fprintf (stream: dump_file, format: "\n;; Function %s\n\n" , |
3823 | lang_hooks.decl_printable_name (fndecl, 2)); |
3824 | |
3825 | bitmap_obstack_initialize (&nesting_info_bitmap_obstack); |
3826 | root = create_nesting_tree (cgn); |
3827 | |
3828 | walk_all_functions (callback_stmt: convert_nonlocal_reference_stmt, |
3829 | callback_op: convert_nonlocal_reference_op, |
3830 | root); |
3831 | walk_all_functions (callback_stmt: convert_local_reference_stmt, |
3832 | callback_op: convert_local_reference_op, |
3833 | root); |
3834 | walk_all_functions (callback_stmt: convert_nl_goto_reference, NULL, root); |
3835 | walk_all_functions (callback_stmt: convert_nl_goto_receiver, NULL, root); |
3836 | |
3837 | convert_all_function_calls (root); |
3838 | finalize_nesting_tree (root); |
3839 | unnest_nesting_tree (root); |
3840 | |
3841 | free_nesting_tree (root); |
3842 | bitmap_obstack_release (&nesting_info_bitmap_obstack); |
3843 | |
3844 | if (dump_file) |
3845 | { |
3846 | dump_end (TDI_nested, dump_file); |
3847 | set_dump_file (NULL); |
3848 | } |
3849 | } |
3850 | |
3851 | #include "gt-tree-nested.h" |
3852 | |