1 | /* Statement translation -- generate GCC trees from gfc_code. |
2 | Copyright (C) 2002-2023 Free Software Foundation, Inc. |
3 | Contributed by Paul Brook <paul@nowt.org> |
4 | and Steven Bosscher <s.bosscher@student.tudelft.nl> |
5 | |
6 | This file is part of GCC. |
7 | |
8 | GCC is free software; you can redistribute it and/or modify it under |
9 | the terms of the GNU General Public License as published by the Free |
10 | Software Foundation; either version 3, or (at your option) any later |
11 | version. |
12 | |
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
16 | for more details. |
17 | |
18 | You should have received a copy of the GNU General Public License |
19 | along with GCC; see the file COPYING3. If not see |
20 | <http://www.gnu.org/licenses/>. */ |
21 | |
22 | |
23 | #include "config.h" |
24 | #include "system.h" |
25 | #include "coretypes.h" |
26 | #include "options.h" |
27 | #include "tree.h" |
28 | #include "gfortran.h" |
29 | #include "trans.h" |
30 | #include "stringpool.h" |
31 | #include "fold-const.h" |
32 | #include "trans-stmt.h" |
33 | #include "trans-types.h" |
34 | #include "trans-array.h" |
35 | #include "trans-const.h" |
36 | #include "dependency.h" |
37 | |
/* Describes one iteration variable of a FORALL / DO CONCURRENT construct:
   the variable itself plus its loop bounds and step, chained together for
   multi-index constructs.  */

typedef struct iter_info
{
  tree var;			/* The iteration variable decl.  */
  tree start;			/* Lower bound of the iteration range.  */
  tree end;			/* Upper bound of the iteration range.  */
  tree step;			/* Stride of the iteration.  */
  struct iter_info *next;	/* Next iterator in the chain, or NULL.  */
}
iter_info;
47 | |
/* Per-nesting-level bookkeeping for translating a FORALL (or DO CONCURRENT)
   construct; levels are linked outward through prev_nest.  */

typedef struct forall_info
{
  iter_info *this_loop;		/* Iterators belonging to this level.  */
  tree mask;			/* Mask array, or NULL_TREE when unmasked.  */
  tree maskindex;		/* Index variable used to walk MASK.  */
  int nvar;			/* Number of iteration variables.  */
  tree size;			/* Size associated with the mask
				   (presumably its element count — see the
				   FORALL translation code).  */
  struct forall_info *prev_nest;  /* Enclosing FORALL level, or NULL.  */
  bool do_concurrent;		/* True when this is DO CONCURRENT rather
				   than FORALL.  */
}
forall_info;
59 | |
60 | static void gfc_trans_where_2 (gfc_code *, tree, bool, |
61 | forall_info *, stmtblock_t *); |
62 | |
63 | /* Translate a F95 label number to a LABEL_EXPR. */ |
64 | |
65 | tree |
66 | gfc_trans_label_here (gfc_code * code) |
67 | { |
68 | return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here)); |
69 | } |
70 | |
71 | |
72 | /* Given a variable expression which has been ASSIGNed to, find the decl |
73 | containing the auxiliary variables. For variables in common blocks this |
74 | is a field_decl. */ |
75 | |
76 | void |
77 | gfc_conv_label_variable (gfc_se * se, gfc_expr * expr) |
78 | { |
79 | gcc_assert (expr->symtree->n.sym->attr.assign == 1); |
80 | gfc_conv_expr (se, expr); |
81 | /* Deals with variable in common block. Get the field declaration. */ |
82 | if (TREE_CODE (se->expr) == COMPONENT_REF) |
83 | se->expr = TREE_OPERAND (se->expr, 1); |
84 | /* Deals with dummy argument. Get the parameter declaration. */ |
85 | else if (INDIRECT_REF_P (se->expr)) |
86 | se->expr = TREE_OPERAND (se->expr, 0); |
87 | } |
88 | |
89 | /* Translate a label assignment statement. */ |
90 | |
91 | tree |
92 | gfc_trans_label_assign (gfc_code * code) |
93 | { |
94 | tree label_tree; |
95 | gfc_se se; |
96 | tree len; |
97 | tree addr; |
98 | tree len_tree; |
99 | int label_len; |
100 | |
101 | /* Start a new block. */ |
102 | gfc_init_se (&se, NULL); |
103 | gfc_start_block (&se.pre); |
104 | gfc_conv_label_variable (se: &se, expr: code->expr1); |
105 | |
106 | len = GFC_DECL_STRING_LEN (se.expr); |
107 | addr = GFC_DECL_ASSIGN_ADDR (se.expr); |
108 | |
109 | label_tree = gfc_get_label_decl (code->label1); |
110 | |
111 | if (code->label1->defined == ST_LABEL_TARGET |
112 | || code->label1->defined == ST_LABEL_DO_TARGET) |
113 | { |
114 | label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree); |
115 | len_tree = build_int_cst (gfc_charlen_type_node, -1); |
116 | } |
117 | else |
118 | { |
119 | gfc_expr *format = code->label1->format; |
120 | |
121 | label_len = format->value.character.length; |
122 | len_tree = build_int_cst (gfc_charlen_type_node, label_len); |
123 | label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1, |
124 | format->value.character.string); |
125 | label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree); |
126 | } |
127 | |
128 | gfc_add_modify (&se.pre, len, fold_convert (TREE_TYPE (len), len_tree)); |
129 | gfc_add_modify (&se.pre, addr, label_tree); |
130 | |
131 | return gfc_finish_block (&se.pre); |
132 | } |
133 | |
134 | /* Translate a GOTO statement. */ |
135 | |
136 | tree |
137 | gfc_trans_goto (gfc_code * code) |
138 | { |
139 | locus loc = code->loc; |
140 | tree assigned_goto; |
141 | tree target; |
142 | tree tmp; |
143 | gfc_se se; |
144 | |
145 | if (code->label1 != NULL) |
146 | return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1)); |
147 | |
148 | /* ASSIGNED GOTO. */ |
149 | gfc_init_se (&se, NULL); |
150 | gfc_start_block (&se.pre); |
151 | gfc_conv_label_variable (se: &se, expr: code->expr1); |
152 | tmp = GFC_DECL_STRING_LEN (se.expr); |
153 | tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node, tmp, |
154 | build_int_cst (TREE_TYPE (tmp), -1)); |
155 | gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc, |
156 | "Assigned label is not a target label" ); |
157 | |
158 | assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr); |
159 | |
160 | /* We're going to ignore a label list. It does not really change the |
161 | statement's semantics (because it is just a further restriction on |
162 | what's legal code); before, we were comparing label addresses here, but |
163 | that's a very fragile business and may break with optimization. So |
164 | just ignore it. */ |
165 | |
166 | target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node, |
167 | assigned_goto); |
168 | gfc_add_expr_to_block (&se.pre, target); |
169 | return gfc_finish_block (&se.pre); |
170 | } |
171 | |
172 | |
173 | /* Translate an ENTRY statement. Just adds a label for this entry point. */ |
174 | tree |
175 | gfc_trans_entry (gfc_code * code) |
176 | { |
177 | return build1_v (LABEL_EXPR, code->ext.entry->label); |
178 | } |
179 | |
180 | |
/* Replace a gfc_ss structure by another both in the gfc_se struct
   and the gfc_loopinfo struct.  This is used in gfc_conv_elemental_dependencies
   to replace a variable ss by the corresponding temporary.  */

static void
replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
{
  gfc_ss **sess, **loopss;

  /* The old_ss is a ss for a single variable.  */
  gcc_assert (old_ss->info->type == GFC_SS_SECTION);

  /* Locate OLD_SS in the se's ss chain; it must be present.  */
  for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
    if (*sess == old_ss)
      break;
  gcc_assert (*sess != gfc_ss_terminator);

  /* Splice NEW_SS into the chain in place of OLD_SS.  */
  *sess = new_ss;
  new_ss->next = old_ss->next;

  /* Make sure that trailing references are not lost.  */
  if (old_ss->info
      && old_ss->info->data.array.ref
      && old_ss->info->data.array.ref->next
      && !(new_ss->info->data.array.ref
	   && new_ss->info->data.array.ref->next))
    new_ss->info->data.array.ref = old_ss->info->data.array.ref;

  /* Perform the same replacement in the loop's ss chain.  */
  for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
       loopss = &((*loopss)->loop_chain))
    if (*loopss == old_ss)
      break;
  gcc_assert (*loopss != gfc_ss_terminator);

  *loopss = new_ss;
  new_ss->loop_chain = old_ss->loop_chain;
  new_ss->loop = old_ss->loop;

  /* OLD_SS is no longer referenced from either chain.  */
  gfc_free_ss (old_ss);
}
221 | |
222 | |
/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
				 gfc_symbol * sym, gfc_actual_arglist * arg,
				 gfc_dep_check check_variable)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_symbol *fsym;
  tree data;
  tree size;
  tree tmp;

  /* Nothing to do if the call is not being scalarized.  */
  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = gfc_sym_get_dummy_args (sym);

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
	continue;

      /* Obtain the info structure for the current argument.  */
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
	if (ss->info->expr == e)
	  break;

      /* If there is a dependency, create a temporary and use it
	 instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
	  && e->rank && fsym
	  && fsym->attr.intent != INTENT_IN
	  && !fsym->attr.value
	  && gfc_check_fncall_dependency (e, fsym->attr.intent,
					  sym, arg0, check_variable))
	{
	  tree initial, temptype;
	  stmtblock_t temp_post;
	  gfc_ss *tmp_ss;

	  /* Build a fresh scalarization chain entry for the temporary and
	     substitute it for the variable's entry in both ss chains.  */
	  tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
				     GFC_SS_SECTION);
	  gfc_mark_ss_chain_used (tmp_ss, 1);
	  tmp_ss->info->expr = ss->info->expr;
	  replace_ss (loopse, ss, tmp_ss);

	  /* Obtain the argument descriptor for unpacking.  */
	  gfc_init_se (&parmse, NULL);
	  parmse.want_pointer = 1;
	  gfc_conv_expr_descriptor (&parmse, e);
	  gfc_add_block_to_block (&se->pre, &parmse.pre);

	  /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
	     initialize the array temporary with a copy of the values.  */
	  if (fsym->attr.intent == INTENT_INOUT
	      || (fsym->ts.type == BT_DERIVED
		  && fsym->attr.intent == INTENT_OUT))
	    initial = parmse.expr;
	  /* For class expressions, we always initialize with the copy of
	     the values.  */
	  else if (e->ts.type == BT_CLASS)
	    initial = parmse.expr;
	  else
	    initial = NULL_TREE;

	  if (e->ts.type != BT_CLASS)
	    {
	     /* Find the type of the temporary to create; we don't use the type
		of e itself as this breaks for subcomponent-references in e
		(where the type of e is that of the final reference, but
		parmse.expr's type corresponds to the full derived-type).  */
	     /* TODO: Fix this somehow so we don't need a temporary of the whole
		array but instead only the components referenced.  */
	      temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor.  */
	      gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
	      temptype = TREE_TYPE (temptype);
	      temptype = gfc_get_element_type (temptype);
	    }

	  else
	    /* For class arrays signal that the size of the dynamic type has to
	       be obtained from the vtable, using the 'initial' expression.  */
	    temptype = NULL_TREE;

	  /* Generate the temporary.  Cleaning up the temporary should be the
	     very last thing done, so we add the code to a new block and add it
	     to se->post as last instructions.  */
	  size = gfc_create_var (gfc_array_index_type, NULL);
	  data = gfc_create_var (pvoid_type_node, NULL);
	  gfc_init_block (&temp_post);
	  tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
					     temptype, initial, false, true,
					     false, &arg->expr->where);
	  gfc_add_modify (&se->pre, size, tmp);
	  tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
	  gfc_add_modify (&se->pre, data, tmp);

	  /* Update other ss' delta.  */
	  gfc_set_delta (loopse->loop);

	  /* Copy the result back using unpack.....  */
	  if (e->ts.type != BT_CLASS)
	    tmp = build_call_expr_loc (input_location,
				       gfor_fndecl_in_unpack, 2, parmse.expr,
				       data);
	  else
	    {
	      /* ... except for class results where the copy is
		 unconditional.  */
	      tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
	      tmp = gfc_conv_descriptor_data_get (tmp);
	      tmp = build_call_expr_loc (input_location,
					 builtin_decl_explicit (BUILT_IN_MEMCPY),
					 3, tmp, data,
					 fold_convert (size_type_node, size));
	    }
	  gfc_add_expr_to_block (&se->post, tmp);

	  /* parmse.pre is already added above.  */
	  gfc_add_block_to_block (&se->post, &parmse.post);
	  gfc_add_block_to_block (&se->post, &temp_post);
	}
    }
}
358 | |
359 | |
360 | /* Given an executable statement referring to an intrinsic function call, |
361 | returns the intrinsic symbol. */ |
362 | |
363 | static gfc_intrinsic_sym * |
364 | get_intrinsic_for_code (gfc_code *code) |
365 | { |
366 | if (code->op == EXEC_CALL) |
367 | { |
368 | gfc_intrinsic_sym * const isym = code->resolved_isym; |
369 | if (isym) |
370 | return isym; |
371 | else |
372 | return gfc_get_intrinsic_for_expr (code->expr1); |
373 | } |
374 | |
375 | return NULL; |
376 | } |
377 | |
378 | |
/* Translate the CALL statement.  Builds a call to an F95 subroutine.
   DEPENDENCY_CHECK requests dependency analysis between arguments (used
   for operator assignment).  MASK/COUNT1/INVERT are used when the call
   appears inside a masked context (e.g. WHERE): the call is only executed
   where MASK (negated if INVERT) is true, and COUNT1 indexes MASK.  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check,
		tree mask, tree count1, bool invert)
{
  gfc_se se;
  gfc_ss * ss;
  int has_alternate_specifier;
  gfc_dep_check check_variable;
  tree index = NULL_TREE;
  tree maskexpr = NULL_TREE;
  tree tmp;
  bool is_intrinsic_mvbits;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  /* For an elemental subroutine, walk the arguments to decide whether the
     call needs scalarizing (any array-valued argument yields a non-empty
     ss chain).  */
  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
					   get_intrinsic_for_code (code),
					   GFC_SS_REFERENCE);

  /* MVBITS is inlined but needs the dependency checking found here.  */
  is_intrinsic_mvbits = code->resolved_isym
			&& code->resolved_isym->id == GFC_ISYM_MVBITS;

  /* Is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {

      if (is_intrinsic_mvbits)
	{
	  has_alternate_specifier = 0;
	  gfc_conv_intrinsic_mvbits (&se, code->ext.actual, NULL);
	}
      else
	{
	  /* Translate the call.  */
	  has_alternate_specifier =
	    gfc_conv_procedure_call (&se, code->resolved_sym,
				     code->ext.actual, code->expr1, NULL);

	  /* A subroutine without side-effect, by definition, does nothing!  */
	  TREE_SIDE_EFFECTS (se.expr) = 1;
	}

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
	{
	  /* An alternate return: the following SELECT dispatches on the
	     returned specifier value, which we store in its variable.  */
	  gfc_code *select_code;
	  gfc_symbol *sym;
	  select_code = code->next;
	  gcc_assert(select_code->op == EXEC_SELECT);
	  sym = select_code->expr1->symtree->n.sym;
	  se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
	  if (sym->backend_decl == NULL)
	    sym->backend_decl = gfc_get_symbol_decl (sym);
	  gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
	}
      else
	gfc_add_expr_to_block (&se.pre, se.expr);

      /* Finalization code runs after the call's post block.  */
      gfc_add_block_to_block (&se.finalblock, &se.post);
      gfc_add_block_to_block (&se.pre, &se.finalblock);
    }

  else
    {
      /* An elemental subroutine call with array valued arguments has
	 to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;
      gfc_se depse;

      /* gfc_walk_elemental_function_args renders the ss chain in the
	 reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
	 subscripts.  This could be prevented in the elemental case
	 as temporaries are handled separately
	 (below in gfc_conv_elemental_dependencies).  */
      if (code->expr1)
	gfc_conv_loop_setup (&loop, &code->expr1->where);
      else
	gfc_conv_loop_setup (&loop, &code->loc);

      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
	check_variable = ELEM_CHECK_VARIABLE;
      else
	check_variable = ELEM_DONT_CHECK_VARIABLE;

      /* Create temporaries for arguments that would otherwise alias.  */
      gfc_init_se (&depse, NULL);
      gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
				       code->ext.actual, check_variable);

      gfc_add_block_to_block (&loop.pre, &depse.pre);
      gfc_add_block_to_block (&loop.post, &depse.post);

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      if (mask && count1)
	{
	  /* Form the mask expression according to the mask.  */
	  index = count1;
	  maskexpr = gfc_build_array_ref (mask, index, NULL);
	  if (invert)
	    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
					TREE_TYPE (maskexpr), maskexpr);
	}

      if (is_intrinsic_mvbits)
	{
	  has_alternate_specifier = 0;
	  gfc_conv_intrinsic_mvbits (&loopse, code->ext.actual, &loop);
	}
      else
	{
	  /* Add the subroutine call to the block.  */
	  gfc_conv_procedure_call (&loopse, code->resolved_sym,
				   code->ext.actual, code->expr1,
				   NULL);
	}

      if (mask && count1)
	{
	  /* Guard the call with the mask and advance the mask index.  */
	  tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
			  build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&loopse.pre, tmp);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type,
				 count1, gfc_index_one_node);
	  gfc_add_modify (&loopse.pre, count1, tmp);
	}
      else
	gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &loopse.finalblock);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}
554 | |
555 | |
556 | /* Translate the RETURN statement. */ |
557 | |
558 | tree |
559 | gfc_trans_return (gfc_code * code) |
560 | { |
561 | if (code->expr1) |
562 | { |
563 | gfc_se se; |
564 | tree tmp; |
565 | tree result; |
566 | |
567 | /* If code->expr is not NULL, this return statement must appear |
568 | in a subroutine and current_fake_result_decl has already |
569 | been generated. */ |
570 | |
571 | result = gfc_get_fake_result_decl (NULL, 0); |
572 | if (!result) |
573 | { |
574 | gfc_warning (opt: 0, |
575 | "An alternate return at %L without a * dummy argument" , |
576 | &code->expr1->where); |
577 | return gfc_generate_return (); |
578 | } |
579 | |
580 | /* Start a new block for this statement. */ |
581 | gfc_init_se (&se, NULL); |
582 | gfc_start_block (&se.pre); |
583 | |
584 | gfc_conv_expr (se: &se, expr: code->expr1); |
585 | |
586 | /* Note that the actually returned expression is a simple value and |
587 | does not depend on any pointers or such; thus we can clean-up with |
588 | se.post before returning. */ |
589 | tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result), |
590 | result, fold_convert (TREE_TYPE (result), |
591 | se.expr)); |
592 | gfc_add_expr_to_block (&se.pre, tmp); |
593 | gfc_add_block_to_block (&se.pre, &se.post); |
594 | |
595 | tmp = gfc_generate_return (); |
596 | gfc_add_expr_to_block (&se.pre, tmp); |
597 | return gfc_finish_block (&se.pre); |
598 | } |
599 | |
600 | return gfc_generate_return (); |
601 | } |
602 | |
603 | |
604 | /* Translate the PAUSE statement. We have to translate this statement |
605 | to a runtime library call. */ |
606 | |
607 | tree |
608 | gfc_trans_pause (gfc_code * code) |
609 | { |
610 | tree gfc_int8_type_node = gfc_get_int_type (8); |
611 | gfc_se se; |
612 | tree tmp; |
613 | |
614 | /* Start a new block for this statement. */ |
615 | gfc_init_se (&se, NULL); |
616 | gfc_start_block (&se.pre); |
617 | |
618 | |
619 | if (code->expr1 == NULL) |
620 | { |
621 | tmp = build_int_cst (size_type_node, 0); |
622 | tmp = build_call_expr_loc (input_location, |
623 | gfor_fndecl_pause_string, 2, |
624 | build_int_cst (pchar_type_node, 0), tmp); |
625 | } |
626 | else if (code->expr1->ts.type == BT_INTEGER) |
627 | { |
628 | gfc_conv_expr (se: &se, expr: code->expr1); |
629 | tmp = build_call_expr_loc (input_location, |
630 | gfor_fndecl_pause_numeric, 1, |
631 | fold_convert (gfc_int8_type_node, se.expr)); |
632 | } |
633 | else |
634 | { |
635 | gfc_conv_expr_reference (se: &se, expr: code->expr1); |
636 | tmp = build_call_expr_loc (input_location, |
637 | gfor_fndecl_pause_string, 2, |
638 | se.expr, fold_convert (size_type_node, |
639 | se.string_length)); |
640 | } |
641 | |
642 | gfc_add_expr_to_block (&se.pre, tmp); |
643 | |
644 | gfc_add_block_to_block (&se.pre, &se.post); |
645 | |
646 | return gfc_finish_block (&se.pre); |
647 | } |
648 | |
649 | |
650 | /* Translate the STOP statement. We have to translate this statement |
651 | to a runtime library call. */ |
652 | |
653 | tree |
654 | gfc_trans_stop (gfc_code *code, bool error_stop) |
655 | { |
656 | gfc_se se; |
657 | tree tmp; |
658 | tree quiet; |
659 | |
660 | /* Start a new block for this statement. */ |
661 | gfc_init_se (&se, NULL); |
662 | gfc_start_block (&se.pre); |
663 | |
664 | if (code->expr2) |
665 | { |
666 | gfc_conv_expr_val (se: &se, expr: code->expr2); |
667 | quiet = fold_convert (boolean_type_node, se.expr); |
668 | } |
669 | else |
670 | quiet = boolean_false_node; |
671 | |
672 | if (code->expr1 == NULL) |
673 | { |
674 | tmp = build_int_cst (size_type_node, 0); |
675 | tmp = build_call_expr_loc (input_location, |
676 | error_stop |
677 | ? (flag_coarray == GFC_FCOARRAY_LIB |
678 | ? gfor_fndecl_caf_error_stop_str |
679 | : gfor_fndecl_error_stop_string) |
680 | : (flag_coarray == GFC_FCOARRAY_LIB |
681 | ? gfor_fndecl_caf_stop_str |
682 | : gfor_fndecl_stop_string), |
683 | 3, build_int_cst (pchar_type_node, 0), tmp, |
684 | quiet); |
685 | } |
686 | else if (code->expr1->ts.type == BT_INTEGER) |
687 | { |
688 | gfc_conv_expr (se: &se, expr: code->expr1); |
689 | tmp = build_call_expr_loc (input_location, |
690 | error_stop |
691 | ? (flag_coarray == GFC_FCOARRAY_LIB |
692 | ? gfor_fndecl_caf_error_stop |
693 | : gfor_fndecl_error_stop_numeric) |
694 | : (flag_coarray == GFC_FCOARRAY_LIB |
695 | ? gfor_fndecl_caf_stop_numeric |
696 | : gfor_fndecl_stop_numeric), 2, |
697 | fold_convert (integer_type_node, se.expr), |
698 | quiet); |
699 | } |
700 | else |
701 | { |
702 | gfc_conv_expr_reference (se: &se, expr: code->expr1); |
703 | tmp = build_call_expr_loc (input_location, |
704 | error_stop |
705 | ? (flag_coarray == GFC_FCOARRAY_LIB |
706 | ? gfor_fndecl_caf_error_stop_str |
707 | : gfor_fndecl_error_stop_string) |
708 | : (flag_coarray == GFC_FCOARRAY_LIB |
709 | ? gfor_fndecl_caf_stop_str |
710 | : gfor_fndecl_stop_string), |
711 | 3, se.expr, fold_convert (size_type_node, |
712 | se.string_length), |
713 | quiet); |
714 | } |
715 | |
716 | gfc_add_expr_to_block (&se.pre, tmp); |
717 | |
718 | gfc_add_block_to_block (&se.pre, &se.post); |
719 | |
720 | return gfc_finish_block (&se.pre); |
721 | } |
722 | |
723 | /* Translate the FAIL IMAGE statement. */ |
724 | |
725 | tree |
726 | gfc_trans_fail_image (gfc_code *code ATTRIBUTE_UNUSED) |
727 | { |
728 | if (flag_coarray == GFC_FCOARRAY_LIB) |
729 | return build_call_expr_loc (input_location, |
730 | gfor_fndecl_caf_fail_image, 0); |
731 | else |
732 | { |
733 | const char *name = gfc_get_string (PREFIX ("exit_i%d" ), 4); |
734 | gfc_symbol *exsym = gfc_get_intrinsic_sub_symbol (name); |
735 | tree tmp = gfc_get_symbol_decl (exsym); |
736 | return build_call_expr_loc (input_location, tmp, 1, integer_zero_node); |
737 | } |
738 | } |
739 | |
740 | /* Translate the FORM TEAM statement. */ |
741 | |
742 | tree |
743 | gfc_trans_form_team (gfc_code *code) |
744 | { |
745 | if (flag_coarray == GFC_FCOARRAY_LIB) |
746 | { |
747 | gfc_se se; |
748 | gfc_se argse1, argse2; |
749 | tree team_id, team_type, tmp; |
750 | |
751 | gfc_init_se (&se, NULL); |
752 | gfc_init_se (&argse1, NULL); |
753 | gfc_init_se (&argse2, NULL); |
754 | gfc_start_block (&se.pre); |
755 | |
756 | gfc_conv_expr_val (se: &argse1, expr: code->expr1); |
757 | gfc_conv_expr_val (se: &argse2, expr: code->expr2); |
758 | team_id = fold_convert (integer_type_node, argse1.expr); |
759 | team_type = gfc_build_addr_expr (ppvoid_type_node, argse2.expr); |
760 | |
761 | gfc_add_block_to_block (&se.pre, &argse1.pre); |
762 | gfc_add_block_to_block (&se.pre, &argse2.pre); |
763 | tmp = build_call_expr_loc (input_location, |
764 | gfor_fndecl_caf_form_team, 3, |
765 | team_id, team_type, |
766 | build_int_cst (integer_type_node, 0)); |
767 | gfc_add_expr_to_block (&se.pre, tmp); |
768 | gfc_add_block_to_block (&se.pre, &argse1.post); |
769 | gfc_add_block_to_block (&se.pre, &argse2.post); |
770 | return gfc_finish_block (&se.pre); |
771 | } |
772 | else |
773 | { |
774 | const char *name = gfc_get_string (PREFIX ("exit_i%d" ), 4); |
775 | gfc_symbol *exsym = gfc_get_intrinsic_sub_symbol (name); |
776 | tree tmp = gfc_get_symbol_decl (exsym); |
777 | return build_call_expr_loc (input_location, tmp, 1, integer_zero_node); |
778 | } |
779 | } |
780 | |
781 | /* Translate the CHANGE TEAM statement. */ |
782 | |
783 | tree |
784 | gfc_trans_change_team (gfc_code *code) |
785 | { |
786 | if (flag_coarray == GFC_FCOARRAY_LIB) |
787 | { |
788 | gfc_se argse; |
789 | tree team_type, tmp; |
790 | |
791 | gfc_init_se (&argse, NULL); |
792 | gfc_conv_expr_val (se: &argse, expr: code->expr1); |
793 | team_type = gfc_build_addr_expr (ppvoid_type_node, argse.expr); |
794 | |
795 | tmp = build_call_expr_loc (input_location, |
796 | gfor_fndecl_caf_change_team, 2, team_type, |
797 | build_int_cst (integer_type_node, 0)); |
798 | gfc_add_expr_to_block (&argse.pre, tmp); |
799 | gfc_add_block_to_block (&argse.pre, &argse.post); |
800 | return gfc_finish_block (&argse.pre); |
801 | } |
802 | else |
803 | { |
804 | const char *name = gfc_get_string (PREFIX ("exit_i%d" ), 4); |
805 | gfc_symbol *exsym = gfc_get_intrinsic_sub_symbol (name); |
806 | tree tmp = gfc_get_symbol_decl (exsym); |
807 | return build_call_expr_loc (input_location, tmp, 1, integer_zero_node); |
808 | } |
809 | } |
810 | |
811 | /* Translate the END TEAM statement. */ |
812 | |
813 | tree |
814 | gfc_trans_end_team (gfc_code *code ATTRIBUTE_UNUSED) |
815 | { |
816 | if (flag_coarray == GFC_FCOARRAY_LIB) |
817 | { |
818 | return build_call_expr_loc (input_location, |
819 | gfor_fndecl_caf_end_team, 1, |
820 | build_int_cst (pchar_type_node, 0)); |
821 | } |
822 | else |
823 | { |
824 | const char *name = gfc_get_string (PREFIX ("exit_i%d" ), 4); |
825 | gfc_symbol *exsym = gfc_get_intrinsic_sub_symbol (name); |
826 | tree tmp = gfc_get_symbol_decl (exsym); |
827 | return build_call_expr_loc (input_location, tmp, 1, integer_zero_node); |
828 | } |
829 | } |
830 | |
831 | /* Translate the SYNC TEAM statement. */ |
832 | |
833 | tree |
834 | gfc_trans_sync_team (gfc_code *code) |
835 | { |
836 | if (flag_coarray == GFC_FCOARRAY_LIB) |
837 | { |
838 | gfc_se argse; |
839 | tree team_type, tmp; |
840 | |
841 | gfc_init_se (&argse, NULL); |
842 | gfc_conv_expr_val (se: &argse, expr: code->expr1); |
843 | team_type = gfc_build_addr_expr (ppvoid_type_node, argse.expr); |
844 | |
845 | tmp = build_call_expr_loc (input_location, |
846 | gfor_fndecl_caf_sync_team, 2, |
847 | team_type, |
848 | build_int_cst (integer_type_node, 0)); |
849 | gfc_add_expr_to_block (&argse.pre, tmp); |
850 | gfc_add_block_to_block (&argse.pre, &argse.post); |
851 | return gfc_finish_block (&argse.pre); |
852 | } |
853 | else |
854 | { |
855 | const char *name = gfc_get_string (PREFIX ("exit_i%d" ), 4); |
856 | gfc_symbol *exsym = gfc_get_intrinsic_sub_symbol (name); |
857 | tree tmp = gfc_get_symbol_decl (exsym); |
858 | return build_call_expr_loc (input_location, tmp, 1, integer_zero_node); |
859 | } |
860 | } |
861 | |
862 | tree |
863 | gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op op) |
864 | { |
865 | gfc_se se, argse; |
866 | tree stat = NULL_TREE, stat2 = NULL_TREE; |
867 | tree lock_acquired = NULL_TREE, lock_acquired2 = NULL_TREE; |
868 | |
869 | /* Short cut: For single images without STAT= or LOCK_ACQUIRED |
870 | return early. (ERRMSG= is always untouched for -fcoarray=single.) */ |
871 | if (!code->expr2 && !code->expr4 && flag_coarray != GFC_FCOARRAY_LIB) |
872 | return NULL_TREE; |
873 | |
874 | if (code->expr2) |
875 | { |
876 | gcc_assert (code->expr2->expr_type == EXPR_VARIABLE); |
877 | gfc_init_se (&argse, NULL); |
878 | gfc_conv_expr_val (se: &argse, expr: code->expr2); |
879 | stat = argse.expr; |
880 | } |
881 | else if (flag_coarray == GFC_FCOARRAY_LIB) |
882 | stat = null_pointer_node; |
883 | |
884 | if (code->expr4) |
885 | { |
886 | gcc_assert (code->expr4->expr_type == EXPR_VARIABLE); |
887 | gfc_init_se (&argse, NULL); |
888 | gfc_conv_expr_val (se: &argse, expr: code->expr4); |
889 | lock_acquired = argse.expr; |
890 | } |
891 | else if (flag_coarray == GFC_FCOARRAY_LIB) |
892 | lock_acquired = null_pointer_node; |
893 | |
894 | gfc_start_block (&se.pre); |
895 | if (flag_coarray == GFC_FCOARRAY_LIB) |
896 | { |
897 | tree tmp, token, image_index, errmsg, errmsg_len; |
898 | tree index = build_zero_cst (gfc_array_index_type); |
899 | tree caf_decl = gfc_get_tree_for_caf_expr (code->expr1); |
900 | |
901 | if (code->expr1->symtree->n.sym->ts.type != BT_DERIVED |
902 | || code->expr1->symtree->n.sym->ts.u.derived->from_intmod |
903 | != INTMOD_ISO_FORTRAN_ENV |
904 | || code->expr1->symtree->n.sym->ts.u.derived->intmod_sym_id |
905 | != ISOFORTRAN_LOCK_TYPE) |
906 | { |
907 | gfc_error ("Sorry, the lock component of derived type at %L is not " |
908 | "yet supported" , &code->expr1->where); |
909 | return NULL_TREE; |
910 | } |
911 | |
912 | gfc_get_caf_token_offset (&se, &token, NULL, caf_decl, NULL_TREE, |
913 | code->expr1); |
914 | |
915 | if (gfc_is_coindexed (code->expr1)) |
916 | image_index = gfc_caf_get_image_index (&se.pre, code->expr1, caf_decl); |
917 | else |
918 | image_index = integer_zero_node; |
919 | |
920 | /* For arrays, obtain the array index. */ |
921 | if (gfc_expr_attr (code->expr1).dimension) |
922 | { |
923 | tree desc, tmp, extent, lbound, ubound; |
924 | gfc_array_ref *ar, ar2; |
925 | int i; |
926 | |
927 | /* TODO: Extend this, once DT components are supported. */ |
928 | ar = &code->expr1->ref->u.ar; |
929 | ar2 = *ar; |
930 | memset (s: ar, c: '\0', n: sizeof (*ar)); |
931 | ar->as = ar2.as; |
932 | ar->type = AR_FULL; |
933 | |
934 | gfc_init_se (&argse, NULL); |
935 | argse.descriptor_only = 1; |
936 | gfc_conv_expr_descriptor (&argse, code->expr1); |
937 | gfc_add_block_to_block (&se.pre, &argse.pre); |
938 | desc = argse.expr; |
939 | *ar = ar2; |
940 | |
941 | extent = build_one_cst (gfc_array_index_type); |
942 | for (i = 0; i < ar->dimen; i++) |
943 | { |
944 | gfc_init_se (&argse, NULL); |
945 | gfc_conv_expr_type (se: &argse, ar->start[i], gfc_array_index_type); |
946 | gfc_add_block_to_block (&argse.pre, &argse.pre); |
947 | lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[i]); |
948 | tmp = fold_build2_loc (input_location, MINUS_EXPR, |
949 | TREE_TYPE (lbound), argse.expr, lbound); |
950 | tmp = fold_build2_loc (input_location, MULT_EXPR, |
951 | TREE_TYPE (tmp), extent, tmp); |
952 | index = fold_build2_loc (input_location, PLUS_EXPR, |
953 | TREE_TYPE (tmp), index, tmp); |
954 | if (i < ar->dimen - 1) |
955 | { |
956 | ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[i]); |
957 | tmp = gfc_conv_array_extent_dim (lbound, ubound, NULL); |
958 | extent = fold_build2_loc (input_location, MULT_EXPR, |
959 | TREE_TYPE (tmp), extent, tmp); |
960 | } |
961 | } |
962 | } |
963 | |
964 | /* errmsg. */ |
965 | if (code->expr3) |
966 | { |
967 | gfc_init_se (&argse, NULL); |
968 | argse.want_pointer = 1; |
969 | gfc_conv_expr (se: &argse, expr: code->expr3); |
970 | gfc_add_block_to_block (&se.pre, &argse.pre); |
971 | errmsg = argse.expr; |
972 | errmsg_len = fold_convert (size_type_node, argse.string_length); |
973 | } |
974 | else |
975 | { |
976 | errmsg = null_pointer_node; |
977 | errmsg_len = build_zero_cst (size_type_node); |
978 | } |
979 | |
980 | if (stat != null_pointer_node && TREE_TYPE (stat) != integer_type_node) |
981 | { |
982 | stat2 = stat; |
983 | stat = gfc_create_var (integer_type_node, "stat" ); |
984 | } |
985 | |
986 | if (lock_acquired != null_pointer_node |
987 | && TREE_TYPE (lock_acquired) != integer_type_node) |
988 | { |
989 | lock_acquired2 = lock_acquired; |
990 | lock_acquired = gfc_create_var (integer_type_node, "acquired" ); |
991 | } |
992 | |
993 | index = fold_convert (size_type_node, index); |
994 | if (op == EXEC_LOCK) |
995 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_lock, 7, |
996 | token, index, image_index, |
997 | lock_acquired != null_pointer_node |
998 | ? gfc_build_addr_expr (NULL, lock_acquired) |
999 | : lock_acquired, |
1000 | stat != null_pointer_node |
1001 | ? gfc_build_addr_expr (NULL, stat) : stat, |
1002 | errmsg, errmsg_len); |
1003 | else |
1004 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_unlock, 6, |
1005 | token, index, image_index, |
1006 | stat != null_pointer_node |
1007 | ? gfc_build_addr_expr (NULL, stat) : stat, |
1008 | errmsg, errmsg_len); |
1009 | gfc_add_expr_to_block (&se.pre, tmp); |
1010 | |
1011 | /* It guarantees memory consistency within the same segment */ |
1012 | tmp = gfc_build_string_const (strlen (s: "memory" )+1, "memory" ), |
1013 | tmp = build5_loc (loc: input_location, code: ASM_EXPR, void_type_node, |
1014 | arg0: gfc_build_string_const (1, "" ), NULL_TREE, NULL_TREE, |
1015 | arg3: tree_cons (NULL_TREE, tmp, NULL_TREE), NULL_TREE); |
1016 | ASM_VOLATILE_P (tmp) = 1; |
1017 | |
1018 | gfc_add_expr_to_block (&se.pre, tmp); |
1019 | |
1020 | if (stat2 != NULL_TREE) |
1021 | gfc_add_modify (&se.pre, stat2, |
1022 | fold_convert (TREE_TYPE (stat2), stat)); |
1023 | |
1024 | if (lock_acquired2 != NULL_TREE) |
1025 | gfc_add_modify (&se.pre, lock_acquired2, |
1026 | fold_convert (TREE_TYPE (lock_acquired2), |
1027 | lock_acquired)); |
1028 | |
1029 | return gfc_finish_block (&se.pre); |
1030 | } |
1031 | |
1032 | if (stat != NULL_TREE) |
1033 | gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0)); |
1034 | |
1035 | if (lock_acquired != NULL_TREE) |
1036 | gfc_add_modify (&se.pre, lock_acquired, |
1037 | fold_convert (TREE_TYPE (lock_acquired), |
1038 | boolean_true_node)); |
1039 | |
1040 | return gfc_finish_block (&se.pre); |
1041 | } |
1042 | |
1043 | tree |
1044 | gfc_trans_event_post_wait (gfc_code *code, gfc_exec_op op) |
1045 | { |
1046 | gfc_se se, argse; |
1047 | tree stat = NULL_TREE, stat2 = NULL_TREE; |
1048 | tree until_count = NULL_TREE; |
1049 | |
1050 | if (code->expr2) |
1051 | { |
1052 | gcc_assert (code->expr2->expr_type == EXPR_VARIABLE); |
1053 | gfc_init_se (&argse, NULL); |
1054 | gfc_conv_expr_val (se: &argse, expr: code->expr2); |
1055 | stat = argse.expr; |
1056 | } |
1057 | else if (flag_coarray == GFC_FCOARRAY_LIB) |
1058 | stat = null_pointer_node; |
1059 | |
1060 | if (code->expr4) |
1061 | { |
1062 | gfc_init_se (&argse, NULL); |
1063 | gfc_conv_expr_val (se: &argse, expr: code->expr4); |
1064 | until_count = fold_convert (integer_type_node, argse.expr); |
1065 | } |
1066 | else |
1067 | until_count = integer_one_node; |
1068 | |
1069 | if (flag_coarray != GFC_FCOARRAY_LIB) |
1070 | { |
1071 | gfc_start_block (&se.pre); |
1072 | gfc_init_se (&argse, NULL); |
1073 | gfc_conv_expr_val (se: &argse, expr: code->expr1); |
1074 | |
1075 | if (op == EXEC_EVENT_POST) |
1076 | gfc_add_modify (&se.pre, argse.expr, |
1077 | fold_build2_loc (input_location, PLUS_EXPR, |
1078 | TREE_TYPE (argse.expr), argse.expr, |
1079 | build_int_cst (TREE_TYPE (argse.expr), 1))); |
1080 | else |
1081 | gfc_add_modify (&se.pre, argse.expr, |
1082 | fold_build2_loc (input_location, MINUS_EXPR, |
1083 | TREE_TYPE (argse.expr), argse.expr, |
1084 | fold_convert (TREE_TYPE (argse.expr), |
1085 | until_count))); |
1086 | if (stat != NULL_TREE) |
1087 | gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0)); |
1088 | |
1089 | return gfc_finish_block (&se.pre); |
1090 | } |
1091 | |
1092 | gfc_start_block (&se.pre); |
1093 | tree tmp, token, image_index, errmsg, errmsg_len; |
1094 | tree index = build_zero_cst (gfc_array_index_type); |
1095 | tree caf_decl = gfc_get_tree_for_caf_expr (code->expr1); |
1096 | |
1097 | if (code->expr1->symtree->n.sym->ts.type != BT_DERIVED |
1098 | || code->expr1->symtree->n.sym->ts.u.derived->from_intmod |
1099 | != INTMOD_ISO_FORTRAN_ENV |
1100 | || code->expr1->symtree->n.sym->ts.u.derived->intmod_sym_id |
1101 | != ISOFORTRAN_EVENT_TYPE) |
1102 | { |
1103 | gfc_error ("Sorry, the event component of derived type at %L is not " |
1104 | "yet supported" , &code->expr1->where); |
1105 | return NULL_TREE; |
1106 | } |
1107 | |
1108 | gfc_init_se (&argse, NULL); |
1109 | gfc_get_caf_token_offset (&argse, &token, NULL, caf_decl, NULL_TREE, |
1110 | code->expr1); |
1111 | gfc_add_block_to_block (&se.pre, &argse.pre); |
1112 | |
1113 | if (gfc_is_coindexed (code->expr1)) |
1114 | image_index = gfc_caf_get_image_index (&se.pre, code->expr1, caf_decl); |
1115 | else |
1116 | image_index = integer_zero_node; |
1117 | |
1118 | /* For arrays, obtain the array index. */ |
1119 | if (gfc_expr_attr (code->expr1).dimension) |
1120 | { |
1121 | tree desc, tmp, extent, lbound, ubound; |
1122 | gfc_array_ref *ar, ar2; |
1123 | int i; |
1124 | |
1125 | /* TODO: Extend this, once DT components are supported. */ |
1126 | ar = &code->expr1->ref->u.ar; |
1127 | ar2 = *ar; |
1128 | memset (s: ar, c: '\0', n: sizeof (*ar)); |
1129 | ar->as = ar2.as; |
1130 | ar->type = AR_FULL; |
1131 | |
1132 | gfc_init_se (&argse, NULL); |
1133 | argse.descriptor_only = 1; |
1134 | gfc_conv_expr_descriptor (&argse, code->expr1); |
1135 | gfc_add_block_to_block (&se.pre, &argse.pre); |
1136 | desc = argse.expr; |
1137 | *ar = ar2; |
1138 | |
1139 | extent = build_one_cst (gfc_array_index_type); |
1140 | for (i = 0; i < ar->dimen; i++) |
1141 | { |
1142 | gfc_init_se (&argse, NULL); |
1143 | gfc_conv_expr_type (se: &argse, ar->start[i], gfc_array_index_type); |
1144 | gfc_add_block_to_block (&argse.pre, &argse.pre); |
1145 | lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[i]); |
1146 | tmp = fold_build2_loc (input_location, MINUS_EXPR, |
1147 | TREE_TYPE (lbound), argse.expr, lbound); |
1148 | tmp = fold_build2_loc (input_location, MULT_EXPR, |
1149 | TREE_TYPE (tmp), extent, tmp); |
1150 | index = fold_build2_loc (input_location, PLUS_EXPR, |
1151 | TREE_TYPE (tmp), index, tmp); |
1152 | if (i < ar->dimen - 1) |
1153 | { |
1154 | ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[i]); |
1155 | tmp = gfc_conv_array_extent_dim (lbound, ubound, NULL); |
1156 | extent = fold_build2_loc (input_location, MULT_EXPR, |
1157 | TREE_TYPE (tmp), extent, tmp); |
1158 | } |
1159 | } |
1160 | } |
1161 | |
1162 | /* errmsg. */ |
1163 | if (code->expr3) |
1164 | { |
1165 | gfc_init_se (&argse, NULL); |
1166 | argse.want_pointer = 1; |
1167 | gfc_conv_expr (se: &argse, expr: code->expr3); |
1168 | gfc_add_block_to_block (&se.pre, &argse.pre); |
1169 | errmsg = argse.expr; |
1170 | errmsg_len = fold_convert (size_type_node, argse.string_length); |
1171 | } |
1172 | else |
1173 | { |
1174 | errmsg = null_pointer_node; |
1175 | errmsg_len = build_zero_cst (size_type_node); |
1176 | } |
1177 | |
1178 | if (stat != null_pointer_node && TREE_TYPE (stat) != integer_type_node) |
1179 | { |
1180 | stat2 = stat; |
1181 | stat = gfc_create_var (integer_type_node, "stat" ); |
1182 | } |
1183 | |
1184 | index = fold_convert (size_type_node, index); |
1185 | if (op == EXEC_EVENT_POST) |
1186 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_event_post, 6, |
1187 | token, index, image_index, |
1188 | stat != null_pointer_node |
1189 | ? gfc_build_addr_expr (NULL, stat) : stat, |
1190 | errmsg, errmsg_len); |
1191 | else |
1192 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_event_wait, 6, |
1193 | token, index, until_count, |
1194 | stat != null_pointer_node |
1195 | ? gfc_build_addr_expr (NULL, stat) : stat, |
1196 | errmsg, errmsg_len); |
1197 | gfc_add_expr_to_block (&se.pre, tmp); |
1198 | |
1199 | /* It guarantees memory consistency within the same segment */ |
1200 | tmp = gfc_build_string_const (strlen (s: "memory" )+1, "memory" ), |
1201 | tmp = build5_loc (loc: input_location, code: ASM_EXPR, void_type_node, |
1202 | arg0: gfc_build_string_const (1, "" ), NULL_TREE, NULL_TREE, |
1203 | arg3: tree_cons (NULL_TREE, tmp, NULL_TREE), NULL_TREE); |
1204 | ASM_VOLATILE_P (tmp) = 1; |
1205 | gfc_add_expr_to_block (&se.pre, tmp); |
1206 | |
1207 | if (stat2 != NULL_TREE) |
1208 | gfc_add_modify (&se.pre, stat2, fold_convert (TREE_TYPE (stat2), stat)); |
1209 | |
1210 | return gfc_finish_block (&se.pre); |
1211 | } |
1212 | |
/* Translate the SYNC ALL, SYNC MEMORY and SYNC IMAGES statements; TYPE is
   EXEC_SYNC_ALL, EXEC_SYNC_MEMORY or EXEC_SYNC_IMAGES.  Uses code->expr1
   as the image set (SYNC IMAGES only), expr2 as STAT= and expr3 as
   ERRMSG=.  With -fcoarray=lib a call into the coarray runtime is
   emitted; otherwise only STAT= is set to zero (plus an optional bounds
   check on the image number).  */

tree
gfc_trans_sync (gfc_code *code, gfc_exec_op type)
{
  gfc_se se, argse;
  tree tmp;
  tree images = NULL_TREE, stat = NULL_TREE,
       errmsg = NULL_TREE, errmsglen = NULL_TREE;

  /* Short cut: For single images without bound checking or without STAT=,
     return early. (ERRMSG= is always untouched for -fcoarray=single.)  */
  if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && flag_coarray != GFC_FCOARRAY_LIB)
    return NULL_TREE;

  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Scalar image set of SYNC IMAGES: evaluate the image number once.  */
  if (code->expr1 && code->expr1->rank == 0)
    {
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (se: &argse, expr: code->expr1);
      images = argse.expr;
    }

  /* STAT= variable, if present; otherwise pass a null pointer.  */
  if (code->expr2)
    {
      gcc_assert (code->expr2->expr_type == EXPR_VARIABLE
		  || code->expr2->expr_type == EXPR_FUNCTION);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (se: &argse, expr: code->expr2);
      stat = argse.expr;
    }
  else
    stat = null_pointer_node;

  /* ERRMSG= character variable; only the library version uses it.  */
  if (code->expr3 && flag_coarray == GFC_FCOARRAY_LIB)
    {
      gcc_assert (code->expr3->expr_type == EXPR_VARIABLE
		  || code->expr3->expr_type == EXPR_FUNCTION);
      gfc_init_se (&argse, NULL);
      argse.want_pointer = 1;
      gfc_conv_expr (se: &argse, expr: code->expr3);
      gfc_conv_string_parameter (se: &argse);
      errmsg = gfc_build_addr_expr (NULL, argse.expr);
      errmsglen = fold_convert (size_type_node, argse.string_length);
    }
  else if (flag_coarray == GFC_FCOARRAY_LIB)
    {
      errmsg = null_pointer_node;
      errmsglen = build_int_cst (size_type_node, 0);
    }

  /* Check SYNC IMAGES(imageset) for valid image index.
     FIXME: Add a check for image-set arrays.  */
  if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && code->expr1->rank == 0)
    {
      tree images2 = fold_convert (integer_type_node, images);
      tree cond;
      if (flag_coarray != GFC_FCOARRAY_LIB)
	/* Single image: the only valid image number is 1.  */
	cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node,
				images, build_int_cst (TREE_TYPE (images), 1));
      else
	{
	  /* Library mode: valid range is 1 .. num_images().  */
	  tree cond2;
	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_num_images,
				     2, integer_zero_node,
				     build_int_cst (integer_type_node, -1));
	  cond = fold_build2_loc (input_location, GT_EXPR, logical_type_node,
				  images2, tmp);
	  cond2 = fold_build2_loc (input_location, LT_EXPR, logical_type_node,
				   images,
				   build_int_cst (TREE_TYPE (images), 1));
	  cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
				  logical_type_node, cond, cond2);
	}
      gfc_trans_runtime_check (true, false, cond, &se.pre,
			       &code->expr1->where, "Invalid image number "
			       "%d in SYNC IMAGES" , images2);
    }

  /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
     image control statements SYNC IMAGES and SYNC ALL.  */
  if (flag_coarray == GFC_FCOARRAY_LIB)
    {
      /* An empty volatile asm with a "memory" clobber serves as the
	 compiler-level memory barrier.  */
      tmp = gfc_build_string_const (strlen (s: "memory" )+1, "memory" ),
      tmp = build5_loc (loc: input_location, code: ASM_EXPR, void_type_node,
			arg0: gfc_build_string_const (1, "" ), NULL_TREE, NULL_TREE,
			arg3: tree_cons (NULL_TREE, tmp, NULL_TREE), NULL_TREE);
      ASM_VOLATILE_P (tmp) = 1;
      gfc_add_expr_to_block (&se.pre, tmp);
    }

  if (flag_coarray != GFC_FCOARRAY_LIB)
    {
      /* Set STAT to zero.  */
      if (code->expr2)
	gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
    }
  else if (type == EXEC_SYNC_ALL || type == EXEC_SYNC_MEMORY)
    {
      /* SYNC ALL           =>   stat == null_pointer_node
	 SYNC ALL(stat=s)   =>   stat has an integer type

	 If "stat" has the wrong integer type, use a temp variable of
	 the right type and later cast the result back into "stat".  */
      if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
	{
	  if (TREE_TYPE (stat) == integer_type_node)
	    stat = gfc_build_addr_expr (NULL, stat);

	  if(type == EXEC_SYNC_MEMORY)
	    tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_memory,
				       3, stat, errmsg, errmsglen);
	  else
	    tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
				       3, stat, errmsg, errmsglen);

	  gfc_add_expr_to_block (&se.pre, tmp);
	}
      else
	{
	  /* Wrong-kind STAT: call with an int temp, copy back after.  */
	  tree tmp_stat = gfc_create_var (integer_type_node, "stat" );

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
				     3, gfc_build_addr_expr (NULL, tmp_stat),
				     errmsg, errmsglen);
	  gfc_add_expr_to_block (&se.pre, tmp);

	  gfc_add_modify (&se.pre, stat,
			  fold_convert (TREE_TYPE (stat), tmp_stat));
	}
    }
  else
    {
      tree len;

      gcc_assert (type == EXEC_SYNC_IMAGES);

      /* Build the image-set pointer and its length: -1 means "all
	 images" (SYNC IMAGES(*)), 1 a scalar image number, otherwise
	 an array of image numbers with its element count.  */
      if (!code->expr1)
	{
	  len = build_int_cst (integer_type_node, -1);
	  images = null_pointer_node;
	}
      else if (code->expr1->rank == 0)
	{
	  len = build_int_cst (integer_type_node, 1);
	  images = gfc_build_addr_expr (NULL_TREE, images);
	}
      else
	{
	  /* FIXME.  */
	  if (code->expr1->ts.kind != gfc_c_int_kind)
	    gfc_fatal_error ("Sorry, only support for integer kind %d "
			     "implemented for image-set at %L" ,
			     gfc_c_int_kind, &code->expr1->where);

	  gfc_conv_array_parameter (&se, code->expr1, true, NULL, NULL, &len);
	  images = se.expr;

	  /* Convert the byte length returned above into an element
	     count by dividing by the element size.  */
	  tmp = gfc_typenode_for_spec (&code->expr1->ts);
	  if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
	    tmp = gfc_get_element_type (tmp);

	  len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
				 TREE_TYPE (len), len,
				 fold_convert (TREE_TYPE (len),
					       TYPE_SIZE_UNIT (tmp)));
	  len = fold_convert (integer_type_node, len);
	}

      /* SYNC IMAGES(imgs)        => stat == null_pointer_node
	 SYNC IMAGES(imgs,stat=s) => stat has an integer type

	 If "stat" has the wrong integer type, use a temp variable of
	 the right type and later cast the result back into "stat".  */
      if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
	{
	  if (TREE_TYPE (stat) == integer_type_node)
	    stat = gfc_build_addr_expr (NULL, stat);

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
				     5, fold_convert (integer_type_node, len),
				     images, stat, errmsg, errmsglen);
	  gfc_add_expr_to_block (&se.pre, tmp);
	}
      else
	{
	  /* Wrong-kind STAT: call with an int temp, copy back after.  */
	  tree tmp_stat = gfc_create_var (integer_type_node, "stat" );

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
				     5, fold_convert (integer_type_node, len),
				     images, gfc_build_addr_expr (NULL, tmp_stat),
				     errmsg, errmsglen);
	  gfc_add_expr_to_block (&se.pre, tmp);

	  gfc_add_modify (&se.pre, stat,
			  fold_convert (TREE_TYPE (stat), tmp_stat));
	}
    }

  return gfc_finish_block (&se.pre);
}
1416 | |
1417 | |
1418 | /* Generate GENERIC for the IF construct. This function also deals with |
1419 | the simple IF statement, because the front end translates the IF |
1420 | statement into an IF construct. |
1421 | |
1422 | We translate: |
1423 | |
1424 | IF (cond) THEN |
1425 | then_clause |
1426 | ELSEIF (cond2) |
1427 | elseif_clause |
1428 | ELSE |
1429 | else_clause |
1430 | ENDIF |
1431 | |
1432 | into: |
1433 | |
1434 | pre_cond_s; |
1435 | if (cond_s) |
1436 | { |
1437 | then_clause; |
1438 | } |
1439 | else |
1440 | { |
1441 | pre_cond_s |
1442 | if (cond_s) |
1443 | { |
1444 | elseif_clause |
1445 | } |
1446 | else |
1447 | { |
1448 | else_clause; |
1449 | } |
1450 | } |
1451 | |
1452 | where COND_S is the simplified version of the predicate. PRE_COND_S |
1453 | are the pre side-effects produced by the translation of the |
1454 | conditional. |
1455 | We need to build the chain recursively otherwise we run into |
1456 | problems with folding incomplete statements. */ |
1457 | |
1458 | static tree |
1459 | gfc_trans_if_1 (gfc_code * code) |
1460 | { |
1461 | gfc_se if_se; |
1462 | tree stmt, elsestmt; |
1463 | locus saved_loc; |
1464 | location_t loc; |
1465 | |
1466 | /* Check for an unconditional ELSE clause. */ |
1467 | if (!code->expr1) |
1468 | return gfc_trans_code (code->next); |
1469 | |
1470 | /* Initialize a statement builder for each block. Puts in NULL_TREEs. */ |
1471 | gfc_init_se (&if_se, NULL); |
1472 | gfc_start_block (&if_se.pre); |
1473 | |
1474 | /* Calculate the IF condition expression. */ |
1475 | if (code->expr1->where.lb) |
1476 | { |
1477 | gfc_save_backend_locus (&saved_loc); |
1478 | gfc_set_backend_locus (&code->expr1->where); |
1479 | } |
1480 | |
1481 | gfc_conv_expr_val (se: &if_se, expr: code->expr1); |
1482 | |
1483 | if (code->expr1->where.lb) |
1484 | gfc_restore_backend_locus (&saved_loc); |
1485 | |
1486 | /* Translate the THEN clause. */ |
1487 | stmt = gfc_trans_code (code->next); |
1488 | |
1489 | /* Translate the ELSE clause. */ |
1490 | if (code->block) |
1491 | elsestmt = gfc_trans_if_1 (code: code->block); |
1492 | else |
1493 | elsestmt = build_empty_stmt (input_location); |
1494 | |
1495 | /* Build the condition expression and add it to the condition block. */ |
1496 | loc = code->expr1->where.lb ? gfc_get_location (&code->expr1->where) |
1497 | : input_location; |
1498 | stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt, |
1499 | elsestmt); |
1500 | |
1501 | gfc_add_expr_to_block (&if_se.pre, stmt); |
1502 | |
1503 | /* Finish off this statement. */ |
1504 | return gfc_finish_block (&if_se.pre); |
1505 | } |
1506 | |
1507 | tree |
1508 | gfc_trans_if (gfc_code * code) |
1509 | { |
1510 | stmtblock_t body; |
1511 | tree exit_label; |
1512 | |
1513 | /* Create exit label so it is available for trans'ing the body code. */ |
1514 | exit_label = gfc_build_label_decl (NULL_TREE); |
1515 | code->exit_label = exit_label; |
1516 | |
1517 | /* Translate the actual code in code->block. */ |
1518 | gfc_init_block (&body); |
1519 | gfc_add_expr_to_block (&body, gfc_trans_if_1 (code: code->block)); |
1520 | |
1521 | /* Add exit label. */ |
1522 | gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label)); |
1523 | |
1524 | return gfc_finish_block (&body); |
1525 | } |
1526 | |
1527 | |
1528 | /* Translate an arithmetic IF expression. |
1529 | |
1530 | IF (cond) label1, label2, label3 translates to |
1531 | |
1532 | if (cond <= 0) |
1533 | { |
1534 | if (cond < 0) |
1535 | goto label1; |
1536 | else // cond == 0 |
1537 | goto label2; |
1538 | } |
1539 | else // cond > 0 |
1540 | goto label3; |
1541 | |
1542 | An optimized version can be generated in case of equal labels. |
1543 | E.g., if label1 is equal to label2, we can translate it to |
1544 | |
1545 | if (cond <= 0) |
1546 | goto label1; |
1547 | else |
1548 | goto label3; |
1549 | */ |
1550 | |
1551 | tree |
1552 | gfc_trans_arithmetic_if (gfc_code * code) |
1553 | { |
1554 | gfc_se se; |
1555 | tree tmp; |
1556 | tree branch1; |
1557 | tree branch2; |
1558 | tree zero; |
1559 | |
1560 | /* Start a new block. */ |
1561 | gfc_init_se (&se, NULL); |
1562 | gfc_start_block (&se.pre); |
1563 | |
1564 | /* Pre-evaluate COND. */ |
1565 | gfc_conv_expr_val (se: &se, expr: code->expr1); |
1566 | se.expr = gfc_evaluate_now (se.expr, &se.pre); |
1567 | |
1568 | /* Build something to compare with. */ |
1569 | zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node); |
1570 | |
1571 | if (code->label1->value != code->label2->value) |
1572 | { |
1573 | /* If (cond < 0) take branch1 else take branch2. |
1574 | First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */ |
1575 | branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1)); |
1576 | branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2)); |
1577 | |
1578 | if (code->label1->value != code->label3->value) |
1579 | tmp = fold_build2_loc (input_location, LT_EXPR, logical_type_node, |
1580 | se.expr, zero); |
1581 | else |
1582 | tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node, |
1583 | se.expr, zero); |
1584 | |
1585 | branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
1586 | tmp, branch1, branch2); |
1587 | } |
1588 | else |
1589 | branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1)); |
1590 | |
1591 | if (code->label1->value != code->label3->value |
1592 | && code->label2->value != code->label3->value) |
1593 | { |
1594 | /* if (cond <= 0) take branch1 else take branch2. */ |
1595 | branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3)); |
1596 | tmp = fold_build2_loc (input_location, LE_EXPR, logical_type_node, |
1597 | se.expr, zero); |
1598 | branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
1599 | tmp, branch1, branch2); |
1600 | } |
1601 | |
1602 | /* Append the COND_EXPR to the evaluation of COND, and return. */ |
1603 | gfc_add_expr_to_block (&se.pre, branch1); |
1604 | return gfc_finish_block (&se.pre); |
1605 | } |
1606 | |
1607 | |
1608 | /* Translate a CRITICAL block. */ |
1609 | tree |
1610 | gfc_trans_critical (gfc_code *code) |
1611 | { |
1612 | stmtblock_t block; |
1613 | tree tmp, token = NULL_TREE; |
1614 | |
1615 | gfc_start_block (&block); |
1616 | |
1617 | if (flag_coarray == GFC_FCOARRAY_LIB) |
1618 | { |
1619 | tree zero_size = build_zero_cst (size_type_node); |
1620 | token = gfc_get_symbol_decl (code->resolved_sym); |
1621 | token = GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token)); |
1622 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_lock, 7, |
1623 | token, zero_size, integer_one_node, |
1624 | null_pointer_node, null_pointer_node, |
1625 | null_pointer_node, zero_size); |
1626 | gfc_add_expr_to_block (&block, tmp); |
1627 | |
1628 | /* It guarantees memory consistency within the same segment */ |
1629 | tmp = gfc_build_string_const (strlen (s: "memory" )+1, "memory" ), |
1630 | tmp = build5_loc (loc: input_location, code: ASM_EXPR, void_type_node, |
1631 | arg0: gfc_build_string_const (1, "" ), |
1632 | NULL_TREE, NULL_TREE, |
1633 | arg3: tree_cons (NULL_TREE, tmp, NULL_TREE), |
1634 | NULL_TREE); |
1635 | ASM_VOLATILE_P (tmp) = 1; |
1636 | |
1637 | gfc_add_expr_to_block (&block, tmp); |
1638 | } |
1639 | |
1640 | tmp = gfc_trans_code (code->block->next); |
1641 | gfc_add_expr_to_block (&block, tmp); |
1642 | |
1643 | if (flag_coarray == GFC_FCOARRAY_LIB) |
1644 | { |
1645 | tree zero_size = build_zero_cst (size_type_node); |
1646 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_unlock, 6, |
1647 | token, zero_size, integer_one_node, |
1648 | null_pointer_node, null_pointer_node, |
1649 | zero_size); |
1650 | gfc_add_expr_to_block (&block, tmp); |
1651 | |
1652 | /* It guarantees memory consistency within the same segment */ |
1653 | tmp = gfc_build_string_const (strlen (s: "memory" )+1, "memory" ), |
1654 | tmp = build5_loc (loc: input_location, code: ASM_EXPR, void_type_node, |
1655 | arg0: gfc_build_string_const (1, "" ), |
1656 | NULL_TREE, NULL_TREE, |
1657 | arg3: tree_cons (NULL_TREE, tmp, NULL_TREE), |
1658 | NULL_TREE); |
1659 | ASM_VOLATILE_P (tmp) = 1; |
1660 | |
1661 | gfc_add_expr_to_block (&block, tmp); |
1662 | } |
1663 | |
1664 | return gfc_finish_block (&block); |
1665 | } |
1666 | |
1667 | |
1668 | /* Return true, when the class has a _len component. */ |
1669 | |
1670 | static bool |
1671 | class_has_len_component (gfc_symbol *sym) |
1672 | { |
1673 | gfc_component *comp = sym->ts.u.derived->components; |
1674 | while (comp) |
1675 | { |
1676 | if (strcmp (s1: comp->name, s2: "_len" ) == 0) |
1677 | return true; |
1678 | comp = comp->next; |
1679 | } |
1680 | return false; |
1681 | } |
1682 | |
1683 | |
1684 | static void |
1685 | copy_descriptor (stmtblock_t *block, tree dst, tree src, int rank) |
1686 | { |
1687 | int n; |
1688 | tree dim; |
1689 | tree tmp; |
1690 | tree tmp2; |
1691 | tree size; |
1692 | tree offset; |
1693 | |
1694 | offset = gfc_index_zero_node; |
1695 | |
1696 | /* Use memcpy to copy the descriptor. The size is the minimum of |
1697 | the sizes of 'src' and 'dst'. This avoids a non-trivial conversion. */ |
1698 | tmp = TYPE_SIZE_UNIT (TREE_TYPE (src)); |
1699 | tmp2 = TYPE_SIZE_UNIT (TREE_TYPE (dst)); |
1700 | size = fold_build2_loc (input_location, MIN_EXPR, |
1701 | TREE_TYPE (tmp), tmp, tmp2); |
1702 | tmp = builtin_decl_explicit (fncode: BUILT_IN_MEMCPY); |
1703 | tmp = build_call_expr_loc (input_location, tmp, 3, |
1704 | gfc_build_addr_expr (NULL_TREE, dst), |
1705 | gfc_build_addr_expr (NULL_TREE, src), |
1706 | fold_convert (size_type_node, size)); |
1707 | gfc_add_expr_to_block (block, tmp); |
1708 | |
1709 | /* Set the offset correctly. */ |
1710 | for (n = 0; n < rank; n++) |
1711 | { |
1712 | dim = gfc_rank_cst[n]; |
1713 | tmp = gfc_conv_descriptor_lbound_get (src, dim); |
1714 | tmp2 = gfc_conv_descriptor_stride_get (src, dim); |
1715 | tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp), |
1716 | tmp, tmp2); |
1717 | offset = fold_build2_loc (input_location, MINUS_EXPR, |
1718 | TREE_TYPE (offset), offset, tmp); |
1719 | offset = gfc_evaluate_now (offset, block); |
1720 | } |
1721 | |
1722 | gfc_conv_descriptor_offset_set (block, dst, offset); |
1723 | } |
1724 | |
1725 | |
1726 | /* Do proper initialization for ASSOCIATE names. */ |
1727 | |
1728 | static void |
1729 | trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block) |
1730 | { |
1731 | gfc_expr *e; |
1732 | tree tmp; |
1733 | bool class_target; |
1734 | bool unlimited; |
1735 | tree desc; |
1736 | tree offset; |
1737 | tree dim; |
1738 | int n; |
1739 | tree charlen; |
1740 | bool need_len_assign; |
1741 | bool whole_array = true; |
1742 | gfc_ref *ref; |
1743 | gfc_symbol *sym2; |
1744 | |
1745 | gcc_assert (sym->assoc); |
1746 | e = sym->assoc->target; |
1747 | |
1748 | class_target = (e->expr_type == EXPR_VARIABLE) |
1749 | && (gfc_is_class_scalar_expr (e) |
1750 | || gfc_is_class_array_ref (e, NULL)); |
1751 | |
1752 | unlimited = UNLIMITED_POLY (e); |
1753 | |
1754 | for (ref = e->ref; ref; ref = ref->next) |
1755 | if (ref->type == REF_ARRAY |
1756 | && ref->u.ar.type == AR_FULL |
1757 | && ref->next) |
1758 | { |
1759 | whole_array = false; |
1760 | break; |
1761 | } |
1762 | |
1763 | /* Assignments to the string length need to be generated, when |
1764 | ( sym is a char array or |
1765 | sym has a _len component) |
1766 | and the associated expression is unlimited polymorphic, which is |
1767 | not (yet) correctly in 'unlimited', because for an already associated |
1768 | BT_DERIVED the u-poly flag is not set, i.e., |
1769 | __tmp_CHARACTER_0_1 => w => arg |
1770 | ^ generated temp ^ from code, the w does not have the u-poly |
1771 | flag set, where UNLIMITED_POLY(e) expects it. */ |
1772 | need_len_assign = ((unlimited || (e->ts.type == BT_DERIVED |
1773 | && e->ts.u.derived->attr.unlimited_polymorphic)) |
1774 | && (sym->ts.type == BT_CHARACTER |
1775 | || ((sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED) |
1776 | && class_has_len_component (sym))) |
1777 | && !sym->attr.select_rank_temporary); |
1778 | |
1779 | /* Do a `pointer assignment' with updated descriptor (or assign descriptor |
1780 | to array temporary) for arrays with either unknown shape or if associating |
1781 | to a variable. Select rank temporaries need somewhat different treatment |
1782 | to other associate names and case temporaries. This because the selector |
1783 | is assumed rank and so the offset in particular has to be changed. Also, |
1784 | the case temporaries carry both allocatable and target attributes if |
1785 | present in the selector. This means that an allocatation or change of |
1786 | association can occur and so has to be dealt with. */ |
1787 | if (sym->attr.select_rank_temporary) |
1788 | { |
1789 | gfc_se se; |
1790 | tree class_decl = NULL_TREE; |
1791 | int rank = 0; |
1792 | bool class_ptr; |
1793 | |
1794 | sym2 = e->symtree->n.sym; |
1795 | gfc_init_se (&se, NULL); |
1796 | if (e->ts.type == BT_CLASS) |
1797 | { |
1798 | /* Go straight to the class data. */ |
1799 | if (sym2->attr.dummy && !sym2->attr.optional) |
1800 | { |
1801 | class_decl = sym2->backend_decl; |
1802 | if (DECL_LANG_SPECIFIC (class_decl) |
1803 | && GFC_DECL_SAVED_DESCRIPTOR (class_decl)) |
1804 | class_decl = GFC_DECL_SAVED_DESCRIPTOR (class_decl); |
1805 | if (POINTER_TYPE_P (TREE_TYPE (class_decl))) |
1806 | class_decl = build_fold_indirect_ref_loc (input_location, |
1807 | class_decl); |
1808 | gcc_assert (GFC_CLASS_TYPE_P (TREE_TYPE (class_decl))); |
1809 | se.expr = gfc_class_data_get (class_decl); |
1810 | } |
1811 | else |
1812 | { |
1813 | class_decl = sym2->backend_decl; |
1814 | gfc_conv_expr_descriptor (&se, e); |
1815 | if (POINTER_TYPE_P (TREE_TYPE (se.expr))) |
1816 | se.expr = build_fold_indirect_ref_loc (input_location, |
1817 | se.expr); |
1818 | } |
1819 | |
1820 | if (CLASS_DATA (sym)->as && CLASS_DATA (sym)->as->rank > 0) |
1821 | rank = CLASS_DATA (sym)->as->rank; |
1822 | } |
1823 | else |
1824 | { |
1825 | gfc_conv_expr_descriptor (&se, e); |
1826 | if (sym->as && sym->as->rank > 0) |
1827 | rank = sym->as->rank; |
1828 | } |
1829 | |
1830 | desc = sym->backend_decl; |
1831 | |
1832 | /* The SELECT TYPE mechanisms turn class temporaries into pointers, which |
1833 | point to the selector. */ |
1834 | class_ptr = class_decl != NULL_TREE && POINTER_TYPE_P (TREE_TYPE (desc)); |
1835 | if (class_ptr) |
1836 | { |
1837 | tmp = gfc_create_var (TREE_TYPE (TREE_TYPE (desc)), "class" ); |
1838 | tmp = gfc_build_addr_expr (NULL, tmp); |
1839 | gfc_add_modify (&se.pre, desc, tmp); |
1840 | |
1841 | tmp = gfc_class_vptr_get (class_decl); |
1842 | gfc_add_modify (&se.pre, gfc_class_vptr_get (desc), tmp); |
1843 | if (UNLIMITED_POLY (sym)) |
1844 | gfc_add_modify (&se.pre, gfc_class_len_get (desc), |
1845 | gfc_class_len_get (class_decl)); |
1846 | |
1847 | desc = gfc_class_data_get (desc); |
1848 | } |
1849 | |
1850 | /* SELECT RANK temporaries can carry the allocatable and pointer |
1851 | attributes so the selector descriptor must be copied in and |
1852 | copied out. */ |
1853 | if (rank > 0) |
1854 | copy_descriptor (block: &se.pre, dst: desc, src: se.expr, rank); |
1855 | else |
1856 | { |
1857 | tmp = gfc_conv_descriptor_data_get (se.expr); |
1858 | gfc_add_modify (&se.pre, desc, |
1859 | fold_convert (TREE_TYPE (desc), tmp)); |
1860 | } |
1861 | |
1862 | /* Deal with associate_name => selector. Class associate names are |
1863 | treated in the same way as in SELECT TYPE. */ |
1864 | sym2 = sym->assoc->target->symtree->n.sym; |
1865 | if (sym2->assoc && sym->assoc->target && sym2->ts.type != BT_CLASS) |
1866 | { |
1867 | sym2 = sym2->assoc->target->symtree->n.sym; |
1868 | se.expr = sym2->backend_decl; |
1869 | |
1870 | if (POINTER_TYPE_P (TREE_TYPE (se.expr))) |
1871 | se.expr = build_fold_indirect_ref_loc (input_location, |
1872 | se.expr); |
1873 | } |
1874 | |
1875 | /* There could have been reallocation. Copy descriptor back to the |
1876 | selector and update the offset. */ |
1877 | if (sym->attr.allocatable || sym->attr.pointer |
1878 | || (sym->ts.type == BT_CLASS |
1879 | && (CLASS_DATA (sym)->attr.allocatable |
1880 | || CLASS_DATA (sym)->attr.pointer))) |
1881 | { |
1882 | if (rank > 0) |
1883 | copy_descriptor (block: &se.post, dst: se.expr, src: desc, rank); |
1884 | else |
1885 | gfc_conv_descriptor_data_set (&se.post, se.expr, desc); |
1886 | |
1887 | /* The dynamic type could have changed too. */ |
1888 | if (sym->ts.type == BT_CLASS) |
1889 | { |
1890 | tmp = sym->backend_decl; |
1891 | if (class_ptr) |
1892 | tmp = build_fold_indirect_ref_loc (input_location, tmp); |
1893 | gfc_add_modify (&se.post, gfc_class_vptr_get (class_decl), |
1894 | gfc_class_vptr_get (tmp)); |
1895 | if (UNLIMITED_POLY (sym)) |
1896 | gfc_add_modify (&se.post, gfc_class_len_get (class_decl), |
1897 | gfc_class_len_get (tmp)); |
1898 | } |
1899 | } |
1900 | |
1901 | tmp = gfc_finish_block (&se.post); |
1902 | |
1903 | gfc_add_init_cleanup (block, init: gfc_finish_block (&se.pre), cleanup: tmp); |
1904 | } |
1905 | /* Now all the other kinds of associate variable. */ |
1906 | else if (sym->attr.dimension && !class_target |
1907 | && (sym->as->type == AS_DEFERRED || sym->assoc->variable)) |
1908 | { |
1909 | gfc_se se; |
1910 | tree desc; |
1911 | bool cst_array_ctor; |
1912 | |
1913 | desc = sym->backend_decl; |
1914 | cst_array_ctor = e->expr_type == EXPR_ARRAY |
1915 | && gfc_constant_array_constructor_p (e->value.constructor) |
1916 | && e->ts.type != BT_CHARACTER; |
1917 | |
1918 | /* If association is to an expression, evaluate it and create temporary. |
1919 | Otherwise, get descriptor of target for pointer assignment. */ |
1920 | gfc_init_se (&se, NULL); |
1921 | |
1922 | if (sym->assoc->variable || cst_array_ctor) |
1923 | { |
1924 | se.direct_byref = 1; |
1925 | se.use_offset = 1; |
1926 | se.expr = desc; |
1927 | GFC_DECL_PTR_ARRAY_P (sym->backend_decl) = 1; |
1928 | } |
1929 | |
1930 | gfc_conv_expr_descriptor (&se, e); |
1931 | |
1932 | if (sym->ts.type == BT_CHARACTER |
1933 | && !sym->attr.select_type_temporary |
1934 | && sym->ts.u.cl->backend_decl |
1935 | && VAR_P (sym->ts.u.cl->backend_decl) |
1936 | && se.string_length != sym->ts.u.cl->backend_decl) |
1937 | gfc_add_modify (&se.pre, sym->ts.u.cl->backend_decl, |
1938 | fold_convert (TREE_TYPE (sym->ts.u.cl->backend_decl), |
1939 | se.string_length)); |
1940 | |
1941 | /* If we didn't already do the pointer assignment, set associate-name |
1942 | descriptor to the one generated for the temporary. */ |
1943 | if ((!sym->assoc->variable && !cst_array_ctor) |
1944 | || !whole_array) |
1945 | { |
1946 | int dim; |
1947 | |
1948 | if (whole_array) |
1949 | gfc_add_modify (&se.pre, desc, se.expr); |
1950 | |
1951 | /* The generated descriptor has lower bound zero (as array |
1952 | temporary), shift bounds so we get lower bounds of 1. */ |
1953 | for (dim = 0; dim < e->rank; ++dim) |
1954 | gfc_conv_shift_descriptor_lbound (&se.pre, desc, |
1955 | dim, gfc_index_one_node); |
1956 | } |
1957 | |
1958 | /* If this is a subreference array pointer associate name use the |
1959 | associate variable element size for the value of 'span'. */ |
1960 | if (sym->attr.subref_array_pointer && !se.direct_byref) |
1961 | { |
1962 | gcc_assert (e->expr_type == EXPR_VARIABLE); |
1963 | tmp = gfc_get_array_span (se.expr, e); |
1964 | |
1965 | gfc_conv_descriptor_span_set (&se.pre, desc, tmp); |
1966 | } |
1967 | |
1968 | if (e->expr_type == EXPR_FUNCTION |
1969 | && sym->ts.type == BT_DERIVED |
1970 | && sym->ts.u.derived |
1971 | && sym->ts.u.derived->attr.pdt_type) |
1972 | { |
1973 | tmp = gfc_deallocate_pdt_comp (sym->ts.u.derived, se.expr, |
1974 | sym->as->rank); |
1975 | gfc_add_expr_to_block (&se.post, tmp); |
1976 | } |
1977 | |
1978 | /* Done, register stuff as init / cleanup code. */ |
1979 | gfc_add_init_cleanup (block, init: gfc_finish_block (&se.pre), |
1980 | cleanup: gfc_finish_block (&se.post)); |
1981 | } |
1982 | |
1983 | /* Temporaries, arising from TYPE IS, just need the descriptor of class |
1984 | arrays to be assigned directly. */ |
1985 | else if (class_target && sym->attr.dimension |
1986 | && (sym->ts.type == BT_DERIVED || unlimited)) |
1987 | { |
1988 | gfc_se se; |
1989 | |
1990 | gfc_init_se (&se, NULL); |
1991 | se.descriptor_only = 1; |
1992 | /* In a select type the (temporary) associate variable shall point to |
1993 | a standard fortran array (lower bound == 1), but conv_expr () |
1994 | just maps to the input array in the class object, whose lbound may |
1995 | be arbitrary. conv_expr_descriptor solves this by inserting a |
1996 | temporary array descriptor. */ |
1997 | gfc_conv_expr_descriptor (&se, e); |
1998 | |
1999 | gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)) |
2000 | || GFC_ARRAY_TYPE_P (TREE_TYPE (se.expr))); |
2001 | gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl))); |
2002 | |
2003 | if (GFC_ARRAY_TYPE_P (TREE_TYPE (se.expr))) |
2004 | { |
2005 | if (INDIRECT_REF_P (se.expr)) |
2006 | tmp = TREE_OPERAND (se.expr, 0); |
2007 | else |
2008 | tmp = se.expr; |
2009 | |
2010 | gfc_add_modify (&se.pre, sym->backend_decl, |
2011 | gfc_class_data_get (GFC_DECL_SAVED_DESCRIPTOR (tmp))); |
2012 | } |
2013 | else |
2014 | gfc_add_modify (&se.pre, sym->backend_decl, se.expr); |
2015 | |
2016 | if (unlimited) |
2017 | { |
2018 | /* Recover the dtype, which has been overwritten by the |
2019 | assignment from an unlimited polymorphic object. */ |
2020 | tmp = gfc_conv_descriptor_dtype (sym->backend_decl); |
2021 | gfc_add_modify (&se.pre, tmp, |
2022 | gfc_get_dtype (TREE_TYPE (sym->backend_decl))); |
2023 | } |
2024 | |
2025 | gfc_add_init_cleanup (block, init: gfc_finish_block (&se.pre), |
2026 | cleanup: gfc_finish_block (&se.post)); |
2027 | } |
2028 | |
2029 | /* Do a scalar pointer assignment; this is for scalar variable targets. */ |
2030 | else if (gfc_is_associate_pointer (sym)) |
2031 | { |
2032 | gfc_se se; |
2033 | |
2034 | gcc_assert (!sym->attr.dimension); |
2035 | |
2036 | gfc_init_se (&se, NULL); |
2037 | |
2038 | /* Class associate-names come this way because they are |
2039 | unconditionally associate pointers and the symbol is scalar. */ |
2040 | if (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.dimension) |
2041 | { |
2042 | tree target_expr; |
2043 | /* For a class array we need a descriptor for the selector. */ |
2044 | gfc_conv_expr_descriptor (&se, e); |
2045 | /* Needed to get/set the _len component below. */ |
2046 | target_expr = se.expr; |
2047 | |
2048 | /* Obtain a temporary class container for the result. */ |
2049 | gfc_conv_class_to_class (&se, e, sym->ts, false, true, false, false); |
2050 | se.expr = build_fold_indirect_ref_loc (input_location, se.expr); |
2051 | |
2052 | /* Set the offset. */ |
2053 | desc = gfc_class_data_get (se.expr); |
2054 | offset = gfc_index_zero_node; |
2055 | for (n = 0; n < e->rank; n++) |
2056 | { |
2057 | dim = gfc_rank_cst[n]; |
2058 | tmp = fold_build2_loc (input_location, MULT_EXPR, |
2059 | gfc_array_index_type, |
2060 | gfc_conv_descriptor_stride_get (desc, dim), |
2061 | gfc_conv_descriptor_lbound_get (desc, dim)); |
2062 | offset = fold_build2_loc (input_location, MINUS_EXPR, |
2063 | gfc_array_index_type, |
2064 | offset, tmp); |
2065 | } |
2066 | if (need_len_assign) |
2067 | { |
2068 | if (e->symtree |
2069 | && DECL_LANG_SPECIFIC (e->symtree->n.sym->backend_decl) |
2070 | && GFC_DECL_SAVED_DESCRIPTOR (e->symtree->n.sym->backend_decl) |
2071 | && TREE_CODE (target_expr) != COMPONENT_REF) |
2072 | /* Use the original class descriptor stored in the saved |
2073 | descriptor to get the target_expr. */ |
2074 | target_expr = |
2075 | GFC_DECL_SAVED_DESCRIPTOR (e->symtree->n.sym->backend_decl); |
2076 | else |
2077 | /* Strip the _data component from the target_expr. */ |
2078 | target_expr = TREE_OPERAND (target_expr, 0); |
2079 | /* Add a reference to the _len comp to the target expr. */ |
2080 | tmp = gfc_class_len_get (target_expr); |
2081 | /* Get the component-ref for the temp structure's _len comp. */ |
2082 | charlen = gfc_class_len_get (se.expr); |
2083 | /* Add the assign to the beginning of the block... */ |
2084 | gfc_add_modify (&se.pre, charlen, |
2085 | fold_convert (TREE_TYPE (charlen), tmp)); |
	      /* and the opposite way at the end of the block, to hand changes
2087 | on the string length back. */ |
2088 | gfc_add_modify (&se.post, tmp, |
2089 | fold_convert (TREE_TYPE (tmp), charlen)); |
2090 | /* Length assignment done, prevent adding it again below. */ |
2091 | need_len_assign = false; |
2092 | } |
2093 | gfc_conv_descriptor_offset_set (&se.pre, desc, offset); |
2094 | } |
2095 | else if (sym->ts.type == BT_CLASS && e->ts.type == BT_CLASS |
2096 | && CLASS_DATA (e)->attr.dimension) |
2097 | { |
2098 | /* This is bound to be a class array element. */ |
2099 | gfc_conv_expr_reference (se: &se, expr: e); |
2100 | /* Get the _vptr component of the class object. */ |
2101 | tmp = gfc_get_vptr_from_expr (se.expr); |
2102 | /* Obtain a temporary class container for the result. */ |
2103 | gfc_conv_derived_to_class (&se, e, sym->ts, tmp, false, false); |
2104 | se.expr = build_fold_indirect_ref_loc (input_location, se.expr); |
2105 | need_len_assign = false; |
2106 | } |
2107 | else |
2108 | { |
2109 | /* For BT_CLASS and BT_DERIVED, this boils down to a pointer assign, |
2110 | which has the string length included. For CHARACTERS it is still |
2111 | needed and will be done at the end of this routine. */ |
2112 | gfc_conv_expr (se: &se, expr: e); |
2113 | need_len_assign = need_len_assign && sym->ts.type == BT_CHARACTER; |
2114 | } |
2115 | |
2116 | if (sym->ts.type == BT_CHARACTER |
2117 | && !sym->attr.select_type_temporary |
2118 | && VAR_P (sym->ts.u.cl->backend_decl) |
2119 | && se.string_length != sym->ts.u.cl->backend_decl) |
2120 | { |
2121 | gfc_add_modify (&se.pre, sym->ts.u.cl->backend_decl, |
2122 | fold_convert (TREE_TYPE (sym->ts.u.cl->backend_decl), |
2123 | se.string_length)); |
2124 | if (e->expr_type == EXPR_FUNCTION) |
2125 | { |
2126 | tmp = gfc_call_free (sym->backend_decl); |
2127 | gfc_add_expr_to_block (&se.post, tmp); |
2128 | } |
2129 | } |
2130 | |
2131 | if (sym->ts.type == BT_CHARACTER && e->ts.type == BT_CHARACTER |
2132 | && POINTER_TYPE_P (TREE_TYPE (se.expr))) |
2133 | { |
2134 | /* These are pointer types already. */ |
2135 | tmp = fold_convert (TREE_TYPE (sym->backend_decl), se.expr); |
2136 | } |
2137 | else |
2138 | { |
2139 | tree ctree = gfc_get_class_from_expr (se.expr); |
2140 | tmp = TREE_TYPE (sym->backend_decl); |
2141 | |
2142 | /* F2018:19.5.1.6 "If a selector has the POINTER attribute, |
2143 | it shall be associated; the associate name is associated |
2144 | with the target of the pointer and does not have the |
2145 | POINTER attribute." */ |
2146 | if (sym->ts.type == BT_CLASS |
2147 | && e->ts.type == BT_CLASS && e->rank == 0 && ctree |
2148 | && (!GFC_CLASS_TYPE_P (TREE_TYPE (se.expr)) |
2149 | || CLASS_DATA (e)->attr.class_pointer)) |
2150 | { |
2151 | tree stmp; |
2152 | tree dtmp; |
2153 | |
2154 | se.expr = ctree; |
2155 | dtmp = TREE_TYPE (TREE_TYPE (sym->backend_decl)); |
2156 | ctree = gfc_create_var (dtmp, "class" ); |
2157 | |
2158 | stmp = gfc_class_data_get (se.expr); |
2159 | /* Coarray scalar component expressions can emerge from |
2160 | the front end as array elements of the _data field. */ |
2161 | if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (stmp))) |
2162 | stmp = gfc_conv_descriptor_data_get (stmp); |
2163 | dtmp = gfc_class_data_get (ctree); |
2164 | stmp = fold_convert (TREE_TYPE (dtmp), stmp); |
2165 | gfc_add_modify (&se.pre, dtmp, stmp); |
2166 | stmp = gfc_class_vptr_get (se.expr); |
2167 | dtmp = gfc_class_vptr_get (ctree); |
2168 | stmp = fold_convert (TREE_TYPE (dtmp), stmp); |
2169 | gfc_add_modify (&se.pre, dtmp, stmp); |
2170 | if (UNLIMITED_POLY (sym)) |
2171 | { |
2172 | stmp = gfc_class_len_get (se.expr); |
2173 | dtmp = gfc_class_len_get (ctree); |
2174 | stmp = fold_convert (TREE_TYPE (dtmp), stmp); |
2175 | gfc_add_modify (&se.pre, dtmp, stmp); |
2176 | need_len_assign = false; |
2177 | } |
2178 | se.expr = ctree; |
2179 | } |
2180 | tmp = gfc_build_addr_expr (tmp, se.expr); |
2181 | } |
2182 | |
2183 | gfc_add_modify (&se.pre, sym->backend_decl, tmp); |
2184 | |
2185 | gfc_add_init_cleanup (block, init: gfc_finish_block( &se.pre), |
2186 | cleanup: gfc_finish_block (&se.post)); |
2187 | } |
2188 | |
2189 | /* Do a simple assignment. This is for scalar expressions, where we |
2190 | can simply use expression assignment. */ |
2191 | else |
2192 | { |
2193 | gfc_expr *lhs; |
2194 | tree res; |
2195 | gfc_se se; |
2196 | stmtblock_t final_block; |
2197 | |
2198 | gfc_init_se (&se, NULL); |
2199 | |
2200 | /* resolve.cc converts some associate names to allocatable so that |
2201 | allocation can take place automatically in gfc_trans_assignment. |
2202 | The frontend prevents them from being either allocated, |
2203 | deallocated or reallocated. */ |
2204 | if (sym->ts.type == BT_DERIVED |
2205 | && sym->ts.u.derived->attr.alloc_comp) |
2206 | { |
2207 | tmp = sym->backend_decl; |
2208 | tmp = gfc_nullify_alloc_comp (sym->ts.u.derived, tmp, |
2209 | sym->attr.dimension ? sym->as->rank : 0); |
2210 | gfc_add_expr_to_block (&se.pre, tmp); |
2211 | } |
2212 | |
2213 | if (sym->attr.allocatable) |
2214 | { |
2215 | tmp = sym->backend_decl; |
2216 | if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (tmp))) |
2217 | tmp = gfc_conv_descriptor_data_get (tmp); |
2218 | gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp), |
2219 | null_pointer_node)); |
2220 | } |
2221 | |
2222 | lhs = gfc_lval_expr_from_sym (sym); |
2223 | lhs->must_finalize = 0; |
2224 | res = gfc_trans_assignment (lhs, e, false, true); |
2225 | gfc_add_expr_to_block (&se.pre, res); |
2226 | |
2227 | gfc_init_block (&final_block); |
2228 | |
2229 | if (sym->attr.associate_var |
2230 | && sym->ts.type == BT_DERIVED |
2231 | && sym->ts.u.derived->attr.defined_assign_comp |
2232 | && gfc_may_be_finalized (sym->ts) |
2233 | && e->expr_type == EXPR_FUNCTION) |
2234 | { |
2235 | gfc_expr *ef; |
2236 | ef = gfc_lval_expr_from_sym (sym); |
2237 | gfc_add_finalizer_call (&final_block, ef); |
2238 | gfc_free_expr (ef); |
2239 | } |
2240 | |
2241 | if (sym->ts.type == BT_DERIVED |
2242 | && sym->ts.u.derived->attr.alloc_comp) |
2243 | { |
2244 | tmp = sym->backend_decl; |
2245 | tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, |
2246 | tmp, 0); |
2247 | gfc_add_expr_to_block (&final_block, tmp); |
2248 | } |
2249 | |
2250 | tmp = sym->backend_decl; |
2251 | if (e->expr_type == EXPR_FUNCTION |
2252 | && sym->ts.type == BT_DERIVED |
2253 | && sym->ts.u.derived |
2254 | && sym->ts.u.derived->attr.pdt_type) |
2255 | { |
2256 | tmp = gfc_deallocate_pdt_comp (sym->ts.u.derived, tmp, |
2257 | 0); |
2258 | } |
2259 | else if (e->expr_type == EXPR_FUNCTION |
2260 | && sym->ts.type == BT_CLASS |
2261 | && CLASS_DATA (sym)->ts.u.derived |
2262 | && CLASS_DATA (sym)->ts.u.derived->attr.pdt_type) |
2263 | { |
2264 | tmp = gfc_class_data_get (tmp); |
2265 | tmp = gfc_deallocate_pdt_comp (CLASS_DATA (sym)->ts.u.derived, |
2266 | tmp, 0); |
2267 | } |
2268 | else if (sym->attr.allocatable) |
2269 | { |
2270 | tmp = sym->backend_decl; |
2271 | |
2272 | if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (tmp))) |
2273 | tmp = gfc_conv_descriptor_data_get (tmp); |
2274 | |
2275 | /* A simple call to free suffices here. */ |
2276 | tmp = gfc_call_free (tmp); |
2277 | |
2278 | /* Make sure that reallocation on assignment cannot occur. */ |
2279 | sym->attr.allocatable = 0; |
2280 | } |
2281 | else |
2282 | tmp = NULL_TREE; |
2283 | |
2284 | gfc_add_expr_to_block (&final_block, tmp); |
2285 | tmp = gfc_finish_block (&final_block); |
2286 | res = gfc_finish_block (&se.pre); |
2287 | gfc_add_init_cleanup (block, init: res, cleanup: tmp); |
2288 | gfc_free_expr (lhs); |
2289 | } |
2290 | |
2291 | /* Set the stringlength, when needed. */ |
2292 | if (need_len_assign) |
2293 | { |
2294 | gfc_se se; |
2295 | gfc_init_se (&se, NULL); |
2296 | if (e->symtree->n.sym->ts.type == BT_CHARACTER) |
2297 | { |
2298 | /* Deferred strings are dealt with in the preceding. */ |
2299 | gcc_assert (!e->symtree->n.sym->ts.deferred); |
2300 | tmp = e->symtree->n.sym->ts.u.cl->backend_decl; |
2301 | } |
2302 | else if (e->symtree->n.sym->attr.function |
2303 | && e->symtree->n.sym == e->symtree->n.sym->result) |
2304 | { |
2305 | tmp = gfc_get_fake_result_decl (e->symtree->n.sym, 0); |
2306 | tmp = gfc_class_len_get (tmp); |
2307 | } |
2308 | else |
2309 | tmp = gfc_class_len_get (gfc_get_symbol_decl (e->symtree->n.sym)); |
2310 | gfc_get_symbol_decl (sym); |
2311 | charlen = sym->ts.type == BT_CHARACTER ? sym->ts.u.cl->backend_decl |
2312 | : gfc_class_len_get (sym->backend_decl); |
      /* Prevent emitting a no-op assignment len = len.  */
2314 | if (tmp != charlen) |
2315 | { |
2316 | gfc_add_modify (&se.pre, charlen, |
2317 | fold_convert (TREE_TYPE (charlen), tmp)); |
2318 | gfc_add_init_cleanup (block, init: gfc_finish_block (&se.pre), |
2319 | cleanup: gfc_finish_block (&se.post)); |
2320 | } |
2321 | } |
2322 | } |
2323 | |
2324 | |
2325 | /* Translate a BLOCK construct. This is basically what we would do for a |
2326 | procedure body. */ |
2327 | |
2328 | tree |
2329 | gfc_trans_block_construct (gfc_code* code) |
2330 | { |
2331 | gfc_namespace* ns; |
2332 | gfc_symbol* sym; |
2333 | gfc_wrapped_block block; |
2334 | tree exit_label; |
2335 | stmtblock_t body; |
2336 | gfc_association_list *ass; |
2337 | tree translated_body; |
2338 | |
2339 | ns = code->ext.block.ns; |
2340 | gcc_assert (ns); |
2341 | sym = ns->proc_name; |
2342 | gcc_assert (sym); |
2343 | |
2344 | /* Process local variables. */ |
2345 | gcc_assert (!sym->tlink); |
2346 | sym->tlink = sym; |
2347 | gfc_process_block_locals (ns); |
2348 | |
2349 | /* Generate code including exit-label. */ |
2350 | gfc_init_block (&body); |
2351 | exit_label = gfc_build_label_decl (NULL_TREE); |
2352 | code->exit_label = exit_label; |
2353 | |
2354 | finish_oacc_declare (ns, sym, true); |
2355 | |
2356 | translated_body = gfc_trans_code (ns->code); |
2357 | if (ns->omp_structured_block) |
2358 | translated_body = build1 (OMP_STRUCTURED_BLOCK, void_type_node, |
2359 | translated_body); |
2360 | gfc_add_expr_to_block (&body, translated_body); |
2361 | gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label)); |
2362 | |
2363 | /* Finish everything. */ |
2364 | gfc_start_wrapped_block (block: &block, code: gfc_finish_block (&body)); |
2365 | gfc_trans_deferred_vars (sym, &block); |
2366 | for (ass = code->ext.block.assoc; ass; ass = ass->next) |
2367 | trans_associate_var (sym: ass->st->n.sym, block: &block); |
2368 | |
2369 | return gfc_finish_wrapped_block (block: &block); |
2370 | } |
2371 | |
2372 | /* Translate the simple DO construct in a C-style manner. |
2373 | This is where the loop variable has integer type and step +-1. |
   The generated code can loop infinitely when TO is INT_MAX
   (for +1 step) or INT_MIN (for -1 step).
2376 | |
2377 | We translate a do loop from: |
2378 | |
2379 | DO dovar = from, to, step |
2380 | body |
2381 | END DO |
2382 | |
2383 | to: |
2384 | |
2385 | [Evaluate loop bounds and step] |
2386 | dovar = from; |
2387 | for (;;) |
2388 | { |
2389 | if (dovar > to) |
2390 | goto end_label; |
2391 | body; |
2392 | cycle_label: |
2393 | dovar += step; |
2394 | } |
2395 | end_label: |
2396 | |
2397 | This helps the optimizers by avoiding the extra pre-header condition and |
2398 | we save a register as we just compare the updated IV (not a value in |
2399 | previous step). */ |
2400 | |
static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
		     tree from, tree to, tree step, tree exit_cond)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree saved_dovar = NULL;
  tree cycle_label;
  tree exit_label;
  location_t loc;
  type = TREE_TYPE (dovar);
  /* STEP is known to be the constant +1 or -1 here (see the caller,
     gfc_trans_do), so its sign alone determines the loop direction.  */
  bool is_step_positive = tree_int_cst_sgn (step) > 0;

  loc = gfc_get_location (&code->ext.iterator->start->where);

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_loc (loc, pblock, dovar,
		      fold_convert (TREE_TYPE (dovar), from));

  /* Save value for do-tinkering checking (-fcheck=do): a private copy
     lets us detect modification of DOVAR inside the loop body.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar" );
      gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
    }

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Loop body.  */
  gfc_start_block (&body);

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			     exit_cond, tmp,
			     build_empty_stmt (loc));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Evaluate the loop condition: the exit test sits at the top of the
     body and fires once DOVAR has passed TO.  */
  if (is_step_positive)
    cond = fold_build2_loc (loc, GT_EXPR, logical_type_node, dovar,
			    fold_convert (type, to));
  else
    cond = fold_build2_loc (loc, LT_EXPR, logical_type_node, dovar,
			    fold_convert (type, to));

  cond = gfc_evaluate_now_loc (loc, cond, &body);
  /* Carry any !GCC$ unroll/ivdep/vector/novector loop directives through
     to the middle end by wrapping the condition in ANNOTATE_EXPRs.  */
  if (code->ext.iterator->unroll && cond != error_mark_node)
    cond
      = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		build_int_cst (integer_type_node, annot_expr_unroll_kind),
		build_int_cst (integer_type_node, code->ext.iterator->unroll));

  if (code->ext.iterator->ivdep && cond != error_mark_node)
    cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		   build_int_cst (integer_type_node, annot_expr_ivdep_kind),
		   integer_zero_node);
  if (code->ext.iterator->vector && cond != error_mark_node)
    cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		   build_int_cst (integer_type_node, annot_expr_vector_kind),
		   integer_zero_node);
  if (code->ext.iterator->novector && cond != error_mark_node)
    cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		   build_int_cst (integer_type_node, annot_expr_no_vector_kind),
		   integer_zero_node);

  /* The loop exit.  */
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* Check whether the induction variable is equal to INT_MAX
     (respectively to INT_MIN): the next increment would wrap and the
     loop would never terminate.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tree boundary = is_step_positive ? TYPE_MAX_VALUE (type)
	: TYPE_MIN_VALUE (type);

      tmp = fold_build2_loc (loc, EQ_EXPR, logical_type_node,
			     dovar, boundary);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop iterates infinitely" );
    }

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  CYCLE jumps here, past the
     body but before the increment.  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (loc, NE_EXPR, logical_type_node,
			     dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified" );
    }

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  /* Keep the shadow copy in sync after the legitimate increment.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);

  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}
2537 | |
2538 | /* Translate the DO construct. This obviously is one of the most |
2539 | important ones to get right with any compiler, but especially |
2540 | so for Fortran. |
2541 | |
2542 | We special case some loop forms as described in gfc_trans_simple_do. |
2543 | For other cases we implement them with a separate loop count, |
2544 | as described in the standard. |
2545 | |
2546 | We translate a do loop from: |
2547 | |
2548 | DO dovar = from, to, step |
2549 | body |
2550 | END DO |
2551 | |
2552 | to: |
2553 | |
2554 | [evaluate loop bounds and step] |
2555 | empty = (step > 0 ? to < from : to > from); |
2556 | countm1 = (to - from) / step; |
2557 | dovar = from; |
2558 | if (empty) goto exit_label; |
2559 | for (;;) |
2560 | { |
2561 | body; |
2562 | cycle_label: |
2563 | dovar += step |
2564 | countm1t = countm1; |
2565 | countm1--; |
2566 | if (countm1t == 0) goto exit_label; |
2567 | } |
2568 | exit_label: |
2569 | |
2570 | countm1 is an unsigned integer. It is equal to the loop count minus one, |
2571 | because the loop count itself can overflow. */ |
2572 | |
2573 | tree |
2574 | gfc_trans_do (gfc_code * code, tree exit_cond) |
2575 | { |
2576 | gfc_se se; |
2577 | tree dovar; |
2578 | tree saved_dovar = NULL; |
2579 | tree from; |
2580 | tree to; |
2581 | tree step; |
2582 | tree countm1; |
2583 | tree type; |
2584 | tree utype; |
2585 | tree cond; |
2586 | tree cycle_label; |
2587 | tree exit_label; |
2588 | tree tmp; |
2589 | stmtblock_t block; |
2590 | stmtblock_t body; |
2591 | location_t loc; |
2592 | |
2593 | gfc_start_block (&block); |
2594 | |
2595 | loc = gfc_get_location (&code->ext.iterator->start->where); |
2596 | |
2597 | /* Evaluate all the expressions in the iterator. */ |
2598 | gfc_init_se (&se, NULL); |
2599 | gfc_conv_expr_lhs (se: &se, expr: code->ext.iterator->var); |
2600 | gfc_add_block_to_block (&block, &se.pre); |
2601 | dovar = se.expr; |
2602 | type = TREE_TYPE (dovar); |
2603 | |
2604 | gfc_init_se (&se, NULL); |
2605 | gfc_conv_expr_val (se: &se, expr: code->ext.iterator->start); |
2606 | gfc_add_block_to_block (&block, &se.pre); |
2607 | from = gfc_evaluate_now (se.expr, &block); |
2608 | |
2609 | gfc_init_se (&se, NULL); |
2610 | gfc_conv_expr_val (se: &se, expr: code->ext.iterator->end); |
2611 | gfc_add_block_to_block (&block, &se.pre); |
2612 | to = gfc_evaluate_now (se.expr, &block); |
2613 | |
2614 | gfc_init_se (&se, NULL); |
2615 | gfc_conv_expr_val (se: &se, expr: code->ext.iterator->step); |
2616 | gfc_add_block_to_block (&block, &se.pre); |
2617 | step = gfc_evaluate_now (se.expr, &block); |
2618 | |
2619 | if (gfc_option.rtcheck & GFC_RTCHECK_DO) |
2620 | { |
2621 | tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, step, |
2622 | build_zero_cst (type)); |
2623 | gfc_trans_runtime_check (true, false, tmp, &block, &code->loc, |
2624 | "DO step value is zero" ); |
2625 | } |
2626 | |
2627 | /* Special case simple loops. */ |
2628 | if (TREE_CODE (type) == INTEGER_TYPE |
2629 | && (integer_onep (step) |
2630 | || tree_int_cst_equal (step, integer_minus_one_node))) |
2631 | return gfc_trans_simple_do (code, pblock: &block, dovar, from, to, step, |
2632 | exit_cond); |
2633 | |
2634 | if (TREE_CODE (type) == INTEGER_TYPE) |
2635 | utype = unsigned_type_for (type); |
2636 | else |
2637 | utype = unsigned_type_for (gfc_array_index_type); |
2638 | countm1 = gfc_create_var (utype, "countm1" ); |
2639 | |
2640 | /* Cycle and exit statements are implemented with gotos. */ |
2641 | cycle_label = gfc_build_label_decl (NULL_TREE); |
2642 | exit_label = gfc_build_label_decl (NULL_TREE); |
2643 | TREE_USED (exit_label) = 1; |
2644 | |
2645 | /* Put these labels where they can be found later. */ |
2646 | code->cycle_label = cycle_label; |
2647 | code->exit_label = exit_label; |
2648 | |
2649 | /* Initialize the DO variable: dovar = from. */ |
2650 | gfc_add_modify (&block, dovar, from); |
2651 | |
2652 | /* Save value for do-tinkering checking. */ |
2653 | if (gfc_option.rtcheck & GFC_RTCHECK_DO) |
2654 | { |
2655 | saved_dovar = gfc_create_var (type, ".saved_dovar" ); |
2656 | gfc_add_modify_loc (loc, &block, saved_dovar, dovar); |
2657 | } |
2658 | |
2659 | /* Initialize loop count and jump to exit label if the loop is empty. |
2660 | This code is executed before we enter the loop body. We generate: |
2661 | if (step > 0) |
2662 | { |
2663 | countm1 = (to - from) / step; |
2664 | if (to < from) |
2665 | goto exit_label; |
2666 | } |
2667 | else |
2668 | { |
2669 | countm1 = (from - to) / -step; |
2670 | if (to > from) |
2671 | goto exit_label; |
2672 | } |
2673 | */ |
2674 | |
2675 | if (TREE_CODE (type) == INTEGER_TYPE) |
2676 | { |
2677 | tree pos, neg, tou, fromu, stepu, tmp2; |
2678 | |
2679 | /* The distance from FROM to TO cannot always be represented in a signed |
2680 | type, thus use unsigned arithmetic, also to avoid any undefined |
2681 | overflow issues. */ |
2682 | tou = fold_convert (utype, to); |
2683 | fromu = fold_convert (utype, from); |
2684 | stepu = fold_convert (utype, step); |
2685 | |
2686 | /* For a positive step, when to < from, exit, otherwise compute |
2687 | countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step */ |
2688 | tmp = fold_build2_loc (loc, LT_EXPR, logical_type_node, to, from); |
2689 | tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, |
2690 | fold_build2_loc (loc, MINUS_EXPR, utype, |
2691 | tou, fromu), |
2692 | stepu); |
2693 | pos = build2 (COMPOUND_EXPR, void_type_node, |
2694 | fold_build2 (MODIFY_EXPR, void_type_node, |
2695 | countm1, tmp2), |
2696 | build3_loc (loc, code: COND_EXPR, void_type_node, |
2697 | arg0: gfc_unlikely (tmp, PRED_FORTRAN_LOOP_PREHEADER), |
2698 | arg1: build1_loc (loc, code: GOTO_EXPR, void_type_node, |
2699 | arg1: exit_label), NULL_TREE)); |
2700 | |
2701 | /* For a negative step, when to > from, exit, otherwise compute |
2702 | countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step */ |
2703 | tmp = fold_build2_loc (loc, GT_EXPR, logical_type_node, to, from); |
2704 | tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, |
2705 | fold_build2_loc (loc, MINUS_EXPR, utype, |
2706 | fromu, tou), |
2707 | fold_build1_loc (loc, NEGATE_EXPR, utype, stepu)); |
2708 | neg = build2 (COMPOUND_EXPR, void_type_node, |
2709 | fold_build2 (MODIFY_EXPR, void_type_node, |
2710 | countm1, tmp2), |
2711 | build3_loc (loc, code: COND_EXPR, void_type_node, |
2712 | arg0: gfc_unlikely (tmp, PRED_FORTRAN_LOOP_PREHEADER), |
2713 | arg1: build1_loc (loc, code: GOTO_EXPR, void_type_node, |
2714 | arg1: exit_label), NULL_TREE)); |
2715 | |
2716 | tmp = fold_build2_loc (loc, LT_EXPR, logical_type_node, step, |
2717 | build_int_cst (TREE_TYPE (step), 0)); |
2718 | tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, neg, pos); |
2719 | |
2720 | gfc_add_expr_to_block (&block, tmp); |
2721 | } |
2722 | else |
2723 | { |
2724 | tree pos_step; |
2725 | |
2726 | /* TODO: We could use the same width as the real type. |
2727 | This would probably cause more problems that it solves |
2728 | when we implement "long double" types. */ |
2729 | |
2730 | tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from); |
2731 | tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step); |
2732 | tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp); |
2733 | gfc_add_modify (&block, countm1, tmp); |
2734 | |
2735 | /* We need a special check for empty loops: |
2736 | empty = (step > 0 ? to < from : to > from); */ |
2737 | pos_step = fold_build2_loc (loc, GT_EXPR, logical_type_node, step, |
2738 | build_zero_cst (type)); |
2739 | tmp = fold_build3_loc (loc, COND_EXPR, logical_type_node, pos_step, |
2740 | fold_build2_loc (loc, LT_EXPR, |
2741 | logical_type_node, to, from), |
2742 | fold_build2_loc (loc, GT_EXPR, |
2743 | logical_type_node, to, from)); |
2744 | /* If the loop is empty, go directly to the exit label. */ |
2745 | tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, |
2746 | build1_v (GOTO_EXPR, exit_label), |
2747 | build_empty_stmt (input_location)); |
2748 | gfc_add_expr_to_block (&block, tmp); |
2749 | } |
2750 | |
2751 | /* Loop body. */ |
2752 | gfc_start_block (&body); |
2753 | |
2754 | /* Main loop body. */ |
2755 | tmp = gfc_trans_code_cond (code->block->next, exit_cond); |
2756 | gfc_add_expr_to_block (&body, tmp); |
2757 | |
2758 | /* Label for cycle statements (if needed). */ |
2759 | if (TREE_USED (cycle_label)) |
2760 | { |
2761 | tmp = build1_v (LABEL_EXPR, cycle_label); |
2762 | gfc_add_expr_to_block (&body, tmp); |
2763 | } |
2764 | |
2765 | /* Check whether someone has modified the loop variable. */ |
2766 | if (gfc_option.rtcheck & GFC_RTCHECK_DO) |
2767 | { |
2768 | tmp = fold_build2_loc (loc, NE_EXPR, logical_type_node, dovar, |
2769 | saved_dovar); |
2770 | gfc_trans_runtime_check (true, false, tmp, &body, &code->loc, |
2771 | "Loop variable has been modified" ); |
2772 | } |
2773 | |
2774 | /* Exit the loop if there is an I/O result condition or error. */ |
2775 | if (exit_cond) |
2776 | { |
2777 | tmp = build1_v (GOTO_EXPR, exit_label); |
2778 | tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, |
2779 | exit_cond, tmp, |
2780 | build_empty_stmt (input_location)); |
2781 | gfc_add_expr_to_block (&body, tmp); |
2782 | } |
2783 | |
2784 | /* Increment the loop variable. */ |
2785 | tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step); |
2786 | gfc_add_modify_loc (loc, &body, dovar, tmp); |
2787 | |
2788 | if (gfc_option.rtcheck & GFC_RTCHECK_DO) |
2789 | gfc_add_modify_loc (loc, &body, saved_dovar, dovar); |
2790 | |
2791 | /* Initialize countm1t. */ |
2792 | tree countm1t = gfc_create_var (utype, "countm1t" ); |
2793 | gfc_add_modify_loc (loc, &body, countm1t, countm1); |
2794 | |
2795 | /* Decrement the loop count. */ |
2796 | tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1, |
2797 | build_int_cst (utype, 1)); |
2798 | gfc_add_modify_loc (loc, &body, countm1, tmp); |
2799 | |
2800 | /* End with the loop condition. Loop until countm1t == 0. */ |
2801 | cond = fold_build2_loc (loc, EQ_EXPR, logical_type_node, countm1t, |
2802 | build_int_cst (utype, 0)); |
2803 | if (code->ext.iterator->unroll && cond != error_mark_node) |
2804 | cond |
2805 | = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
2806 | build_int_cst (integer_type_node, annot_expr_unroll_kind), |
2807 | build_int_cst (integer_type_node, code->ext.iterator->unroll)); |
2808 | |
2809 | if (code->ext.iterator->ivdep && cond != error_mark_node) |
2810 | cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
2811 | build_int_cst (integer_type_node, annot_expr_ivdep_kind), |
2812 | integer_zero_node); |
2813 | if (code->ext.iterator->vector && cond != error_mark_node) |
2814 | cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
2815 | build_int_cst (integer_type_node, annot_expr_vector_kind), |
2816 | integer_zero_node); |
2817 | if (code->ext.iterator->novector && cond != error_mark_node) |
2818 | cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond, |
2819 | build_int_cst (integer_type_node, annot_expr_no_vector_kind), |
2820 | integer_zero_node); |
2821 | |
2822 | tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label); |
2823 | tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, |
2824 | cond, tmp, build_empty_stmt (loc)); |
2825 | gfc_add_expr_to_block (&body, tmp); |
2826 | |
2827 | /* End of loop body. */ |
2828 | tmp = gfc_finish_block (&body); |
2829 | |
2830 | /* The for loop itself. */ |
2831 | tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp); |
2832 | gfc_add_expr_to_block (&block, tmp); |
2833 | |
2834 | /* Add the exit label. */ |
2835 | tmp = build1_v (LABEL_EXPR, exit_label); |
2836 | gfc_add_expr_to_block (&block, tmp); |
2837 | |
2838 | return gfc_finish_block (&block); |
2839 | } |
2840 | |
2841 | |
2842 | /* Translate the DO WHILE construct. |
2843 | |
2844 | We translate |
2845 | |
2846 | DO WHILE (cond) |
2847 | body |
2848 | END DO |
2849 | |
2850 | to: |
2851 | |
2852 | for ( ; ; ) |
2853 | { |
2854 | pre_cond; |
2855 | if (! cond) goto exit_label; |
2856 | body; |
2857 | cycle_label: |
2858 | } |
2859 | exit_label: |
2860 | |
2861 | Because the evaluation of the exit condition `cond' may have side |
2862 | effects, we can't do much for empty loop bodies. The backend optimizers |
2863 | should be smart enough to eliminate any dead loops. */ |
2864 | |
2865 | tree |
2866 | gfc_trans_do_while (gfc_code * code) |
2867 | { |
2868 | gfc_se cond; |
2869 | tree tmp; |
2870 | tree cycle_label; |
2871 | tree exit_label; |
2872 | stmtblock_t block; |
2873 | |
2874 | /* Everything we build here is part of the loop body. */ |
2875 | gfc_start_block (&block); |
2876 | |
2877 | /* Cycle and exit statements are implemented with gotos. */ |
2878 | cycle_label = gfc_build_label_decl (NULL_TREE); |
2879 | exit_label = gfc_build_label_decl (NULL_TREE); |
2880 | |
2881 | /* Put the labels where they can be found later. See gfc_trans_do(). */ |
2882 | code->cycle_label = cycle_label; |
2883 | code->exit_label = exit_label; |
2884 | |
2885 | /* Create a GIMPLE version of the exit condition. */ |
2886 | gfc_init_se (&cond, NULL); |
2887 | gfc_conv_expr_val (se: &cond, expr: code->expr1); |
2888 | gfc_add_block_to_block (&block, &cond.pre); |
2889 | cond.expr = fold_build1_loc (gfc_get_location (&code->expr1->where), |
2890 | TRUTH_NOT_EXPR, TREE_TYPE (cond.expr), |
2891 | cond.expr); |
2892 | |
2893 | /* Build "IF (! cond) GOTO exit_label". */ |
2894 | tmp = build1_v (GOTO_EXPR, exit_label); |
2895 | TREE_USED (exit_label) = 1; |
2896 | tmp = fold_build3_loc (gfc_get_location (&code->expr1->where), COND_EXPR, |
2897 | void_type_node, cond.expr, tmp, |
2898 | build_empty_stmt (gfc_get_location ( |
2899 | &code->expr1->where))); |
2900 | gfc_add_expr_to_block (&block, tmp); |
2901 | |
2902 | /* The main body of the loop. */ |
2903 | tmp = gfc_trans_code (code->block->next); |
2904 | gfc_add_expr_to_block (&block, tmp); |
2905 | |
2906 | /* Label for cycle statements (if needed). */ |
2907 | if (TREE_USED (cycle_label)) |
2908 | { |
2909 | tmp = build1_v (LABEL_EXPR, cycle_label); |
2910 | gfc_add_expr_to_block (&block, tmp); |
2911 | } |
2912 | |
2913 | /* End of loop body. */ |
2914 | tmp = gfc_finish_block (&block); |
2915 | |
2916 | gfc_init_block (&block); |
2917 | /* Build the loop. */ |
2918 | tmp = fold_build1_loc (gfc_get_location (&code->expr1->where), LOOP_EXPR, |
2919 | void_type_node, tmp); |
2920 | gfc_add_expr_to_block (&block, tmp); |
2921 | |
2922 | /* Add the exit label. */ |
2923 | tmp = build1_v (LABEL_EXPR, exit_label); |
2924 | gfc_add_expr_to_block (&block, tmp); |
2925 | |
2926 | return gfc_finish_block (&block); |
2927 | } |
2928 | |
2929 | |
2930 | /* Deal with the particular case of SELECT_TYPE, where the vtable |
2931 | addresses are used for the selection. Since these are not sorted, |
2932 | the selection has to be made by a series of if statements. */ |
2933 | |
2934 | static tree |
2935 | gfc_trans_select_type_cases (gfc_code * code) |
2936 | { |
2937 | gfc_code *c; |
2938 | gfc_case *cp; |
2939 | tree tmp; |
2940 | tree cond; |
2941 | tree low; |
2942 | tree high; |
2943 | gfc_se se; |
2944 | gfc_se cse; |
2945 | stmtblock_t block; |
2946 | stmtblock_t body; |
2947 | bool def = false; |
2948 | gfc_expr *e; |
2949 | gfc_start_block (&block); |
2950 | |
2951 | /* Calculate the switch expression. */ |
2952 | gfc_init_se (&se, NULL); |
2953 | gfc_conv_expr_val (se: &se, expr: code->expr1); |
2954 | gfc_add_block_to_block (&block, &se.pre); |
2955 | |
2956 | /* Generate an expression for the selector hash value, for |
2957 | use to resolve character cases. */ |
2958 | e = gfc_copy_expr (code->expr1->value.function.actual->expr); |
2959 | gfc_add_hash_component (e); |
2960 | |
2961 | TREE_USED (code->exit_label) = 0; |
2962 | |
2963 | repeat: |
2964 | for (c = code->block; c; c = c->block) |
2965 | { |
2966 | cp = c->ext.block.case_list; |
2967 | |
2968 | /* Assume it's the default case. */ |
2969 | low = NULL_TREE; |
2970 | high = NULL_TREE; |
2971 | tmp = NULL_TREE; |
2972 | |
2973 | /* Put the default case at the end. */ |
2974 | if ((!def && !cp->low) || (def && cp->low)) |
2975 | continue; |
2976 | |
2977 | if (cp->low && (cp->ts.type == BT_CLASS |
2978 | || cp->ts.type == BT_DERIVED)) |
2979 | { |
2980 | gfc_init_se (&cse, NULL); |
2981 | gfc_conv_expr_val (se: &cse, expr: cp->low); |
2982 | gfc_add_block_to_block (&block, &cse.pre); |
2983 | low = cse.expr; |
2984 | } |
2985 | else if (cp->ts.type != BT_UNKNOWN) |
2986 | { |
2987 | gcc_assert (cp->high); |
2988 | gfc_init_se (&cse, NULL); |
2989 | gfc_conv_expr_val (se: &cse, expr: cp->high); |
2990 | gfc_add_block_to_block (&block, &cse.pre); |
2991 | high = cse.expr; |
2992 | } |
2993 | |
2994 | gfc_init_block (&body); |
2995 | |
2996 | /* Add the statements for this case. */ |
2997 | tmp = gfc_trans_code (c->next); |
2998 | gfc_add_expr_to_block (&body, tmp); |
2999 | |
3000 | /* Break to the end of the SELECT TYPE construct. The default |
3001 | case just falls through. */ |
3002 | if (!def) |
3003 | { |
3004 | TREE_USED (code->exit_label) = 1; |
3005 | tmp = build1_v (GOTO_EXPR, code->exit_label); |
3006 | gfc_add_expr_to_block (&body, tmp); |
3007 | } |
3008 | |
3009 | tmp = gfc_finish_block (&body); |
3010 | |
3011 | if (low != NULL_TREE) |
3012 | { |
3013 | /* Compare vtable pointers. */ |
3014 | cond = fold_build2_loc (input_location, EQ_EXPR, |
3015 | TREE_TYPE (se.expr), se.expr, low); |
3016 | tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
3017 | cond, tmp, |
3018 | build_empty_stmt (input_location)); |
3019 | } |
3020 | else if (high != NULL_TREE) |
3021 | { |
3022 | /* Compare hash values for character cases. */ |
3023 | gfc_init_se (&cse, NULL); |
3024 | gfc_conv_expr_val (se: &cse, expr: e); |
3025 | gfc_add_block_to_block (&block, &cse.pre); |
3026 | |
3027 | cond = fold_build2_loc (input_location, EQ_EXPR, |
3028 | TREE_TYPE (se.expr), high, cse.expr); |
3029 | tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
3030 | cond, tmp, |
3031 | build_empty_stmt (input_location)); |
3032 | } |
3033 | |
3034 | gfc_add_expr_to_block (&block, tmp); |
3035 | } |
3036 | |
3037 | if (!def) |
3038 | { |
3039 | def = true; |
3040 | goto repeat; |
3041 | } |
3042 | |
3043 | gfc_free_expr (e); |
3044 | |
3045 | return gfc_finish_block (&block); |
3046 | } |
3047 | |
3048 | |
3049 | /* Translate the SELECT CASE construct for INTEGER case expressions, |
3050 | without killing all potential optimizations. The problem is that |
3051 | Fortran allows unbounded cases, but the back-end does not, so we |
3052 | need to intercept those before we enter the equivalent SWITCH_EXPR |
3053 | we can build. |
3054 | |
3055 | For example, we translate this, |
3056 | |
3057 | SELECT CASE (expr) |
3058 | CASE (:100,101,105:115) |
3059 | block_1 |
3060 | CASE (190:199,200:) |
3061 | block_2 |
3062 | CASE (300) |
3063 | block_3 |
3064 | CASE DEFAULT |
3065 | block_4 |
3066 | END SELECT |
3067 | |
3068 | to the GENERIC equivalent, |
3069 | |
3070 | switch (expr) |
3071 | { |
       case (minimum value for typeof(expr)) ... 100:
       case 101:
       case 105 ... 115:
         block_1;
         goto end_label;

       case 200 ... (maximum value for typeof(expr)):
       case 190 ... 199:
         block_2;
         goto end_label;
3082 | |
3083 | case 300: |
3084 | block_3; |
3085 | goto end_label; |
3086 | |
3087 | default: |
3088 | block_4; |
3089 | goto end_label; |
3090 | } |
3091 | |
3092 | end_label: */ |
3093 | |
3094 | static tree |
3095 | gfc_trans_integer_select (gfc_code * code) |
3096 | { |
3097 | gfc_code *c; |
3098 | gfc_case *cp; |
3099 | tree end_label; |
3100 | tree tmp; |
3101 | gfc_se se; |
3102 | stmtblock_t block; |
3103 | stmtblock_t body; |
3104 | |
3105 | gfc_start_block (&block); |
3106 | |
3107 | /* Calculate the switch expression. */ |
3108 | gfc_init_se (&se, NULL); |
3109 | gfc_conv_expr_val (se: &se, expr: code->expr1); |
3110 | gfc_add_block_to_block (&block, &se.pre); |
3111 | |
3112 | end_label = gfc_build_label_decl (NULL_TREE); |
3113 | |
3114 | gfc_init_block (&body); |
3115 | |
3116 | for (c = code->block; c; c = c->block) |
3117 | { |
3118 | for (cp = c->ext.block.case_list; cp; cp = cp->next) |
3119 | { |
3120 | tree low, high; |
3121 | tree label; |
3122 | |
3123 | /* Assume it's the default case. */ |
3124 | low = high = NULL_TREE; |
3125 | |
3126 | if (cp->low) |
3127 | { |
3128 | low = gfc_conv_mpz_to_tree (cp->low->value.integer, |
3129 | cp->low->ts.kind); |
3130 | |
3131 | /* If there's only a lower bound, set the high bound to the |
3132 | maximum value of the case expression. */ |
3133 | if (!cp->high) |
3134 | high = TYPE_MAX_VALUE (TREE_TYPE (se.expr)); |
3135 | } |
3136 | |
3137 | if (cp->high) |
3138 | { |
3139 | /* Three cases are possible here: |
3140 | |
3141 | 1) There is no lower bound, e.g. CASE (:N). |
3142 | 2) There is a lower bound .NE. high bound, that is |
3143 | a case range, e.g. CASE (N:M) where M>N (we make |
3144 | sure that M>N during type resolution). |
3145 | 3) There is a lower bound, and it has the same value |
3146 | as the high bound, e.g. CASE (N:N). This is our |
3147 | internal representation of CASE(N). |
3148 | |
3149 | In the first and second case, we need to set a value for |
3150 | high. In the third case, we don't because the GCC middle |
3151 | end represents a single case value by just letting high be |
3152 | a NULL_TREE. We can't do that because we need to be able |
3153 | to represent unbounded cases. */ |
3154 | |
3155 | if (!cp->low |
3156 | || (mpz_cmp (cp->low->value.integer, |
3157 | cp->high->value.integer) != 0)) |
3158 | high = gfc_conv_mpz_to_tree (cp->high->value.integer, |
3159 | cp->high->ts.kind); |
3160 | |
3161 | /* Unbounded case. */ |
3162 | if (!cp->low) |
3163 | low = TYPE_MIN_VALUE (TREE_TYPE (se.expr)); |
3164 | } |
3165 | |
3166 | /* Build a label. */ |
3167 | label = gfc_build_label_decl (NULL_TREE); |
3168 | |
3169 | /* Add this case label. |
3170 | Add parameter 'label', make it match GCC backend. */ |
3171 | tmp = build_case_label (low, high, label); |
3172 | gfc_add_expr_to_block (&body, tmp); |
3173 | } |
3174 | |
3175 | /* Add the statements for this case. */ |
3176 | tmp = gfc_trans_code (c->next); |
3177 | gfc_add_expr_to_block (&body, tmp); |
3178 | |
3179 | /* Break to the end of the construct. */ |
3180 | tmp = build1_v (GOTO_EXPR, end_label); |
3181 | gfc_add_expr_to_block (&body, tmp); |
3182 | } |
3183 | |
3184 | tmp = gfc_finish_block (&body); |
3185 | tmp = fold_build2_loc (input_location, SWITCH_EXPR, NULL_TREE, se.expr, tmp); |
3186 | gfc_add_expr_to_block (&block, tmp); |
3187 | |
3188 | tmp = build1_v (LABEL_EXPR, end_label); |
3189 | gfc_add_expr_to_block (&block, tmp); |
3190 | |
3191 | return gfc_finish_block (&block); |
3192 | } |
3193 | |
3194 | |
3195 | /* Translate the SELECT CASE construct for LOGICAL case expressions. |
3196 | |
3197 | There are only two cases possible here, even though the standard |
3198 | does allow three cases in a LOGICAL SELECT CASE construct: .TRUE., |
3199 | .FALSE., and DEFAULT. |
3200 | |
3201 | We never generate more than two blocks here. Instead, we always |
3202 | try to eliminate the DEFAULT case. This way, we can translate this |
3203 | kind of SELECT construct to a simple |
3204 | |
3205 | if {} else {}; |
3206 | |
3207 | expression in GENERIC. */ |
3208 | |
3209 | static tree |
3210 | gfc_trans_logical_select (gfc_code * code) |
3211 | { |
3212 | gfc_code *c; |
3213 | gfc_code *t, *f, *d; |
3214 | gfc_case *cp; |
3215 | gfc_se se; |
3216 | stmtblock_t block; |
3217 | |
3218 | /* Assume we don't have any cases at all. */ |
3219 | t = f = d = NULL; |
3220 | |
3221 | /* Now see which ones we actually do have. We can have at most two |
3222 | cases in a single case list: one for .TRUE. and one for .FALSE. |
3223 | The default case is always separate. If the cases for .TRUE. and |
3224 | .FALSE. are in the same case list, the block for that case list |
3225 | always executed, and we don't generate code a COND_EXPR. */ |
3226 | for (c = code->block; c; c = c->block) |
3227 | { |
3228 | for (cp = c->ext.block.case_list; cp; cp = cp->next) |
3229 | { |
3230 | if (cp->low) |
3231 | { |
3232 | if (cp->low->value.logical == 0) /* .FALSE. */ |
3233 | f = c; |
3234 | else /* if (cp->value.logical != 0), thus .TRUE. */ |
3235 | t = c; |
3236 | } |
3237 | else |
3238 | d = c; |
3239 | } |
3240 | } |
3241 | |
3242 | /* Start a new block. */ |
3243 | gfc_start_block (&block); |
3244 | |
3245 | /* Calculate the switch expression. We always need to do this |
3246 | because it may have side effects. */ |
3247 | gfc_init_se (&se, NULL); |
3248 | gfc_conv_expr_val (se: &se, expr: code->expr1); |
3249 | gfc_add_block_to_block (&block, &se.pre); |
3250 | |
3251 | if (t == f && t != NULL) |
3252 | { |
3253 | /* Cases for .TRUE. and .FALSE. are in the same block. Just |
3254 | translate the code for these cases, append it to the current |
3255 | block. */ |
3256 | gfc_add_expr_to_block (&block, gfc_trans_code (t->next)); |
3257 | } |
3258 | else |
3259 | { |
3260 | tree true_tree, false_tree, stmt; |
3261 | |
3262 | true_tree = build_empty_stmt (input_location); |
3263 | false_tree = build_empty_stmt (input_location); |
3264 | |
3265 | /* If we have a case for .TRUE. and for .FALSE., discard the default case. |
3266 | Otherwise, if .TRUE. or .FALSE. is missing and there is a default case, |
3267 | make the missing case the default case. */ |
3268 | if (t != NULL && f != NULL) |
3269 | d = NULL; |
3270 | else if (d != NULL) |
3271 | { |
3272 | if (t == NULL) |
3273 | t = d; |
3274 | else |
3275 | f = d; |
3276 | } |
3277 | |
3278 | /* Translate the code for each of these blocks, and append it to |
3279 | the current block. */ |
3280 | if (t != NULL) |
3281 | true_tree = gfc_trans_code (t->next); |
3282 | |
3283 | if (f != NULL) |
3284 | false_tree = gfc_trans_code (f->next); |
3285 | |
3286 | stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
3287 | se.expr, true_tree, false_tree); |
3288 | gfc_add_expr_to_block (&block, stmt); |
3289 | } |
3290 | |
3291 | return gfc_finish_block (&block); |
3292 | } |
3293 | |
3294 | |
/* The jump table struct types for character SELECT CASE, indexed by
   character kind (0 for kind=1, 1 for kind=4 -- see
   gfc_trans_character_select).  Stored in static variables to avoid
   constructing them from scratch every single time.  */
static GTY(()) tree select_struct[2];
3298 | |
3299 | /* Translate the SELECT CASE construct for CHARACTER case expressions. |
3300 | Instead of generating compares and jumps, it is far simpler to |
3301 | generate a data structure describing the cases in order and call a |
3302 | library subroutine that locates the right case. |
3303 | This is particularly true because this is the only case where we |
3304 | might have to dispose of a temporary. |
3305 | The library subroutine returns a pointer to jump to or NULL if no |
3306 | branches are to be taken. */ |
3307 | |
3308 | static tree |
3309 | gfc_trans_character_select (gfc_code *code) |
3310 | { |
3311 | tree init, end_label, tmp, type, case_num, label, fndecl; |
3312 | stmtblock_t block, body; |
3313 | gfc_case *cp, *d; |
3314 | gfc_code *c; |
3315 | gfc_se se, expr1se; |
3316 | int n, k; |
3317 | vec<constructor_elt, va_gc> *inits = NULL; |
3318 | |
3319 | tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind); |
3320 | |
3321 | /* The jump table types are stored in static variables to avoid |
3322 | constructing them from scratch every single time. */ |
3323 | static tree ss_string1[2], ss_string1_len[2]; |
3324 | static tree ss_string2[2], ss_string2_len[2]; |
3325 | static tree ss_target[2]; |
3326 | |
3327 | cp = code->block->ext.block.case_list; |
3328 | while (cp->left != NULL) |
3329 | cp = cp->left; |
3330 | |
3331 | /* Generate the body */ |
3332 | gfc_start_block (&block); |
3333 | gfc_init_se (&expr1se, NULL); |
3334 | gfc_conv_expr_reference (se: &expr1se, expr: code->expr1); |
3335 | |
3336 | gfc_add_block_to_block (&block, &expr1se.pre); |
3337 | |
3338 | end_label = gfc_build_label_decl (NULL_TREE); |
3339 | |
3340 | gfc_init_block (&body); |
3341 | |
3342 | /* Attempt to optimize length 1 selects. */ |
3343 | if (integer_onep (expr1se.string_length)) |
3344 | { |
3345 | for (d = cp; d; d = d->right) |
3346 | { |
3347 | gfc_charlen_t i; |
3348 | if (d->low) |
3349 | { |
3350 | gcc_assert (d->low->expr_type == EXPR_CONSTANT |
3351 | && d->low->ts.type == BT_CHARACTER); |
3352 | if (d->low->value.character.length > 1) |
3353 | { |
3354 | for (i = 1; i < d->low->value.character.length; i++) |
3355 | if (d->low->value.character.string[i] != ' ') |
3356 | break; |
3357 | if (i != d->low->value.character.length) |
3358 | { |
3359 | if (optimize && d->high && i == 1) |
3360 | { |
3361 | gcc_assert (d->high->expr_type == EXPR_CONSTANT |
3362 | && d->high->ts.type == BT_CHARACTER); |
3363 | if (d->high->value.character.length > 1 |
3364 | && (d->low->value.character.string[0] |
3365 | == d->high->value.character.string[0]) |
3366 | && d->high->value.character.string[1] != ' ' |
3367 | && ((d->low->value.character.string[1] < ' ') |
3368 | == (d->high->value.character.string[1] |
3369 | < ' '))) |
3370 | continue; |
3371 | } |
3372 | break; |
3373 | } |
3374 | } |
3375 | } |
3376 | if (d->high) |
3377 | { |
3378 | gcc_assert (d->high->expr_type == EXPR_CONSTANT |
3379 | && d->high->ts.type == BT_CHARACTER); |
3380 | if (d->high->value.character.length > 1) |
3381 | { |
3382 | for (i = 1; i < d->high->value.character.length; i++) |
3383 | if (d->high->value.character.string[i] != ' ') |
3384 | break; |
3385 | if (i != d->high->value.character.length) |
3386 | break; |
3387 | } |
3388 | } |
3389 | } |
3390 | if (d == NULL) |
3391 | { |
3392 | tree ctype = gfc_get_char_type (code->expr1->ts.kind); |
3393 | |
3394 | for (c = code->block; c; c = c->block) |
3395 | { |
3396 | for (cp = c->ext.block.case_list; cp; cp = cp->next) |
3397 | { |
3398 | tree low, high; |
3399 | tree label; |
3400 | gfc_char_t r; |
3401 | |
3402 | /* Assume it's the default case. */ |
3403 | low = high = NULL_TREE; |
3404 | |
3405 | if (cp->low) |
3406 | { |
3407 | /* CASE ('ab') or CASE ('ab':'az') will never match |
3408 | any length 1 character. */ |
3409 | if (cp->low->value.character.length > 1 |
3410 | && cp->low->value.character.string[1] != ' ') |
3411 | continue; |
3412 | |
3413 | if (cp->low->value.character.length > 0) |
3414 | r = cp->low->value.character.string[0]; |
3415 | else |
3416 | r = ' '; |
3417 | low = build_int_cst (ctype, r); |
3418 | |
3419 | /* If there's only a lower bound, set the high bound |
3420 | to the maximum value of the case expression. */ |
3421 | if (!cp->high) |
3422 | high = TYPE_MAX_VALUE (ctype); |
3423 | } |
3424 | |
3425 | if (cp->high) |
3426 | { |
3427 | if (!cp->low |
3428 | || (cp->low->value.character.string[0] |
3429 | != cp->high->value.character.string[0])) |
3430 | { |
3431 | if (cp->high->value.character.length > 0) |
3432 | r = cp->high->value.character.string[0]; |
3433 | else |
3434 | r = ' '; |
3435 | high = build_int_cst (ctype, r); |
3436 | } |
3437 | |
3438 | /* Unbounded case. */ |
3439 | if (!cp->low) |
3440 | low = TYPE_MIN_VALUE (ctype); |
3441 | } |
3442 | |
3443 | /* Build a label. */ |
3444 | label = gfc_build_label_decl (NULL_TREE); |
3445 | |
3446 | /* Add this case label. |
3447 | Add parameter 'label', make it match GCC backend. */ |
3448 | tmp = build_case_label (low, high, label); |
3449 | gfc_add_expr_to_block (&body, tmp); |
3450 | } |
3451 | |
3452 | /* Add the statements for this case. */ |
3453 | tmp = gfc_trans_code (c->next); |
3454 | gfc_add_expr_to_block (&body, tmp); |
3455 | |
3456 | /* Break to the end of the construct. */ |
3457 | tmp = build1_v (GOTO_EXPR, end_label); |
3458 | gfc_add_expr_to_block (&body, tmp); |
3459 | } |
3460 | |
3461 | tmp = gfc_string_to_single_character (len: expr1se.string_length, |
3462 | str: expr1se.expr, |
3463 | kind: code->expr1->ts.kind); |
3464 | case_num = gfc_create_var (ctype, "case_num" ); |
3465 | gfc_add_modify (&block, case_num, tmp); |
3466 | |
3467 | gfc_add_block_to_block (&block, &expr1se.post); |
3468 | |
3469 | tmp = gfc_finish_block (&body); |
3470 | tmp = fold_build2_loc (input_location, SWITCH_EXPR, NULL_TREE, |
3471 | case_num, tmp); |
3472 | gfc_add_expr_to_block (&block, tmp); |
3473 | |
3474 | tmp = build1_v (LABEL_EXPR, end_label); |
3475 | gfc_add_expr_to_block (&block, tmp); |
3476 | |
3477 | return gfc_finish_block (&block); |
3478 | } |
3479 | } |
3480 | |
3481 | if (code->expr1->ts.kind == 1) |
3482 | k = 0; |
3483 | else if (code->expr1->ts.kind == 4) |
3484 | k = 1; |
3485 | else |
3486 | gcc_unreachable (); |
3487 | |
3488 | if (select_struct[k] == NULL) |
3489 | { |
3490 | tree *chain = NULL; |
3491 | select_struct[k] = make_node (RECORD_TYPE); |
3492 | |
3493 | if (code->expr1->ts.kind == 1) |
3494 | TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1" ); |
3495 | else if (code->expr1->ts.kind == 4) |
3496 | TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4" ); |
3497 | else |
3498 | gcc_unreachable (); |
3499 | |
3500 | #undef ADD_FIELD |
3501 | #define ADD_FIELD(NAME, TYPE) \ |
3502 | ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \ |
3503 | get_identifier (stringize(NAME)), \ |
3504 | TYPE, \ |
3505 | &chain) |
3506 | |
3507 | ADD_FIELD (string1, pchartype); |
3508 | ADD_FIELD (string1_len, gfc_charlen_type_node); |
3509 | |
3510 | ADD_FIELD (string2, pchartype); |
3511 | ADD_FIELD (string2_len, gfc_charlen_type_node); |
3512 | |
3513 | ADD_FIELD (target, integer_type_node); |
3514 | #undef ADD_FIELD |
3515 | |
3516 | gfc_finish_type (select_struct[k]); |
3517 | } |
3518 | |
3519 | n = 0; |
3520 | for (d = cp; d; d = d->right) |
3521 | d->n = n++; |
3522 | |
3523 | for (c = code->block; c; c = c->block) |
3524 | { |
3525 | for (d = c->ext.block.case_list; d; d = d->next) |
3526 | { |
3527 | label = gfc_build_label_decl (NULL_TREE); |
3528 | tmp = build_case_label ((d->low == NULL && d->high == NULL) |
3529 | ? NULL |
3530 | : build_int_cst (integer_type_node, d->n), |
3531 | NULL, label); |
3532 | gfc_add_expr_to_block (&body, tmp); |
3533 | } |
3534 | |
3535 | tmp = gfc_trans_code (c->next); |
3536 | gfc_add_expr_to_block (&body, tmp); |
3537 | |
3538 | tmp = build1_v (GOTO_EXPR, end_label); |
3539 | gfc_add_expr_to_block (&body, tmp); |
3540 | } |
3541 | |
3542 | /* Generate the structure describing the branches */ |
3543 | for (d = cp; d; d = d->right) |
3544 | { |
3545 | vec<constructor_elt, va_gc> *node = NULL; |
3546 | |
3547 | gfc_init_se (&se, NULL); |
3548 | |
3549 | if (d->low == NULL) |
3550 | { |
3551 | CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node); |
3552 | CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], build_zero_cst (gfc_charlen_type_node)); |
3553 | } |
3554 | else |
3555 | { |
3556 | gfc_conv_expr_reference (se: &se, expr: d->low); |
3557 | |
3558 | CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr); |
3559 | CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length); |
3560 | } |
3561 | |
3562 | if (d->high == NULL) |
3563 | { |
3564 | CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node); |
3565 | CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], build_zero_cst (gfc_charlen_type_node)); |
3566 | } |
3567 | else |
3568 | { |
3569 | gfc_init_se (&se, NULL); |
3570 | gfc_conv_expr_reference (se: &se, expr: d->high); |
3571 | |
3572 | CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr); |
3573 | CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length); |
3574 | } |
3575 | |
3576 | CONSTRUCTOR_APPEND_ELT (node, ss_target[k], |
3577 | build_int_cst (integer_type_node, d->n)); |
3578 | |
3579 | tmp = build_constructor (select_struct[k], node); |
3580 | CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp); |
3581 | } |
3582 | |
3583 | type = build_array_type (select_struct[k], |
3584 | build_index_type (size_int (n-1))); |
3585 | |
3586 | init = build_constructor (type, inits); |
3587 | TREE_CONSTANT (init) = 1; |
3588 | TREE_STATIC (init) = 1; |
3589 | /* Create a static variable to hold the jump table. */ |
3590 | tmp = gfc_create_var (type, "jumptable" ); |
3591 | TREE_CONSTANT (tmp) = 1; |
3592 | TREE_STATIC (tmp) = 1; |
3593 | TREE_READONLY (tmp) = 1; |
3594 | DECL_INITIAL (tmp) = init; |
3595 | init = tmp; |
3596 | |
3597 | /* Build the library call */ |
3598 | init = gfc_build_addr_expr (pvoid_type_node, init); |
3599 | |
3600 | if (code->expr1->ts.kind == 1) |
3601 | fndecl = gfor_fndecl_select_string; |
3602 | else if (code->expr1->ts.kind == 4) |
3603 | fndecl = gfor_fndecl_select_string_char4; |
3604 | else |
3605 | gcc_unreachable (); |
3606 | |
3607 | tmp = build_call_expr_loc (input_location, |
3608 | fndecl, 4, init, |
3609 | build_int_cst (gfc_charlen_type_node, n), |
3610 | expr1se.expr, expr1se.string_length); |
3611 | case_num = gfc_create_var (integer_type_node, "case_num" ); |
3612 | gfc_add_modify (&block, case_num, tmp); |
3613 | |
3614 | gfc_add_block_to_block (&block, &expr1se.post); |
3615 | |
3616 | tmp = gfc_finish_block (&body); |
3617 | tmp = fold_build2_loc (input_location, SWITCH_EXPR, NULL_TREE, |
3618 | case_num, tmp); |
3619 | gfc_add_expr_to_block (&block, tmp); |
3620 | |
3621 | tmp = build1_v (LABEL_EXPR, end_label); |
3622 | gfc_add_expr_to_block (&block, tmp); |
3623 | |
3624 | return gfc_finish_block (&block); |
3625 | } |
3626 | |
3627 | |
3628 | /* Translate the three variants of the SELECT CASE construct. |
3629 | |
3630 | SELECT CASEs with INTEGER case expressions can be translated to an |
3631 | equivalent GENERIC switch statement, and for LOGICAL case |
3632 | expressions we build one or two if-else compares. |
3633 | |
3634 | SELECT CASEs with CHARACTER case expressions are a whole different |
3635 | story, because they don't exist in GENERIC. So we sort them and |
3636 | do a binary search at runtime. |
3637 | |
3638 | Fortran has no BREAK statement, and it does not allow jumps from |
3639 | one case block to another. That makes things a lot easier for |
3640 | the optimizers. */ |
3641 | |
3642 | tree |
3643 | gfc_trans_select (gfc_code * code) |
3644 | { |
3645 | stmtblock_t block; |
3646 | tree body; |
3647 | tree exit_label; |
3648 | |
3649 | gcc_assert (code && code->expr1); |
3650 | gfc_init_block (&block); |
3651 | |
3652 | /* Build the exit label and hang it in. */ |
3653 | exit_label = gfc_build_label_decl (NULL_TREE); |
3654 | code->exit_label = exit_label; |
3655 | |
3656 | /* Empty SELECT constructs are legal. */ |
3657 | if (code->block == NULL) |
3658 | body = build_empty_stmt (input_location); |
3659 | |
3660 | /* Select the correct translation function. */ |
3661 | else |
3662 | switch (code->expr1->ts.type) |
3663 | { |
3664 | case BT_LOGICAL: |
3665 | body = gfc_trans_logical_select (code); |
3666 | break; |
3667 | |
3668 | case BT_INTEGER: |
3669 | body = gfc_trans_integer_select (code); |
3670 | break; |
3671 | |
3672 | case BT_CHARACTER: |
3673 | body = gfc_trans_character_select (code); |
3674 | break; |
3675 | |
3676 | default: |
3677 | gfc_internal_error ("gfc_trans_select(): Bad type for case expr." ); |
3678 | /* Not reached */ |
3679 | } |
3680 | |
3681 | /* Build everything together. */ |
3682 | gfc_add_expr_to_block (&block, body); |
3683 | gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label)); |
3684 | |
3685 | return gfc_finish_block (&block); |
3686 | } |
3687 | |
3688 | tree |
3689 | gfc_trans_select_type (gfc_code * code) |
3690 | { |
3691 | stmtblock_t block; |
3692 | tree body; |
3693 | tree exit_label; |
3694 | |
3695 | gcc_assert (code && code->expr1); |
3696 | gfc_init_block (&block); |
3697 | |
3698 | /* Build the exit label and hang it in. */ |
3699 | exit_label = gfc_build_label_decl (NULL_TREE); |
3700 | code->exit_label = exit_label; |
3701 | |
3702 | /* Empty SELECT constructs are legal. */ |
3703 | if (code->block == NULL) |
3704 | body = build_empty_stmt (input_location); |
3705 | else |
3706 | body = gfc_trans_select_type_cases (code); |
3707 | |
3708 | /* Build everything together. */ |
3709 | gfc_add_expr_to_block (&block, body); |
3710 | |
3711 | if (TREE_USED (exit_label)) |
3712 | gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label)); |
3713 | |
3714 | return gfc_finish_block (&block); |
3715 | } |
3716 | |
3717 | |
3718 | static tree |
3719 | gfc_trans_select_rank_cases (gfc_code * code) |
3720 | { |
3721 | gfc_code *c; |
3722 | gfc_case *cp; |
3723 | tree tmp; |
3724 | tree cond; |
3725 | tree low; |
3726 | tree rank; |
3727 | gfc_se se; |
3728 | gfc_se cse; |
3729 | stmtblock_t block; |
3730 | stmtblock_t body; |
3731 | bool def = false; |
3732 | |
3733 | gfc_start_block (&block); |
3734 | |
3735 | /* Calculate the switch expression. */ |
3736 | gfc_init_se (&se, NULL); |
3737 | gfc_conv_expr_descriptor (&se, code->expr1); |
3738 | rank = gfc_conv_descriptor_rank (se.expr); |
3739 | rank = gfc_evaluate_now (rank, &block); |
3740 | symbol_attribute attr = gfc_expr_attr (code->expr1); |
3741 | if (!attr.pointer && !attr.allocatable) |
3742 | { |
3743 | /* Special case for assumed-rank ('rank(*)', internally -1): |
3744 | rank = (rank == 0 || ubound[rank-1] != -1) ? rank : -1. */ |
3745 | cond = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, |
3746 | rank, build_int_cst (TREE_TYPE (rank), 0)); |
3747 | tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type, |
3748 | fold_convert (gfc_array_index_type, rank), |
3749 | gfc_index_one_node); |
3750 | tmp = gfc_conv_descriptor_ubound_get (se.expr, tmp); |
3751 | tmp = fold_build2_loc (input_location, NE_EXPR, logical_type_node, |
3752 | tmp, build_int_cst (TREE_TYPE (tmp), -1)); |
3753 | cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR, |
3754 | logical_type_node, cond, tmp); |
3755 | tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (rank), |
3756 | cond, rank, build_int_cst (TREE_TYPE (rank), -1)); |
3757 | rank = gfc_evaluate_now (tmp, &block); |
3758 | } |
3759 | TREE_USED (code->exit_label) = 0; |
3760 | |
3761 | repeat: |
3762 | for (c = code->block; c; c = c->block) |
3763 | { |
3764 | cp = c->ext.block.case_list; |
3765 | |
3766 | /* Assume it's the default case. */ |
3767 | low = NULL_TREE; |
3768 | tmp = NULL_TREE; |
3769 | |
3770 | /* Put the default case at the end. */ |
3771 | if ((!def && !cp->low) || (def && cp->low)) |
3772 | continue; |
3773 | |
3774 | if (cp->low) |
3775 | { |
3776 | gfc_init_se (&cse, NULL); |
3777 | gfc_conv_expr_val (se: &cse, expr: cp->low); |
3778 | gfc_add_block_to_block (&block, &cse.pre); |
3779 | low = cse.expr; |
3780 | } |
3781 | |
3782 | gfc_init_block (&body); |
3783 | |
3784 | /* Add the statements for this case. */ |
3785 | tmp = gfc_trans_code (c->next); |
3786 | gfc_add_expr_to_block (&body, tmp); |
3787 | |
3788 | /* Break to the end of the SELECT RANK construct. The default |
3789 | case just falls through. */ |
3790 | if (!def) |
3791 | { |
3792 | TREE_USED (code->exit_label) = 1; |
3793 | tmp = build1_v (GOTO_EXPR, code->exit_label); |
3794 | gfc_add_expr_to_block (&body, tmp); |
3795 | } |
3796 | |
3797 | tmp = gfc_finish_block (&body); |
3798 | |
3799 | if (low != NULL_TREE) |
3800 | { |
3801 | cond = fold_build2_loc (input_location, EQ_EXPR, |
3802 | TREE_TYPE (rank), rank, |
3803 | fold_convert (TREE_TYPE (rank), low)); |
3804 | tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
3805 | cond, tmp, |
3806 | build_empty_stmt (input_location)); |
3807 | } |
3808 | |
3809 | gfc_add_expr_to_block (&block, tmp); |
3810 | } |
3811 | |
3812 | if (!def) |
3813 | { |
3814 | def = true; |
3815 | goto repeat; |
3816 | } |
3817 | |
3818 | return gfc_finish_block (&block); |
3819 | } |
3820 | |
3821 | |
3822 | tree |
3823 | gfc_trans_select_rank (gfc_code * code) |
3824 | { |
3825 | stmtblock_t block; |
3826 | tree body; |
3827 | tree exit_label; |
3828 | |
3829 | gcc_assert (code && code->expr1); |
3830 | gfc_init_block (&block); |
3831 | |
3832 | /* Build the exit label and hang it in. */ |
3833 | exit_label = gfc_build_label_decl (NULL_TREE); |
3834 | code->exit_label = exit_label; |
3835 | |
3836 | /* Empty SELECT constructs are legal. */ |
3837 | if (code->block == NULL) |
3838 | body = build_empty_stmt (input_location); |
3839 | else |
3840 | body = gfc_trans_select_rank_cases (code); |
3841 | |
3842 | /* Build everything together. */ |
3843 | gfc_add_expr_to_block (&block, body); |
3844 | |
3845 | if (TREE_USED (exit_label)) |
3846 | gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label)); |
3847 | |
3848 | return gfc_finish_block (&block); |
3849 | } |
3850 | |
3851 | |
3852 | /* Traversal function to substitute a replacement symtree if the symbol |
3853 | in the expression is the same as that passed. f == 2 signals that |
3854 | that variable itself is not to be checked - only the references. |
3855 | This group of functions is used when the variable expression in a |
3856 | FORALL assignment has internal references. For example: |
3857 | FORALL (i = 1:4) p(p(i)) = i |
3858 | The only recourse here is to store a copy of 'p' for the index |
3859 | expression. */ |
3860 | |
3861 | static gfc_symtree *new_symtree; |
3862 | static gfc_symtree *old_symtree; |
3863 | |
3864 | static bool |
3865 | forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f) |
3866 | { |
3867 | if (expr->expr_type != EXPR_VARIABLE) |
3868 | return false; |
3869 | |
3870 | if (*f == 2) |
3871 | *f = 1; |
3872 | else if (expr->symtree->n.sym == sym) |
3873 | expr->symtree = new_symtree; |
3874 | |
3875 | return false; |
3876 | } |
3877 | |
/* Walk expression E replacing references to SYM with the (file-static)
   NEW_SYMTREE.  F == 2 requests that the outermost variable itself be
   skipped; only its internal references are substituted.  */

static void
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
{
  gfc_traverse_expr (e, sym, forall_replace, f);
}
3883 | |
3884 | static bool |
3885 | forall_restore (gfc_expr *expr, |
3886 | gfc_symbol *sym ATTRIBUTE_UNUSED, |
3887 | int *f ATTRIBUTE_UNUSED) |
3888 | { |
3889 | if (expr->expr_type != EXPR_VARIABLE) |
3890 | return false; |
3891 | |
3892 | if (expr->symtree == new_symtree) |
3893 | expr->symtree = old_symtree; |
3894 | |
3895 | return false; |
3896 | } |
3897 | |
/* Walk expression E restoring the original symtree wherever the
   temporary NEW_SYMTREE was substituted by forall_replace_symtree.  */

static void
forall_restore_symtree (gfc_expr *e)
{
  gfc_traverse_expr (e, NULL, forall_restore, 0);
}
3903 | |
3904 | static void |
3905 | forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post) |
3906 | { |
3907 | gfc_se tse; |
3908 | gfc_se rse; |
3909 | gfc_expr *e; |
3910 | gfc_symbol *new_sym; |
3911 | gfc_symbol *old_sym; |
3912 | gfc_symtree *root; |
3913 | tree tmp; |
3914 | |
3915 | /* Build a copy of the lvalue. */ |
3916 | old_symtree = c->expr1->symtree; |
3917 | old_sym = old_symtree->n.sym; |
3918 | e = gfc_lval_expr_from_sym (old_sym); |
3919 | if (old_sym->attr.dimension) |
3920 | { |
3921 | gfc_init_se (&tse, NULL); |
3922 | gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false); |
3923 | gfc_add_block_to_block (pre, &tse.pre); |
3924 | gfc_add_block_to_block (post, &tse.post); |
3925 | tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr); |
3926 | |
3927 | if (c->expr1->ref->u.ar.type != AR_SECTION) |
3928 | { |
3929 | /* Use the variable offset for the temporary. */ |
3930 | tmp = gfc_conv_array_offset (old_sym->backend_decl); |
3931 | gfc_conv_descriptor_offset_set (pre, tse.expr, tmp); |
3932 | } |
3933 | } |
3934 | else |
3935 | { |
3936 | gfc_init_se (&tse, NULL); |
3937 | gfc_init_se (&rse, NULL); |
3938 | gfc_conv_expr (se: &rse, expr: e); |
3939 | if (e->ts.type == BT_CHARACTER) |
3940 | { |
3941 | tse.string_length = rse.string_length; |
3942 | tmp = gfc_get_character_type_len (gfc_default_character_kind, |
3943 | tse.string_length); |
3944 | tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp), |
3945 | rse.string_length); |
3946 | gfc_add_block_to_block (pre, &tse.pre); |
3947 | gfc_add_block_to_block (post, &tse.post); |
3948 | } |
3949 | else |
3950 | { |
3951 | tmp = gfc_typenode_for_spec (&e->ts); |
3952 | tse.expr = gfc_create_var (tmp, "temp" ); |
3953 | } |
3954 | |
3955 | tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, |
3956 | e->expr_type == EXPR_VARIABLE, false); |
3957 | gfc_add_expr_to_block (pre, tmp); |
3958 | } |
3959 | gfc_free_expr (e); |
3960 | |
3961 | /* Create a new symbol to represent the lvalue. */ |
3962 | new_sym = gfc_new_symbol (old_sym->name, NULL); |
3963 | new_sym->ts = old_sym->ts; |
3964 | new_sym->attr.referenced = 1; |
3965 | new_sym->attr.temporary = 1; |
3966 | new_sym->attr.dimension = old_sym->attr.dimension; |
3967 | new_sym->attr.flavor = old_sym->attr.flavor; |
3968 | |
3969 | /* Use the temporary as the backend_decl. */ |
3970 | new_sym->backend_decl = tse.expr; |
3971 | |
3972 | /* Create a fake symtree for it. */ |
3973 | root = NULL; |
3974 | new_symtree = gfc_new_symtree (&root, old_sym->name); |
3975 | new_symtree->n.sym = new_sym; |
3976 | gcc_assert (new_symtree == root); |
3977 | |
3978 | /* Go through the expression reference replacing the old_symtree |
3979 | with the new. */ |
3980 | forall_replace_symtree (e: c->expr1, sym: old_sym, f: 2); |
3981 | |
3982 | /* Now we have made this temporary, we might as well use it for |
3983 | the right hand side. */ |
3984 | forall_replace_symtree (e: c->expr2, sym: old_sym, f: 1); |
3985 | } |
3986 | |
3987 | |
/* Handles dependencies in forall assignments.  Returns nonzero if a
   temporary is needed for the whole rhs, zero otherwise.  As a side
   effect, lvalues with internal self-references (and overlapping
   substrings) are replaced by a temporary via forall_make_variable_temp,
   with setup/cleanup code added to PRE and POST.  */
static int
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_ref *lref;
  gfc_ref *rref;
  int need_temp;
  gfc_symbol *lsym;

  /* First check for an lhs/rhs dependency in the ordinary sense.  */
  lsym = c->expr1->symtree->n.sym;
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  Likewise for allocatable components.  */
  if (lsym->ts.type == BT_DERIVED
      && (lsym->ts.u.derived->attr.pointer_comp
	  || lsym->ts.u.derived->attr.alloc_comp))
    return need_temp;

  /* new_symtree doubles as a flag: cleanup_forall_symtrees is only
     called by the caller when it was set here.  */
  new_symtree = NULL;
  if (find_forall_index (c->expr1, lsym, 2))
    {
      forall_make_variable_temp (c, pre, post);
      need_temp = 0;
    }

  /* Substrings with dependencies are treated in the same
     way.  */
  if (c->expr1->ts.type == BT_CHARACTER
      && c->expr1->ref
      && c->expr2->expr_type == EXPR_VARIABLE
      && lsym == c->expr2->symtree->n.sym)
    {
      for (lref = c->expr1->ref; lref; lref = lref->next)
	if (lref->type == REF_SUBSTRING)
	  break;
      for (rref = c->expr2->ref; rref; rref = rref->next)
	if (rref->type == REF_SUBSTRING)
	  break;

      /* Only an rhs substring starting before the lhs substring can
	 overlap destructively.  */
      if (rref && lref
	  && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
	{
	  forall_make_variable_temp (c, pre, post);
	  need_temp = 0;
	}
    }
  return need_temp;
}
4042 | |
4043 | |
4044 | static void |
4045 | cleanup_forall_symtrees (gfc_code *c) |
4046 | { |
4047 | forall_restore_symtree (e: c->expr1); |
4048 | forall_restore_symtree (e: c->expr2); |
4049 | free (ptr: new_symtree->n.sym); |
4050 | free (ptr: new_symtree); |
4051 | } |
4052 | |
4053 | |
4054 | /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY |
4055 | is the contents of the FORALL block/stmt to be iterated. MASK_FLAG |
4056 | indicates whether we should generate code to test the FORALLs mask |
4057 | array. OUTER is the loop header to be used for initializing mask |
4058 | indices. |
4059 | |
4060 | The generated loop format is: |
4061 | count = (end - start + step) / step |
4062 | loopvar = start |
4063 | while (1) |
4064 | { |
4065 | if (count <=0 ) |
4066 | goto end_of_loop |
4067 | <body> |
4068 | loopvar += step |
4069 | count -- |
4070 | } |
4071 | end_of_loop: */ |
4072 | |
/* Generate the nest of loops for one FORALL header, wrapping BODY.
   Loops are built innermost-first: each iteration of the n-loop wraps
   the body built so far in one more loop level.  The iteration count is
   precomputed as (end - start + step) / step so the loop variable never
   runs past 'end' (avoiding overflow-related miscounts).  */

static tree
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
		       int mask_flag, stmtblock_t *outer)
{
  int n, nvar;
  tree tmp;
  tree cond;
  stmtblock_t block;
  tree exit_label;
  tree count;
  tree var, start, end, step;
  iter_info *iter;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);

  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
    {
      var = iter->var;
      start = iter->start;
      end = iter->end;
      step = iter->step;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter; same type as the iteration variable.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition: leave the loop once count <= 0.  */
      cond = fold_build2_loc (input_location, LE_EXPR, logical_type_node,
			      count, build_int_cst (TREE_TYPE (count), 0));

      /* PR 83064 means that we cannot use annot_expr_parallel_kind until
	 the autoparallelizer can handle this.  */
      if (forall_tmp->do_concurrent)
	cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		       build_int_cst (integer_type_node,
				      annot_expr_ivdep_kind),
		       integer_zero_node);

      /* "if (count <= 0) goto exit_label;" at the top of the loop.  */
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			     cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
			     step);
      gfc_add_modify (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
	 innermost loop.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
	{
	  tree maskindex = forall_tmp->maskindex;
	  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
				 maskindex, gfc_index_one_node);
	  gfc_add_modify (&block, maskindex, tmp);
	}

      /* Decrement the loop counter.  */
      tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
			     build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify (&block, var, start);


      /* Initialize the loop counter: count = (end - start + step) / step.  */
      tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
			     start);
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
			     tmp);
      tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
			     tmp, step);
      gfc_add_modify (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      /* The finished loop becomes the body of the next-outer level.  */
      body = gfc_finish_block (&block);
      iter = iter->next;
    }
  return body;
}
4177 | |
4178 | |
4179 | /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG |
4180 | is nonzero, the body is controlled by all masks in the forall nest. |
4181 | Otherwise, the innermost loop is not controlled by it's mask. This |
4182 | is used for initializing that mask. */ |
4183 | |
4184 | static tree |
4185 | gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body, |
4186 | int mask_flag) |
4187 | { |
4188 | tree tmp; |
4189 | stmtblock_t ; |
4190 | forall_info *forall_tmp; |
4191 | tree mask, maskindex; |
4192 | |
4193 | gfc_start_block (&header); |
4194 | |
4195 | forall_tmp = nested_forall_info; |
4196 | while (forall_tmp != NULL) |
4197 | { |
4198 | /* Generate body with masks' control. */ |
4199 | if (mask_flag) |
4200 | { |
4201 | mask = forall_tmp->mask; |
4202 | maskindex = forall_tmp->maskindex; |
4203 | |
4204 | /* If a mask was specified make the assignment conditional. */ |
4205 | if (mask) |
4206 | { |
4207 | tmp = gfc_build_array_ref (mask, maskindex, NULL); |
4208 | body = build3_v (COND_EXPR, tmp, body, |
4209 | build_empty_stmt (input_location)); |
4210 | } |
4211 | } |
4212 | body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, outer: &header); |
4213 | forall_tmp = forall_tmp->prev_nest; |
4214 | mask_flag = 1; |
4215 | } |
4216 | |
4217 | gfc_add_expr_to_block (&header, body); |
4218 | return gfc_finish_block (&header); |
4219 | } |
4220 | |
4221 | |
4222 | /* Allocate data for holding a temporary array. Returns either a local |
4223 | temporary array or a pointer variable. */ |
4224 | |
4225 | static tree |
4226 | gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock, |
4227 | tree elem_type) |
4228 | { |
4229 | tree tmpvar; |
4230 | tree type; |
4231 | tree tmp; |
4232 | |
4233 | if (INTEGER_CST_P (size)) |
4234 | tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type, |
4235 | size, gfc_index_one_node); |
4236 | else |
4237 | tmp = NULL_TREE; |
4238 | |
4239 | type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp); |
4240 | type = build_array_type (elem_type, type); |
4241 | if (gfc_can_put_var_on_stack (bytesize) && INTEGER_CST_P (size)) |
4242 | { |
4243 | tmpvar = gfc_create_var (type, "temp" ); |
4244 | *pdata = NULL_TREE; |
4245 | } |
4246 | else |
4247 | { |
4248 | tmpvar = gfc_create_var (build_pointer_type (type), "temp" ); |
4249 | *pdata = convert (pvoid_type_node, tmpvar); |
4250 | |
4251 | tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize); |
4252 | gfc_add_modify (pblock, tmpvar, tmp); |
4253 | } |
4254 | return tmpvar; |
4255 | } |
4256 | |
4257 | |
4258 | /* Generate codes to copy the temporary to the actual lhs. */ |
4259 | |
4260 | static tree |
4261 | generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3, |
4262 | tree count1, |
4263 | gfc_ss *lss, gfc_ss *, |
4264 | tree wheremask, bool invert) |
4265 | { |
4266 | stmtblock_t block, body1; |
4267 | gfc_loopinfo loop; |
4268 | gfc_se lse; |
4269 | gfc_se rse; |
4270 | tree tmp; |
4271 | tree wheremaskexpr; |
4272 | |
4273 | (void) rss; /* TODO: unused. */ |
4274 | |
4275 | gfc_start_block (&block); |
4276 | |
4277 | gfc_init_se (&rse, NULL); |
4278 | gfc_init_se (&lse, NULL); |
4279 | |
4280 | if (lss == gfc_ss_terminator) |
4281 | { |
4282 | gfc_init_block (&body1); |
4283 | gfc_conv_expr (se: &lse, expr); |
4284 | rse.expr = gfc_build_array_ref (tmp1, count1, NULL); |
4285 | } |
4286 | else |
4287 | { |
4288 | /* Initialize the loop. */ |
4289 | gfc_init_loopinfo (&loop); |
4290 | |
4291 | /* We may need LSS to determine the shape of the expression. */ |
4292 | gfc_add_ss_to_loop (&loop, lss); |
4293 | |
4294 | gfc_conv_ss_startstride (&loop); |
4295 | gfc_conv_loop_setup (&loop, &expr->where); |
4296 | |
4297 | gfc_mark_ss_chain_used (lss, 1); |
4298 | /* Start the loop body. */ |
4299 | gfc_start_scalarized_body (&loop, &body1); |
4300 | |
4301 | /* Translate the expression. */ |
4302 | gfc_copy_loopinfo_to_se (&lse, &loop); |
4303 | lse.ss = lss; |
4304 | gfc_conv_expr (se: &lse, expr); |
4305 | |
4306 | /* Form the expression of the temporary. */ |
4307 | rse.expr = gfc_build_array_ref (tmp1, count1, NULL); |
4308 | } |
4309 | |
4310 | /* Use the scalar assignment. */ |
4311 | rse.string_length = lse.string_length; |
4312 | tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, |
4313 | expr->expr_type == EXPR_VARIABLE, false); |
4314 | |
4315 | /* Form the mask expression according to the mask tree list. */ |
4316 | if (wheremask) |
4317 | { |
4318 | wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL); |
4319 | if (invert) |
4320 | wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, |
4321 | TREE_TYPE (wheremaskexpr), |
4322 | wheremaskexpr); |
4323 | tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
4324 | wheremaskexpr, tmp, |
4325 | build_empty_stmt (input_location)); |
4326 | } |
4327 | |
4328 | gfc_add_expr_to_block (&body1, tmp); |
4329 | |
4330 | tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1), |
4331 | count1, gfc_index_one_node); |
4332 | gfc_add_modify (&body1, count1, tmp); |
4333 | |
4334 | if (lss == gfc_ss_terminator) |
4335 | gfc_add_block_to_block (&block, &body1); |
4336 | else |
4337 | { |
4338 | /* Increment count3. */ |
4339 | if (count3) |
4340 | { |
4341 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
4342 | gfc_array_index_type, |
4343 | count3, gfc_index_one_node); |
4344 | gfc_add_modify (&body1, count3, tmp); |
4345 | } |
4346 | |
4347 | /* Generate the copying loops. */ |
4348 | gfc_trans_scalarizing_loops (&loop, &body1); |
4349 | |
4350 | gfc_add_block_to_block (&block, &loop.pre); |
4351 | gfc_add_block_to_block (&block, &loop.post); |
4352 | |
4353 | gfc_cleanup_loop (&loop); |
4354 | /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful |
4355 | as tree nodes in SS may not be valid in different scope. */ |
4356 | } |
4357 | |
4358 | tmp = gfc_finish_block (&block); |
4359 | return tmp; |
4360 | } |
4361 | |
4362 | |
4363 | /* Generate codes to copy rhs to the temporary. TMP1 is the address of |
4364 | temporary, LSS and RSS are formed in function compute_inner_temp_size(), |
4365 | and should not be freed. WHEREMASK is the conditional execution mask |
4366 | whose sense may be inverted by INVERT. */ |
4367 | |
4368 | static tree |
4369 | generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3, |
4370 | tree count1, gfc_ss *lss, gfc_ss *, |
4371 | tree wheremask, bool invert) |
4372 | { |
4373 | stmtblock_t block, body1; |
4374 | gfc_loopinfo loop; |
4375 | gfc_se lse; |
4376 | gfc_se rse; |
4377 | tree tmp; |
4378 | tree wheremaskexpr; |
4379 | |
4380 | gfc_start_block (&block); |
4381 | |
4382 | gfc_init_se (&rse, NULL); |
4383 | gfc_init_se (&lse, NULL); |
4384 | |
4385 | if (lss == gfc_ss_terminator) |
4386 | { |
4387 | gfc_init_block (&body1); |
4388 | gfc_conv_expr (se: &rse, expr: expr2); |
4389 | lse.expr = gfc_build_array_ref (tmp1, count1, NULL); |
4390 | } |
4391 | else |
4392 | { |
4393 | /* Initialize the loop. */ |
4394 | gfc_init_loopinfo (&loop); |
4395 | |
4396 | /* We may need LSS to determine the shape of the expression. */ |
4397 | gfc_add_ss_to_loop (&loop, lss); |
4398 | gfc_add_ss_to_loop (&loop, rss); |
4399 | |
4400 | gfc_conv_ss_startstride (&loop); |
4401 | gfc_conv_loop_setup (&loop, &expr2->where); |
4402 | |
4403 | gfc_mark_ss_chain_used (rss, 1); |
4404 | /* Start the loop body. */ |
4405 | gfc_start_scalarized_body (&loop, &body1); |
4406 | |
4407 | /* Translate the expression. */ |
4408 | gfc_copy_loopinfo_to_se (&rse, &loop); |
4409 | rse.ss = rss; |
4410 | gfc_conv_expr (se: &rse, expr: expr2); |
4411 | |
4412 | /* Form the expression of the temporary. */ |
4413 | lse.expr = gfc_build_array_ref (tmp1, count1, NULL); |
4414 | } |
4415 | |
4416 | /* Use the scalar assignment. */ |
4417 | lse.string_length = rse.string_length; |
4418 | tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, |
4419 | expr2->expr_type == EXPR_VARIABLE, false); |
4420 | |
4421 | /* Form the mask expression according to the mask tree list. */ |
4422 | if (wheremask) |
4423 | { |
4424 | wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL); |
4425 | if (invert) |
4426 | wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, |
4427 | TREE_TYPE (wheremaskexpr), |
4428 | wheremaskexpr); |
4429 | tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
4430 | wheremaskexpr, tmp, |
4431 | build_empty_stmt (input_location)); |
4432 | } |
4433 | |
4434 | gfc_add_expr_to_block (&body1, tmp); |
4435 | |
4436 | if (lss == gfc_ss_terminator) |
4437 | { |
4438 | gfc_add_block_to_block (&block, &body1); |
4439 | |
4440 | /* Increment count1. */ |
4441 | tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1), |
4442 | count1, gfc_index_one_node); |
4443 | gfc_add_modify (&block, count1, tmp); |
4444 | } |
4445 | else |
4446 | { |
4447 | /* Increment count1. */ |
4448 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
4449 | count1, gfc_index_one_node); |
4450 | gfc_add_modify (&body1, count1, tmp); |
4451 | |
4452 | /* Increment count3. */ |
4453 | if (count3) |
4454 | { |
4455 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
4456 | gfc_array_index_type, |
4457 | count3, gfc_index_one_node); |
4458 | gfc_add_modify (&body1, count3, tmp); |
4459 | } |
4460 | |
4461 | /* Generate the copying loops. */ |
4462 | gfc_trans_scalarizing_loops (&loop, &body1); |
4463 | |
4464 | gfc_add_block_to_block (&block, &loop.pre); |
4465 | gfc_add_block_to_block (&block, &loop.post); |
4466 | |
4467 | gfc_cleanup_loop (&loop); |
4468 | /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful |
4469 | as tree nodes in SS may not be valid in different scope. */ |
4470 | } |
4471 | |
4472 | tmp = gfc_finish_block (&block); |
4473 | return tmp; |
4474 | } |
4475 | |
4476 | |
4477 | /* Calculate the size of temporary needed in the assignment inside forall. |
4478 | LSS and RSS are filled in this function. */ |
4479 | |
4480 | static tree |
4481 | compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2, |
4482 | stmtblock_t * pblock, |
4483 | gfc_ss **lss, gfc_ss **) |
4484 | { |
4485 | gfc_loopinfo loop; |
4486 | tree size; |
4487 | int i; |
4488 | int save_flag; |
4489 | tree tmp; |
4490 | |
4491 | *lss = gfc_walk_expr (expr1); |
4492 | *rss = NULL; |
4493 | |
4494 | size = gfc_index_one_node; |
4495 | if (*lss != gfc_ss_terminator) |
4496 | { |
4497 | gfc_init_loopinfo (&loop); |
4498 | |
4499 | /* Walk the RHS of the expression. */ |
4500 | *rss = gfc_walk_expr (expr2); |
4501 | if (*rss == gfc_ss_terminator) |
4502 | /* The rhs is scalar. Add a ss for the expression. */ |
4503 | *rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2); |
4504 | |
4505 | /* Associate the SS with the loop. */ |
4506 | gfc_add_ss_to_loop (&loop, *lss); |
4507 | /* We don't actually need to add the rhs at this point, but it might |
4508 | make guessing the loop bounds a bit easier. */ |
4509 | gfc_add_ss_to_loop (&loop, *rss); |
4510 | |
4511 | /* We only want the shape of the expression, not rest of the junk |
4512 | generated by the scalarizer. */ |
4513 | loop.array_parameter = 1; |
4514 | |
4515 | /* Calculate the bounds of the scalarization. */ |
4516 | save_flag = gfc_option.rtcheck; |
4517 | gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS; |
4518 | gfc_conv_ss_startstride (&loop); |
4519 | gfc_option.rtcheck = save_flag; |
4520 | gfc_conv_loop_setup (&loop, &expr2->where); |
4521 | |
4522 | /* Figure out how many elements we need. */ |
4523 | for (i = 0; i < loop.dimen; i++) |
4524 | { |
4525 | tmp = fold_build2_loc (input_location, MINUS_EXPR, |
4526 | gfc_array_index_type, |
4527 | gfc_index_one_node, loop.from[i]); |
4528 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
4529 | gfc_array_index_type, tmp, loop.to[i]); |
4530 | size = fold_build2_loc (input_location, MULT_EXPR, |
4531 | gfc_array_index_type, size, tmp); |
4532 | } |
4533 | gfc_add_block_to_block (pblock, &loop.pre); |
4534 | size = gfc_evaluate_now (size, pblock); |
4535 | gfc_add_block_to_block (pblock, &loop.post); |
4536 | |
4537 | /* TODO: write a function that cleans up a loopinfo without freeing |
4538 | the SS chains. Currently a NOP. */ |
4539 | } |
4540 | |
4541 | return size; |
4542 | } |
4543 | |
4544 | |
4545 | /* Calculate the overall iterator number of the nested forall construct. |
4546 | This routine actually calculates the number of times the body of the |
4547 | nested forall specified by NESTED_FORALL_INFO is executed and multiplies |
4548 | that by the expression INNER_SIZE. The BLOCK argument specifies the |
4549 | block in which to calculate the result, and the optional INNER_SIZE_BODY |
4550 | argument contains any statements that need to executed (inside the loop) |
4551 | to initialize or calculate INNER_SIZE. */ |
4552 | |
4553 | static tree |
4554 | compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size, |
4555 | stmtblock_t *inner_size_body, stmtblock_t *block) |
4556 | { |
4557 | forall_info *forall_tmp = nested_forall_info; |
4558 | tree tmp, number; |
4559 | stmtblock_t body; |
4560 | |
4561 | /* We can eliminate the innermost unconditional loops with constant |
4562 | array bounds. */ |
4563 | if (INTEGER_CST_P (inner_size)) |
4564 | { |
4565 | while (forall_tmp |
4566 | && !forall_tmp->mask |
4567 | && INTEGER_CST_P (forall_tmp->size)) |
4568 | { |
4569 | inner_size = fold_build2_loc (input_location, MULT_EXPR, |
4570 | gfc_array_index_type, |
4571 | inner_size, forall_tmp->size); |
4572 | forall_tmp = forall_tmp->prev_nest; |
4573 | } |
4574 | |
4575 | /* If there are no loops left, we have our constant result. */ |
4576 | if (!forall_tmp) |
4577 | return inner_size; |
4578 | } |
4579 | |
4580 | /* Otherwise, create a temporary variable to compute the result. */ |
4581 | number = gfc_create_var (gfc_array_index_type, "num" ); |
4582 | gfc_add_modify (block, number, gfc_index_zero_node); |
4583 | |
4584 | gfc_start_block (&body); |
4585 | if (inner_size_body) |
4586 | gfc_add_block_to_block (&body, inner_size_body); |
4587 | if (forall_tmp) |
4588 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
4589 | gfc_array_index_type, number, inner_size); |
4590 | else |
4591 | tmp = inner_size; |
4592 | gfc_add_modify (&body, number, tmp); |
4593 | tmp = gfc_finish_block (&body); |
4594 | |
4595 | /* Generate loops. */ |
4596 | if (forall_tmp != NULL) |
4597 | tmp = gfc_trans_nested_forall_loop (nested_forall_info: forall_tmp, body: tmp, mask_flag: 1); |
4598 | |
4599 | gfc_add_expr_to_block (block, tmp); |
4600 | |
4601 | return number; |
4602 | } |
4603 | |
4604 | |
4605 | /* Allocate temporary for forall construct. SIZE is the size of temporary |
4606 | needed. PTEMP1 is returned for space free. */ |
4607 | |
4608 | static tree |
4609 | allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block, |
4610 | tree * ptemp1) |
4611 | { |
4612 | tree bytesize; |
4613 | tree unit; |
4614 | tree tmp; |
4615 | |
4616 | unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type)); |
4617 | if (!integer_onep (unit)) |
4618 | bytesize = fold_build2_loc (input_location, MULT_EXPR, |
4619 | gfc_array_index_type, size, unit); |
4620 | else |
4621 | bytesize = size; |
4622 | |
4623 | *ptemp1 = NULL; |
4624 | tmp = gfc_do_allocate (bytesize, size, pdata: ptemp1, pblock: block, elem_type: type); |
4625 | |
4626 | if (*ptemp1) |
4627 | tmp = build_fold_indirect_ref_loc (input_location, tmp); |
4628 | return tmp; |
4629 | } |
4630 | |
4631 | |
4632 | /* Allocate temporary for forall construct according to the information in |
4633 | nested_forall_info. INNER_SIZE is the size of temporary needed in the |
4634 | assignment inside forall. PTEMP1 is returned for space free. */ |
4635 | |
4636 | static tree |
4637 | allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type, |
4638 | tree inner_size, stmtblock_t * inner_size_body, |
4639 | stmtblock_t * block, tree * ptemp1) |
4640 | { |
4641 | tree size; |
4642 | |
4643 | /* Calculate the total size of temporary needed in forall construct. */ |
4644 | size = compute_overall_iter_number (nested_forall_info, inner_size, |
4645 | inner_size_body, block); |
4646 | |
4647 | return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1); |
4648 | } |
4649 | |
4650 | |
4651 | /* Handle assignments inside forall which need temporary. |
4652 | |
4653 | forall (i=start:end:stride; maskexpr) |
4654 | e<i> = f<i> |
4655 | end forall |
4656 | (where e,f<i> are arbitrary expressions possibly involving i |
4657 | and there is a dependency between e<i> and f<i>) |
4658 | Translates to: |
4659 | masktmp(:) = maskexpr(:) |
4660 | |
4661 | maskindex = 0; |
4662 | count1 = 0; |
4663 | num = 0; |
4664 | for (i = start; i <= end; i += stride) |
4665 | num += SIZE (f<i>) |
4666 | count1 = 0; |
4667 | ALLOCATE (tmp(num)) |
4668 | for (i = start; i <= end; i += stride) |
4669 | { |
4670 | if (masktmp[maskindex++]) |
4671 | tmp[count1++] = f<i> |
4672 | } |
4673 | maskindex = 0; |
4674 | count1 = 0; |
4675 | for (i = start; i <= end; i += stride) |
4676 | { |
4677 | if (masktmp[maskindex++]) |
4678 | e<i> = tmp[count1++] |
4679 | } |
4680 | DEALLOCATE (tmp) |
4681 | */ |
4682 | static void |
4683 | gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, |
4684 | tree wheremask, bool invert, |
4685 | forall_info * nested_forall_info, |
4686 | stmtblock_t * block) |
4687 | { |
4688 | tree type; |
4689 | tree inner_size; |
4690 | gfc_ss *lss, *; |
4691 | tree count, count1; |
4692 | tree tmp, tmp1; |
4693 | tree ptemp1; |
4694 | stmtblock_t inner_size_body; |
4695 | |
4696 | /* Create vars. count1 is the current iterator number of the nested |
4697 | forall. */ |
4698 | count1 = gfc_create_var (gfc_array_index_type, "count1" ); |
4699 | |
4700 | /* Count is the wheremask index. */ |
4701 | if (wheremask) |
4702 | { |
4703 | count = gfc_create_var (gfc_array_index_type, "count" ); |
4704 | gfc_add_modify (block, count, gfc_index_zero_node); |
4705 | } |
4706 | else |
4707 | count = NULL; |
4708 | |
4709 | /* Initialize count1. */ |
4710 | gfc_add_modify (block, count1, gfc_index_zero_node); |
4711 | |
4712 | /* Calculate the size of temporary needed in the assignment. Return loop, lss |
4713 | and rss which are used in function generate_loop_for_rhs_to_temp(). */ |
4714 | /* The type of LHS. Used in function allocate_temp_for_forall_nest */ |
4715 | if (expr1->ts.type == BT_CHARACTER) |
4716 | { |
4717 | type = NULL; |
4718 | if (expr1->ref && expr1->ref->type == REF_SUBSTRING) |
4719 | { |
4720 | gfc_se ssse; |
4721 | gfc_init_se (&ssse, NULL); |
4722 | gfc_conv_expr (se: &ssse, expr: expr1); |
4723 | type = gfc_get_character_type_len (gfc_default_character_kind, |
4724 | ssse.string_length); |
4725 | } |
4726 | else |
4727 | { |
4728 | if (!expr1->ts.u.cl->backend_decl) |
4729 | { |
4730 | gfc_se tse; |
4731 | gcc_assert (expr1->ts.u.cl->length); |
4732 | gfc_init_se (&tse, NULL); |
4733 | gfc_conv_expr (se: &tse, expr: expr1->ts.u.cl->length); |
4734 | expr1->ts.u.cl->backend_decl = tse.expr; |
4735 | } |
4736 | type = gfc_get_character_type_len (gfc_default_character_kind, |
4737 | expr1->ts.u.cl->backend_decl); |
4738 | } |
4739 | } |
4740 | else |
4741 | type = gfc_typenode_for_spec (&expr1->ts); |
4742 | |
4743 | gfc_init_block (&inner_size_body); |
4744 | inner_size = compute_inner_temp_size (expr1, expr2, pblock: &inner_size_body, |
4745 | lss: &lss, rss: &rss); |
4746 | |
4747 | /* Allocate temporary for nested forall construct according to the |
4748 | information in nested_forall_info and inner_size. */ |
4749 | tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size, |
4750 | inner_size_body: &inner_size_body, block, ptemp1: &ptemp1); |
4751 | |
4752 | /* Generate codes to copy rhs to the temporary . */ |
4753 | tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count3: count, count1, lss, rss, |
4754 | wheremask, invert); |
4755 | |
4756 | /* Generate body and loops according to the information in |
4757 | nested_forall_info. */ |
4758 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
4759 | gfc_add_expr_to_block (block, tmp); |
4760 | |
4761 | /* Reset count1. */ |
4762 | gfc_add_modify (block, count1, gfc_index_zero_node); |
4763 | |
4764 | /* Reset count. */ |
4765 | if (wheremask) |
4766 | gfc_add_modify (block, count, gfc_index_zero_node); |
4767 | |
4768 | /* TODO: Second call to compute_inner_temp_size to initialize lss and |
4769 | rss; there must be a better way. */ |
4770 | inner_size = compute_inner_temp_size (expr1, expr2, pblock: &inner_size_body, |
4771 | lss: &lss, rss: &rss); |
4772 | |
4773 | /* Generate codes to copy the temporary to lhs. */ |
4774 | tmp = generate_loop_for_temp_to_lhs (expr: expr1, tmp1, count3: count, count1, |
4775 | lss, rss, |
4776 | wheremask, invert); |
4777 | |
4778 | /* Generate body and loops according to the information in |
4779 | nested_forall_info. */ |
4780 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
4781 | gfc_add_expr_to_block (block, tmp); |
4782 | |
4783 | if (ptemp1) |
4784 | { |
4785 | /* Free the temporary. */ |
4786 | tmp = gfc_call_free (ptemp1); |
4787 | gfc_add_expr_to_block (block, tmp); |
4788 | } |
4789 | } |
4790 | |
4791 | |
4792 | /* Translate pointer assignment inside FORALL which need temporary. */ |
4793 | |
4794 | static void |
4795 | gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2, |
4796 | forall_info * nested_forall_info, |
4797 | stmtblock_t * block) |
4798 | { |
4799 | tree type; |
4800 | tree inner_size; |
4801 | gfc_ss *lss, *; |
4802 | gfc_se lse; |
4803 | gfc_se rse; |
4804 | gfc_array_info *info; |
4805 | gfc_loopinfo loop; |
4806 | tree desc; |
4807 | tree parm; |
4808 | tree parmtype; |
4809 | stmtblock_t body; |
4810 | tree count; |
4811 | tree tmp, tmp1, ptemp1; |
4812 | |
4813 | count = gfc_create_var (gfc_array_index_type, "count" ); |
4814 | gfc_add_modify (block, count, gfc_index_zero_node); |
4815 | |
4816 | inner_size = gfc_index_one_node; |
4817 | lss = gfc_walk_expr (expr1); |
4818 | rss = gfc_walk_expr (expr2); |
4819 | if (lss == gfc_ss_terminator) |
4820 | { |
4821 | type = gfc_typenode_for_spec (&expr1->ts); |
4822 | type = build_pointer_type (type); |
4823 | |
4824 | /* Allocate temporary for nested forall construct according to the |
4825 | information in nested_forall_info and inner_size. */ |
4826 | tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, |
4827 | inner_size, NULL, block, ptemp1: &ptemp1); |
4828 | gfc_start_block (&body); |
4829 | gfc_init_se (&lse, NULL); |
4830 | lse.expr = gfc_build_array_ref (tmp1, count, NULL); |
4831 | gfc_init_se (&rse, NULL); |
4832 | rse.want_pointer = 1; |
4833 | gfc_conv_expr (se: &rse, expr: expr2); |
4834 | gfc_add_block_to_block (&body, &rse.pre); |
4835 | gfc_add_modify (&body, lse.expr, |
4836 | fold_convert (TREE_TYPE (lse.expr), rse.expr)); |
4837 | gfc_add_block_to_block (&body, &rse.post); |
4838 | |
4839 | /* Increment count. */ |
4840 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
4841 | count, gfc_index_one_node); |
4842 | gfc_add_modify (&body, count, tmp); |
4843 | |
4844 | tmp = gfc_finish_block (&body); |
4845 | |
4846 | /* Generate body and loops according to the information in |
4847 | nested_forall_info. */ |
4848 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
4849 | gfc_add_expr_to_block (block, tmp); |
4850 | |
4851 | /* Reset count. */ |
4852 | gfc_add_modify (block, count, gfc_index_zero_node); |
4853 | |
4854 | gfc_start_block (&body); |
4855 | gfc_init_se (&lse, NULL); |
4856 | gfc_init_se (&rse, NULL); |
4857 | rse.expr = gfc_build_array_ref (tmp1, count, NULL); |
4858 | lse.want_pointer = 1; |
4859 | gfc_conv_expr (se: &lse, expr: expr1); |
4860 | gfc_add_block_to_block (&body, &lse.pre); |
4861 | gfc_add_modify (&body, lse.expr, rse.expr); |
4862 | gfc_add_block_to_block (&body, &lse.post); |
4863 | /* Increment count. */ |
4864 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
4865 | count, gfc_index_one_node); |
4866 | gfc_add_modify (&body, count, tmp); |
4867 | tmp = gfc_finish_block (&body); |
4868 | |
4869 | /* Generate body and loops according to the information in |
4870 | nested_forall_info. */ |
4871 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
4872 | gfc_add_expr_to_block (block, tmp); |
4873 | } |
4874 | else |
4875 | { |
4876 | gfc_init_loopinfo (&loop); |
4877 | |
4878 | /* Associate the SS with the loop. */ |
4879 | gfc_add_ss_to_loop (&loop, rss); |
4880 | |
4881 | /* Setup the scalarizing loops and bounds. */ |
4882 | gfc_conv_ss_startstride (&loop); |
4883 | |
4884 | gfc_conv_loop_setup (&loop, &expr2->where); |
4885 | |
4886 | info = &rss->info->data.array; |
4887 | desc = info->descriptor; |
4888 | |
4889 | /* Make a new descriptor. */ |
4890 | parmtype = gfc_get_element_type (TREE_TYPE (desc)); |
4891 | parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0, |
4892 | loop.from, loop.to, 1, |
4893 | GFC_ARRAY_UNKNOWN, true); |
4894 | |
4895 | /* Allocate temporary for nested forall construct. */ |
4896 | tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type: parmtype, |
4897 | inner_size, NULL, block, ptemp1: &ptemp1); |
4898 | gfc_start_block (&body); |
4899 | gfc_init_se (&lse, NULL); |
4900 | lse.expr = gfc_build_array_ref (tmp1, count, NULL); |
4901 | lse.direct_byref = 1; |
4902 | gfc_conv_expr_descriptor (&lse, expr2); |
4903 | |
4904 | gfc_add_block_to_block (&body, &lse.pre); |
4905 | gfc_add_block_to_block (&body, &lse.post); |
4906 | |
4907 | /* Increment count. */ |
4908 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
4909 | count, gfc_index_one_node); |
4910 | gfc_add_modify (&body, count, tmp); |
4911 | |
4912 | tmp = gfc_finish_block (&body); |
4913 | |
4914 | /* Generate body and loops according to the information in |
4915 | nested_forall_info. */ |
4916 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
4917 | gfc_add_expr_to_block (block, tmp); |
4918 | |
4919 | /* Reset count. */ |
4920 | gfc_add_modify (block, count, gfc_index_zero_node); |
4921 | |
4922 | parm = gfc_build_array_ref (tmp1, count, NULL); |
4923 | gfc_init_se (&lse, NULL); |
4924 | gfc_conv_expr_descriptor (&lse, expr1); |
4925 | gfc_add_modify (&lse.pre, lse.expr, parm); |
4926 | gfc_start_block (&body); |
4927 | gfc_add_block_to_block (&body, &lse.pre); |
4928 | gfc_add_block_to_block (&body, &lse.post); |
4929 | |
4930 | /* Increment count. */ |
4931 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
4932 | count, gfc_index_one_node); |
4933 | gfc_add_modify (&body, count, tmp); |
4934 | |
4935 | tmp = gfc_finish_block (&body); |
4936 | |
4937 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
4938 | gfc_add_expr_to_block (block, tmp); |
4939 | } |
4940 | /* Free the temporary. */ |
4941 | if (ptemp1) |
4942 | { |
4943 | tmp = gfc_call_free (ptemp1); |
4944 | gfc_add_expr_to_block (block, tmp); |
4945 | } |
4946 | } |
4947 | |
4948 | |
4949 | /* FORALL and WHERE statements are really nasty, especially when you nest |
4950 | them. All the rhs of a forall assignment must be evaluated before the |
4951 | actual assignments are performed. Presumably this also applies to all the |
4952 | assignments in an inner where statement. */ |
4953 | |
4954 | /* Generate code for a FORALL statement. Any temporaries are allocated as a |
4955 | linear array, relying on the fact that we process in the same order in all |
4956 | loops. |
4957 | |
4958 | forall (i=start:end:stride; maskexpr) |
4959 | e<i> = f<i> |
4960 | g<i> = h<i> |
4961 | end forall |
4962 | (where e,f,g,h<i> are arbitrary expressions possibly involving i) |
4963 | Translates to: |
4964 | count = ((end + 1 - start) / stride) |
4965 | masktmp(:) = maskexpr(:) |
4966 | |
4967 | maskindex = 0; |
4968 | for (i = start; i <= end; i += stride) |
4969 | { |
4970 | if (masktmp[maskindex++]) |
4971 | e<i> = f<i> |
4972 | } |
4973 | maskindex = 0; |
4974 | for (i = start; i <= end; i += stride) |
4975 | { |
4976 | if (masktmp[maskindex++]) |
4977 | g<i> = h<i> |
4978 | } |
4979 | |
4980 | Note that this code only works when there are no dependencies. |
4981 | Forall loop with array assignments and data dependencies are a real pain, |
4982 | because the size of the temporary cannot always be determined before the |
4983 | loop is executed. This problem is compounded by the presence of nested |
4984 | FORALL constructs. |
4985 | */ |
4986 | |
4987 | static tree |
4988 | gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info) |
4989 | { |
4990 | stmtblock_t pre; |
4991 | stmtblock_t post; |
4992 | stmtblock_t block; |
4993 | stmtblock_t body; |
4994 | tree *var; |
4995 | tree *start; |
4996 | tree *end; |
4997 | tree *step; |
4998 | gfc_expr **varexpr; |
4999 | tree tmp; |
5000 | tree assign; |
5001 | tree size; |
5002 | tree maskindex; |
5003 | tree mask; |
5004 | tree pmask; |
5005 | tree cycle_label = NULL_TREE; |
5006 | int n; |
5007 | int nvar; |
5008 | int need_temp; |
5009 | gfc_forall_iterator *fa; |
5010 | gfc_se se; |
5011 | gfc_code *c; |
5012 | gfc_saved_var *saved_vars; |
5013 | iter_info *this_forall; |
5014 | forall_info *info; |
5015 | bool need_mask; |
5016 | |
5017 | /* Do nothing if the mask is false. */ |
5018 | if (code->expr1 |
5019 | && code->expr1->expr_type == EXPR_CONSTANT |
5020 | && !code->expr1->value.logical) |
5021 | return build_empty_stmt (input_location); |
5022 | |
5023 | n = 0; |
5024 | /* Count the FORALL index number. */ |
5025 | for (fa = code->ext.forall_iterator; fa; fa = fa->next) |
5026 | n++; |
5027 | nvar = n; |
5028 | |
5029 | /* Allocate the space for var, start, end, step, varexpr. */ |
5030 | var = XCNEWVEC (tree, nvar); |
5031 | start = XCNEWVEC (tree, nvar); |
5032 | end = XCNEWVEC (tree, nvar); |
5033 | step = XCNEWVEC (tree, nvar); |
5034 | varexpr = XCNEWVEC (gfc_expr *, nvar); |
5035 | saved_vars = XCNEWVEC (gfc_saved_var, nvar); |
5036 | |
5037 | /* Allocate the space for info. */ |
5038 | info = XCNEW (forall_info); |
5039 | |
5040 | gfc_start_block (&pre); |
5041 | gfc_init_block (&post); |
5042 | gfc_init_block (&block); |
5043 | |
5044 | n = 0; |
5045 | for (fa = code->ext.forall_iterator; fa; fa = fa->next) |
5046 | { |
5047 | gfc_symbol *sym = fa->var->symtree->n.sym; |
5048 | |
5049 | /* Allocate space for this_forall. */ |
5050 | this_forall = XCNEW (iter_info); |
5051 | |
5052 | /* Create a temporary variable for the FORALL index. */ |
5053 | tmp = gfc_typenode_for_spec (&sym->ts); |
5054 | var[n] = gfc_create_var (tmp, sym->name); |
5055 | gfc_shadow_sym (sym, var[n], &saved_vars[n]); |
5056 | |
5057 | /* Record it in this_forall. */ |
5058 | this_forall->var = var[n]; |
5059 | |
5060 | /* Replace the index symbol's backend_decl with the temporary decl. */ |
5061 | sym->backend_decl = var[n]; |
5062 | |
5063 | /* Work out the start, end and stride for the loop. */ |
5064 | gfc_init_se (&se, NULL); |
5065 | gfc_conv_expr_val (se: &se, expr: fa->start); |
5066 | /* Record it in this_forall. */ |
5067 | this_forall->start = se.expr; |
5068 | gfc_add_block_to_block (&block, &se.pre); |
5069 | start[n] = se.expr; |
5070 | |
5071 | gfc_init_se (&se, NULL); |
5072 | gfc_conv_expr_val (se: &se, expr: fa->end); |
5073 | /* Record it in this_forall. */ |
5074 | this_forall->end = se.expr; |
5075 | gfc_make_safe_expr (se: &se); |
5076 | gfc_add_block_to_block (&block, &se.pre); |
5077 | end[n] = se.expr; |
5078 | |
5079 | gfc_init_se (&se, NULL); |
5080 | gfc_conv_expr_val (se: &se, expr: fa->stride); |
5081 | /* Record it in this_forall. */ |
5082 | this_forall->step = se.expr; |
5083 | gfc_make_safe_expr (se: &se); |
5084 | gfc_add_block_to_block (&block, &se.pre); |
5085 | step[n] = se.expr; |
5086 | |
5087 | /* Set the NEXT field of this_forall to NULL. */ |
5088 | this_forall->next = NULL; |
5089 | /* Link this_forall to the info construct. */ |
5090 | if (info->this_loop) |
5091 | { |
5092 | iter_info *iter_tmp = info->this_loop; |
5093 | while (iter_tmp->next != NULL) |
5094 | iter_tmp = iter_tmp->next; |
5095 | iter_tmp->next = this_forall; |
5096 | } |
5097 | else |
5098 | info->this_loop = this_forall; |
5099 | |
5100 | n++; |
5101 | } |
5102 | nvar = n; |
5103 | |
5104 | /* Calculate the size needed for the current forall level. */ |
5105 | size = gfc_index_one_node; |
5106 | for (n = 0; n < nvar; n++) |
5107 | { |
5108 | /* size = (end + step - start) / step. */ |
5109 | tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]), |
5110 | step[n], start[n]); |
5111 | tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]), |
5112 | end[n], tmp); |
5113 | tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp), |
5114 | tmp, step[n]); |
5115 | tmp = convert (gfc_array_index_type, tmp); |
5116 | |
5117 | size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type, |
5118 | size, tmp); |
5119 | } |
5120 | |
5121 | /* Record the nvar and size of current forall level. */ |
5122 | info->nvar = nvar; |
5123 | info->size = size; |
5124 | |
5125 | if (code->expr1) |
5126 | { |
5127 | /* If the mask is .true., consider the FORALL unconditional. */ |
5128 | if (code->expr1->expr_type == EXPR_CONSTANT |
5129 | && code->expr1->value.logical) |
5130 | need_mask = false; |
5131 | else |
5132 | need_mask = true; |
5133 | } |
5134 | else |
5135 | need_mask = false; |
5136 | |
5137 | /* First we need to allocate the mask. */ |
5138 | if (need_mask) |
5139 | { |
5140 | /* As the mask array can be very big, prefer compact boolean types. */ |
5141 | tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind); |
5142 | mask = allocate_temp_for_forall_nest (nested_forall_info, type: mask_type, |
5143 | inner_size: size, NULL, block: &block, ptemp1: &pmask); |
5144 | maskindex = gfc_create_var_np (gfc_array_index_type, "mi" ); |
5145 | |
5146 | /* Record them in the info structure. */ |
5147 | info->maskindex = maskindex; |
5148 | info->mask = mask; |
5149 | } |
5150 | else |
5151 | { |
5152 | /* No mask was specified. */ |
5153 | maskindex = NULL_TREE; |
5154 | mask = pmask = NULL_TREE; |
5155 | } |
5156 | |
5157 | /* Link the current forall level to nested_forall_info. */ |
5158 | info->prev_nest = nested_forall_info; |
5159 | nested_forall_info = info; |
5160 | |
5161 | /* Copy the mask into a temporary variable if required. |
5162 | For now we assume a mask temporary is needed. */ |
5163 | if (need_mask) |
5164 | { |
5165 | /* As the mask array can be very big, prefer compact boolean types. */ |
5166 | tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind); |
5167 | |
5168 | gfc_add_modify (&block, maskindex, gfc_index_zero_node); |
5169 | |
5170 | /* Start of mask assignment loop body. */ |
5171 | gfc_start_block (&body); |
5172 | |
5173 | /* Evaluate the mask expression. */ |
5174 | gfc_init_se (&se, NULL); |
5175 | gfc_conv_expr_val (se: &se, expr: code->expr1); |
5176 | gfc_add_block_to_block (&body, &se.pre); |
5177 | |
5178 | /* Store the mask. */ |
5179 | se.expr = convert (mask_type, se.expr); |
5180 | |
5181 | tmp = gfc_build_array_ref (mask, maskindex, NULL); |
5182 | gfc_add_modify (&body, tmp, se.expr); |
5183 | |
5184 | /* Advance to the next mask element. */ |
5185 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
5186 | maskindex, gfc_index_one_node); |
5187 | gfc_add_modify (&body, maskindex, tmp); |
5188 | |
5189 | /* Generate the loops. */ |
5190 | tmp = gfc_finish_block (&body); |
5191 | tmp = gfc_trans_nested_forall_loop (nested_forall_info: info, body: tmp, mask_flag: 0); |
5192 | gfc_add_expr_to_block (&block, tmp); |
5193 | } |
5194 | |
5195 | if (code->op == EXEC_DO_CONCURRENT) |
5196 | { |
5197 | gfc_init_block (&body); |
5198 | cycle_label = gfc_build_label_decl (NULL_TREE); |
5199 | code->cycle_label = cycle_label; |
5200 | tmp = gfc_trans_code (code->block->next); |
5201 | gfc_add_expr_to_block (&body, tmp); |
5202 | |
5203 | if (TREE_USED (cycle_label)) |
5204 | { |
5205 | tmp = build1_v (LABEL_EXPR, cycle_label); |
5206 | gfc_add_expr_to_block (&body, tmp); |
5207 | } |
5208 | |
5209 | tmp = gfc_finish_block (&body); |
5210 | nested_forall_info->do_concurrent = true; |
5211 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp, mask_flag: 1); |
5212 | gfc_add_expr_to_block (&block, tmp); |
5213 | goto done; |
5214 | } |
5215 | |
5216 | c = code->block->next; |
5217 | |
5218 | /* TODO: loop merging in FORALL statements. */ |
5219 | /* Now that we've got a copy of the mask, generate the assignment loops. */ |
5220 | while (c) |
5221 | { |
5222 | switch (c->op) |
5223 | { |
5224 | case EXEC_ASSIGN: |
5225 | /* A scalar or array assignment. DO the simple check for |
5226 | lhs to rhs dependencies. These make a temporary for the |
5227 | rhs and form a second forall block to copy to variable. */ |
5228 | need_temp = check_forall_dependencies(c, pre: &pre, post: &post); |
5229 | |
5230 | /* Temporaries due to array assignment data dependencies introduce |
5231 | no end of problems. */ |
5232 | if (need_temp || flag_test_forall_temp) |
5233 | gfc_trans_assign_need_temp (expr1: c->expr1, expr2: c->expr2, NULL, invert: false, |
5234 | nested_forall_info, block: &block); |
5235 | else |
5236 | { |
5237 | /* Use the normal assignment copying routines. */ |
5238 | assign = gfc_trans_assignment (c->expr1, c->expr2, false, true); |
5239 | |
5240 | /* Generate body and loops. */ |
5241 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, |
5242 | body: assign, mask_flag: 1); |
5243 | gfc_add_expr_to_block (&block, tmp); |
5244 | } |
5245 | |
5246 | /* Cleanup any temporary symtrees that have been made to deal |
5247 | with dependencies. */ |
5248 | if (new_symtree) |
5249 | cleanup_forall_symtrees (c); |
5250 | |
5251 | break; |
5252 | |
5253 | case EXEC_WHERE: |
5254 | /* Translate WHERE or WHERE construct nested in FORALL. */ |
5255 | gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block); |
5256 | break; |
5257 | |
5258 | /* Pointer assignment inside FORALL. */ |
5259 | case EXEC_POINTER_ASSIGN: |
5260 | need_temp = gfc_check_dependency (c->expr1, c->expr2, 0); |
5261 | /* Avoid cases where a temporary would never be needed and where |
5262 | the temp code is guaranteed to fail. */ |
5263 | if (need_temp |
5264 | || (flag_test_forall_temp |
5265 | && c->expr2->expr_type != EXPR_CONSTANT |
5266 | && c->expr2->expr_type != EXPR_NULL)) |
5267 | gfc_trans_pointer_assign_need_temp (expr1: c->expr1, expr2: c->expr2, |
5268 | nested_forall_info, block: &block); |
5269 | else |
5270 | { |
5271 | /* Use the normal assignment copying routines. */ |
5272 | assign = gfc_trans_pointer_assignment (c->expr1, c->expr2); |
5273 | |
5274 | /* Generate body and loops. */ |
5275 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, |
5276 | body: assign, mask_flag: 1); |
5277 | gfc_add_expr_to_block (&block, tmp); |
5278 | } |
5279 | break; |
5280 | |
5281 | case EXEC_FORALL: |
5282 | tmp = gfc_trans_forall_1 (code: c, nested_forall_info); |
5283 | gfc_add_expr_to_block (&block, tmp); |
5284 | break; |
5285 | |
5286 | /* Explicit subroutine calls are prevented by the frontend but interface |
5287 | assignments can legitimately produce them. */ |
5288 | case EXEC_ASSIGN_CALL: |
5289 | assign = gfc_trans_call (code: c, dependency_check: true, NULL_TREE, NULL_TREE, invert: false); |
5290 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, body: assign, mask_flag: 1); |
5291 | gfc_add_expr_to_block (&block, tmp); |
5292 | break; |
5293 | |
5294 | default: |
5295 | gcc_unreachable (); |
5296 | } |
5297 | |
5298 | c = c->next; |
5299 | } |
5300 | |
5301 | done: |
5302 | /* Restore the original index variables. */ |
5303 | for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++) |
5304 | gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]); |
5305 | |
5306 | /* Free the space for var, start, end, step, varexpr. */ |
5307 | free (ptr: var); |
5308 | free (ptr: start); |
5309 | free (ptr: end); |
5310 | free (ptr: step); |
5311 | free (ptr: varexpr); |
5312 | free (ptr: saved_vars); |
5313 | |
5314 | for (this_forall = info->this_loop; this_forall;) |
5315 | { |
5316 | iter_info *next = this_forall->next; |
5317 | free (ptr: this_forall); |
5318 | this_forall = next; |
5319 | } |
5320 | |
5321 | /* Free the space for this forall_info. */ |
5322 | free (ptr: info); |
5323 | |
5324 | if (pmask) |
5325 | { |
5326 | /* Free the temporary for the mask. */ |
5327 | tmp = gfc_call_free (pmask); |
5328 | gfc_add_expr_to_block (&block, tmp); |
5329 | } |
5330 | if (maskindex) |
5331 | pushdecl (maskindex); |
5332 | |
5333 | gfc_add_block_to_block (&pre, &block); |
5334 | gfc_add_block_to_block (&pre, &post); |
5335 | |
5336 | return gfc_finish_block (&pre); |
5337 | } |
5338 | |
5339 | |
5340 | /* Translate the FORALL statement or construct. */ |
5341 | |
5342 | tree gfc_trans_forall (gfc_code * code) |
5343 | { |
5344 | return gfc_trans_forall_1 (code, NULL); |
5345 | } |
5346 | |
5347 | |
5348 | /* Translate the DO CONCURRENT construct. */ |
5349 | |
5350 | tree gfc_trans_do_concurrent (gfc_code * code) |
5351 | { |
5352 | return gfc_trans_forall_1 (code, NULL); |
5353 | } |
5354 | |
5355 | |
5356 | /* Evaluate the WHERE mask expression, copy its value to a temporary. |
5357 | If the WHERE construct is nested in FORALL, compute the overall temporary |
5358 | needed by the WHERE mask expression multiplied by the iterator number of |
5359 | the nested forall. |
5360 | ME is the WHERE mask expression. |
5361 | MASK is the current execution mask upon input, whose sense may or may |
5362 | not be inverted as specified by the INVERT argument. |
5363 | CMASK is the updated execution mask on output, or NULL if not required. |
5364 | PMASK is the pending execution mask on output, or NULL if not required. |
5365 | BLOCK is the block in which to place the condition evaluation loops. */ |
5366 | |
5367 | static void |
5368 | gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info, |
5369 | tree mask, bool invert, tree cmask, tree pmask, |
5370 | tree mask_type, stmtblock_t * block) |
5371 | { |
5372 | tree tmp, tmp1; |
5373 | gfc_ss *lss, *; |
5374 | gfc_loopinfo loop; |
5375 | stmtblock_t body, body1; |
5376 | tree count, cond, mtmp; |
5377 | gfc_se lse, rse; |
5378 | |
5379 | gfc_init_loopinfo (&loop); |
5380 | |
5381 | lss = gfc_walk_expr (me); |
5382 | rss = gfc_walk_expr (me); |
5383 | |
5384 | /* Variable to index the temporary. */ |
5385 | count = gfc_create_var (gfc_array_index_type, "count" ); |
5386 | /* Initialize count. */ |
5387 | gfc_add_modify (block, count, gfc_index_zero_node); |
5388 | |
5389 | gfc_start_block (&body); |
5390 | |
5391 | gfc_init_se (&rse, NULL); |
5392 | gfc_init_se (&lse, NULL); |
5393 | |
5394 | if (lss == gfc_ss_terminator) |
5395 | { |
5396 | gfc_init_block (&body1); |
5397 | } |
5398 | else |
5399 | { |
5400 | /* Initialize the loop. */ |
5401 | gfc_init_loopinfo (&loop); |
5402 | |
5403 | /* We may need LSS to determine the shape of the expression. */ |
5404 | gfc_add_ss_to_loop (&loop, lss); |
5405 | gfc_add_ss_to_loop (&loop, rss); |
5406 | |
5407 | gfc_conv_ss_startstride (&loop); |
5408 | gfc_conv_loop_setup (&loop, &me->where); |
5409 | |
5410 | gfc_mark_ss_chain_used (rss, 1); |
5411 | /* Start the loop body. */ |
5412 | gfc_start_scalarized_body (&loop, &body1); |
5413 | |
5414 | /* Translate the expression. */ |
5415 | gfc_copy_loopinfo_to_se (&rse, &loop); |
5416 | rse.ss = rss; |
5417 | gfc_conv_expr (se: &rse, expr: me); |
5418 | } |
5419 | |
5420 | /* Variable to evaluate mask condition. */ |
5421 | cond = gfc_create_var (mask_type, "cond" ); |
5422 | if (mask && (cmask || pmask)) |
5423 | mtmp = gfc_create_var (mask_type, "mask" ); |
5424 | else mtmp = NULL_TREE; |
5425 | |
5426 | gfc_add_block_to_block (&body1, &lse.pre); |
5427 | gfc_add_block_to_block (&body1, &rse.pre); |
5428 | |
5429 | gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr)); |
5430 | |
5431 | if (mask && (cmask || pmask)) |
5432 | { |
5433 | tmp = gfc_build_array_ref (mask, count, NULL); |
5434 | if (invert) |
5435 | tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp); |
5436 | gfc_add_modify (&body1, mtmp, tmp); |
5437 | } |
5438 | |
5439 | if (cmask) |
5440 | { |
5441 | tmp1 = gfc_build_array_ref (cmask, count, NULL); |
5442 | tmp = cond; |
5443 | if (mask) |
5444 | tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, |
5445 | mtmp, tmp); |
5446 | gfc_add_modify (&body1, tmp1, tmp); |
5447 | } |
5448 | |
5449 | if (pmask) |
5450 | { |
5451 | tmp1 = gfc_build_array_ref (pmask, count, NULL); |
5452 | tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond); |
5453 | if (mask) |
5454 | tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp, |
5455 | tmp); |
5456 | gfc_add_modify (&body1, tmp1, tmp); |
5457 | } |
5458 | |
5459 | gfc_add_block_to_block (&body1, &lse.post); |
5460 | gfc_add_block_to_block (&body1, &rse.post); |
5461 | |
5462 | if (lss == gfc_ss_terminator) |
5463 | { |
5464 | gfc_add_block_to_block (&body, &body1); |
5465 | } |
5466 | else |
5467 | { |
5468 | /* Increment count. */ |
5469 | tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
5470 | count, gfc_index_one_node); |
5471 | gfc_add_modify (&body1, count, tmp1); |
5472 | |
5473 | /* Generate the copying loops. */ |
5474 | gfc_trans_scalarizing_loops (&loop, &body1); |
5475 | |
5476 | gfc_add_block_to_block (&body, &loop.pre); |
5477 | gfc_add_block_to_block (&body, &loop.post); |
5478 | |
5479 | gfc_cleanup_loop (&loop); |
5480 | /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful |
5481 | as tree nodes in SS may not be valid in different scope. */ |
5482 | } |
5483 | |
5484 | tmp1 = gfc_finish_block (&body); |
5485 | /* If the WHERE construct is inside FORALL, fill the full temporary. */ |
5486 | if (nested_forall_info != NULL) |
5487 | tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, body: tmp1, mask_flag: 1); |
5488 | |
5489 | gfc_add_expr_to_block (block, tmp1); |
5490 | } |
5491 | |
5492 | |
5493 | /* Translate an assignment statement in a WHERE statement or construct |
5494 | statement. The MASK expression is used to control which elements |
5495 | of EXPR1 shall be assigned. The sense of MASK is specified by |
5496 | INVERT. */ |
5497 | |
5498 | static tree |
5499 | gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2, |
5500 | tree mask, bool invert, |
5501 | tree count1, tree count2, |
5502 | gfc_code *cnext) |
5503 | { |
5504 | gfc_se lse; |
5505 | gfc_se rse; |
5506 | gfc_ss *lss; |
5507 | gfc_ss *lss_section; |
5508 | gfc_ss *; |
5509 | |
5510 | gfc_loopinfo loop; |
5511 | tree tmp; |
5512 | stmtblock_t block; |
5513 | stmtblock_t body; |
5514 | tree index, maskexpr; |
5515 | |
5516 | /* A defined assignment. */ |
5517 | if (cnext && cnext->resolved_sym) |
5518 | return gfc_trans_call (code: cnext, dependency_check: true, mask, count1, invert); |
5519 | |
5520 | #if 0 |
5521 | /* TODO: handle this special case. |
5522 | Special case a single function returning an array. */ |
5523 | if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0) |
5524 | { |
5525 | tmp = gfc_trans_arrayfunc_assign (expr1, expr2); |
5526 | if (tmp) |
5527 | return tmp; |
5528 | } |
5529 | #endif |
5530 | |
5531 | /* Assignment of the form lhs = rhs. */ |
5532 | gfc_start_block (&block); |
5533 | |
5534 | gfc_init_se (&lse, NULL); |
5535 | gfc_init_se (&rse, NULL); |
5536 | |
5537 | /* Walk the lhs. */ |
5538 | lss = gfc_walk_expr (expr1); |
5539 | rss = NULL; |
5540 | |
5541 | /* In each where-assign-stmt, the mask-expr and the variable being |
5542 | defined shall be arrays of the same shape. */ |
5543 | gcc_assert (lss != gfc_ss_terminator); |
5544 | |
5545 | /* The assignment needs scalarization. */ |
5546 | lss_section = lss; |
5547 | |
5548 | /* Find a non-scalar SS from the lhs. */ |
5549 | while (lss_section != gfc_ss_terminator |
5550 | && lss_section->info->type != GFC_SS_SECTION) |
5551 | lss_section = lss_section->next; |
5552 | |
5553 | gcc_assert (lss_section != gfc_ss_terminator); |
5554 | |
5555 | /* Initialize the scalarizer. */ |
5556 | gfc_init_loopinfo (&loop); |
5557 | |
5558 | /* Walk the rhs. */ |
5559 | rss = gfc_walk_expr (expr2); |
5560 | if (rss == gfc_ss_terminator) |
5561 | { |
5562 | /* The rhs is scalar. Add a ss for the expression. */ |
5563 | rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2); |
5564 | rss->info->where = 1; |
5565 | } |
5566 | |
5567 | /* Associate the SS with the loop. */ |
5568 | gfc_add_ss_to_loop (&loop, lss); |
5569 | gfc_add_ss_to_loop (&loop, rss); |
5570 | |
5571 | /* Calculate the bounds of the scalarization. */ |
5572 | gfc_conv_ss_startstride (&loop); |
5573 | |
5574 | /* Resolve any data dependencies in the statement. */ |
5575 | gfc_conv_resolve_dependencies (&loop, lss_section, rss); |
5576 | |
5577 | /* Setup the scalarizing loops. */ |
5578 | gfc_conv_loop_setup (&loop, &expr2->where); |
5579 | |
5580 | /* Setup the gfc_se structures. */ |
5581 | gfc_copy_loopinfo_to_se (&lse, &loop); |
5582 | gfc_copy_loopinfo_to_se (&rse, &loop); |
5583 | |
5584 | rse.ss = rss; |
5585 | gfc_mark_ss_chain_used (rss, 1); |
5586 | if (loop.temp_ss == NULL) |
5587 | { |
5588 | lse.ss = lss; |
5589 | gfc_mark_ss_chain_used (lss, 1); |
5590 | } |
5591 | else |
5592 | { |
5593 | lse.ss = loop.temp_ss; |
5594 | gfc_mark_ss_chain_used (lss, 3); |
5595 | gfc_mark_ss_chain_used (loop.temp_ss, 3); |
5596 | } |
5597 | |
5598 | /* Start the scalarized loop body. */ |
5599 | gfc_start_scalarized_body (&loop, &body); |
5600 | |
5601 | /* Translate the expression. */ |
5602 | gfc_conv_expr (se: &rse, expr: expr2); |
5603 | if (lss != gfc_ss_terminator && loop.temp_ss != NULL) |
5604 | gfc_conv_tmp_array_ref (se: &lse); |
5605 | else |
5606 | gfc_conv_expr (se: &lse, expr: expr1); |
5607 | |
5608 | /* Form the mask expression according to the mask. */ |
5609 | index = count1; |
5610 | maskexpr = gfc_build_array_ref (mask, index, NULL); |
5611 | if (invert) |
5612 | maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, |
5613 | TREE_TYPE (maskexpr), maskexpr); |
5614 | |
5615 | /* Use the scalar assignment as is. */ |
5616 | tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, |
5617 | false, loop.temp_ss == NULL); |
5618 | |
5619 | tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location)); |
5620 | |
5621 | gfc_add_expr_to_block (&body, tmp); |
5622 | |
5623 | if (lss == gfc_ss_terminator) |
5624 | { |
5625 | /* Increment count1. */ |
5626 | tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, |
5627 | count1, gfc_index_one_node); |
5628 | gfc_add_modify (&body, count1, tmp); |
5629 | |
5630 | /* Use the scalar assignment as is. */ |
5631 | gfc_add_block_to_block (&block, &body); |
5632 | } |
5633 | else |
5634 | { |
5635 | gcc_assert (lse.ss == gfc_ss_terminator |
5636 | && rse.ss == gfc_ss_terminator); |
5637 | |
5638 | if (loop.temp_ss != NULL) |
5639 | { |
5640 | /* Increment count1 before finish the main body of a scalarized |
5641 | expression. */ |
5642 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
5643 | gfc_array_index_type, count1, gfc_index_one_node); |
5644 | gfc_add_modify (&body, count1, tmp); |
5645 | gfc_trans_scalarized_loop_boundary (&loop, &body); |
5646 | |
5647 | /* We need to copy the temporary to the actual lhs. */ |
5648 | gfc_init_se (&lse, NULL); |
5649 | gfc_init_se (&rse, NULL); |
5650 | gfc_copy_loopinfo_to_se (&lse, &loop); |
5651 | gfc_copy_loopinfo_to_se (&rse, &loop); |
5652 | |
5653 | rse.ss = loop.temp_ss; |
5654 | lse.ss = lss; |
5655 | |
5656 | gfc_conv_tmp_array_ref (se: &rse); |
5657 | gfc_conv_expr (se: &lse, expr: expr1); |
5658 | |
5659 | gcc_assert (lse.ss == gfc_ss_terminator |
5660 | && rse.ss == gfc_ss_terminator); |
5661 | |
5662 | /* Form the mask expression according to the mask tree list. */ |
5663 | index = count2; |
5664 | maskexpr = gfc_build_array_ref (mask, index, NULL); |
5665 | if (invert) |
5666 | maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR, |
5667 | TREE_TYPE (maskexpr), maskexpr); |
5668 | |
5669 | /* Use the scalar assignment as is. */ |
5670 | tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, true); |
5671 | tmp = build3_v (COND_EXPR, maskexpr, tmp, |
5672 | build_empty_stmt (input_location)); |
5673 | gfc_add_expr_to_block (&body, tmp); |
5674 | |
5675 | /* Increment count2. */ |
5676 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
5677 | gfc_array_index_type, count2, |
5678 | gfc_index_one_node); |
5679 | gfc_add_modify (&body, count2, tmp); |
5680 | } |
5681 | else |
5682 | { |
5683 | /* Increment count1. */ |
5684 | tmp = fold_build2_loc (input_location, PLUS_EXPR, |
5685 | gfc_array_index_type, count1, |
5686 | gfc_index_one_node); |
5687 | gfc_add_modify (&body, count1, tmp); |
5688 | } |
5689 | |
5690 | /* Generate the copying loops. */ |
5691 | gfc_trans_scalarizing_loops (&loop, &body); |
5692 | |
5693 | /* Wrap the whole thing up. */ |
5694 | gfc_add_block_to_block (&block, &loop.pre); |
5695 | gfc_add_block_to_block (&block, &loop.post); |
5696 | gfc_cleanup_loop (&loop); |
5697 | } |
5698 | |
5699 | return gfc_finish_block (&block); |
5700 | } |
5701 | |
5702 | |
5703 | /* Translate the WHERE construct or statement. |
5704 | This function can be called iteratively to translate the nested WHERE |
5705 | construct or statement. |
5706 | MASK is the control mask. */ |
5707 | |
5708 | static void |
5709 | gfc_trans_where_2 (gfc_code * code, tree mask, bool invert, |
5710 | forall_info * nested_forall_info, stmtblock_t * block) |
5711 | { |
5712 | stmtblock_t inner_size_body; |
5713 | tree inner_size, size; |
5714 | gfc_ss *lss, *; |
5715 | tree mask_type; |
5716 | gfc_expr *expr1; |
5717 | gfc_expr *expr2; |
5718 | gfc_code *cblock; |
5719 | gfc_code *cnext; |
5720 | tree tmp; |
5721 | tree cond; |
5722 | tree count1, count2; |
5723 | bool need_cmask; |
5724 | bool need_pmask; |
5725 | int need_temp; |
5726 | tree pcmask = NULL_TREE; |
5727 | tree ppmask = NULL_TREE; |
5728 | tree cmask = NULL_TREE; |
5729 | tree pmask = NULL_TREE; |
5730 | gfc_actual_arglist *arg; |
5731 | |
5732 | /* the WHERE statement or the WHERE construct statement. */ |
5733 | cblock = code->block; |
5734 | |
5735 | /* As the mask array can be very big, prefer compact boolean types. */ |
5736 | mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind); |
5737 | |
5738 | /* Determine which temporary masks are needed. */ |
5739 | if (!cblock->block) |
5740 | { |
5741 | /* One clause: No ELSEWHEREs. */ |
5742 | need_cmask = (cblock->next != 0); |
5743 | need_pmask = false; |
5744 | } |
5745 | else if (cblock->block->block) |
5746 | { |
5747 | /* Three or more clauses: Conditional ELSEWHEREs. */ |
5748 | need_cmask = true; |
5749 | need_pmask = true; |
5750 | } |
5751 | else if (cblock->next) |
5752 | { |
5753 | /* Two clauses, the first non-empty. */ |
5754 | need_cmask = true; |
5755 | need_pmask = (mask != NULL_TREE |
5756 | && cblock->block->next != 0); |
5757 | } |
5758 | else if (!cblock->block->next) |
5759 | { |
5760 | /* Two clauses, both empty. */ |
5761 | need_cmask = false; |
5762 | need_pmask = false; |
5763 | } |
5764 | /* Two clauses, the first empty, the second non-empty. */ |
5765 | else if (mask) |
5766 | { |
5767 | need_cmask = (cblock->block->expr1 != 0); |
5768 | need_pmask = true; |
5769 | } |
5770 | else |
5771 | { |
5772 | need_cmask = true; |
5773 | need_pmask = false; |
5774 | } |
5775 | |
5776 | if (need_cmask || need_pmask) |
5777 | { |
5778 | /* Calculate the size of temporary needed by the mask-expr. */ |
5779 | gfc_init_block (&inner_size_body); |
5780 | inner_size = compute_inner_temp_size (expr1: cblock->expr1, expr2: cblock->expr1, |
5781 | pblock: &inner_size_body, lss: &lss, rss: &rss); |
5782 | |
5783 | gfc_free_ss_chain (lss); |
5784 | gfc_free_ss_chain (rss); |
5785 | |
5786 | /* Calculate the total size of temporary needed. */ |
5787 | size = compute_overall_iter_number (nested_forall_info, inner_size, |
5788 | inner_size_body: &inner_size_body, block); |
5789 | |
5790 | /* Check whether the size is negative. */ |
5791 | cond = fold_build2_loc (input_location, LE_EXPR, logical_type_node, size, |
5792 | gfc_index_zero_node); |
5793 | size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, |
5794 | cond, gfc_index_zero_node, size); |
5795 | size = gfc_evaluate_now (size, block); |
5796 | |
5797 | /* Allocate temporary for WHERE mask if needed. */ |
5798 | if (need_cmask) |
5799 | cmask = allocate_temp_for_forall_nest_1 (type: mask_type, size, block, |
5800 | ptemp1: &pcmask); |
5801 | |
5802 | /* Allocate temporary for !mask if needed. */ |
5803 | if (need_pmask) |
5804 | pmask = allocate_temp_for_forall_nest_1 (type: mask_type, size, block, |
5805 | ptemp1: &ppmask); |
5806 | } |
5807 | |
5808 | while (cblock) |
5809 | { |
5810 | /* Each time around this loop, the where clause is conditional |
5811 | on the value of mask and invert, which are updated at the |
5812 | bottom of the loop. */ |
5813 | |
5814 | /* Has mask-expr. */ |
5815 | if (cblock->expr1) |
5816 | { |
5817 | /* Ensure that the WHERE mask will be evaluated exactly once. |
5818 | If there are no statements in this WHERE/ELSEWHERE clause, |
5819 | then we don't need to update the control mask (cmask). |
5820 | If this is the last clause of the WHERE construct, then |
5821 | we don't need to update the pending control mask (pmask). */ |
5822 | if (mask) |
5823 | gfc_evaluate_where_mask (me: cblock->expr1, nested_forall_info, |
5824 | mask, invert, |
5825 | cmask: cblock->next ? cmask : NULL_TREE, |
5826 | pmask: cblock->block ? pmask : NULL_TREE, |
5827 | mask_type, block); |
5828 | else |
5829 | gfc_evaluate_where_mask (me: cblock->expr1, nested_forall_info, |
5830 | NULL_TREE, invert: false, |
5831 | cmask: (cblock->next || cblock->block) |
5832 | ? cmask : NULL_TREE, |
5833 | NULL_TREE, mask_type, block); |
5834 | |
5835 | invert = false; |
5836 | } |
5837 | /* It's a final elsewhere-stmt. No mask-expr is present. */ |
5838 | else |
5839 | cmask = mask; |
5840 | |
5841 | /* The body of this where clause are controlled by cmask with |
5842 | sense specified by invert. */ |
5843 | |
5844 | /* Get the assignment statement of a WHERE statement, or the first |
5845 | statement in where-body-construct of a WHERE construct. */ |
5846 | cnext = cblock->next; |
5847 | while (cnext) |
5848 | { |
5849 | switch (cnext->op) |
5850 | { |
5851 | /* WHERE assignment statement. */ |
5852 | case EXEC_ASSIGN_CALL: |
5853 | |
5854 | arg = cnext->ext.actual; |
5855 | expr1 = expr2 = NULL; |
5856 | for (; arg; arg = arg->next) |
5857 | { |
5858 | if (!arg->expr) |
5859 | continue; |
5860 | if (expr1 == NULL) |
5861 | expr1 = arg->expr; |
5862 | else |
5863 | expr2 = arg->expr; |
5864 | } |
5865 | goto evaluate; |
5866 | |
5867 | case EXEC_ASSIGN: |
5868 | expr1 = cnext->expr1; |
5869 | expr2 = cnext->expr2; |
5870 | evaluate: |
5871 | if (nested_forall_info != NULL) |
5872 | { |
5873 | need_temp = gfc_check_dependency (expr1, expr2, 0); |
5874 | if ((need_temp || flag_test_forall_temp) |
5875 | && cnext->op != EXEC_ASSIGN_CALL) |
5876 | gfc_trans_assign_need_temp (expr1, expr2, |
5877 | wheremask: cmask, invert, |
5878 | nested_forall_info, block); |
5879 | else |
5880 | { |
5881 | /* Variables to control maskexpr. */ |
5882 | count1 = gfc_create_var (gfc_array_index_type, "count1" ); |
5883 | count2 = gfc_create_var (gfc_array_index_type, "count2" ); |
5884 | gfc_add_modify (block, count1, gfc_index_zero_node); |
5885 | gfc_add_modify (block, count2, gfc_index_zero_node); |
5886 | |
5887 | tmp = gfc_trans_where_assign (expr1, expr2, |
5888 | mask: cmask, invert, |
5889 | count1, count2, |
5890 | cnext); |
5891 | |
5892 | tmp = gfc_trans_nested_forall_loop (nested_forall_info, |
5893 | body: tmp, mask_flag: 1); |
5894 | gfc_add_expr_to_block (block, tmp); |
5895 | } |
5896 | } |
5897 | else |
5898 | { |
5899 | /* Variables to control maskexpr. */ |
5900 | count1 = gfc_create_var (gfc_array_index_type, "count1" ); |
5901 | count2 = gfc_create_var (gfc_array_index_type, "count2" ); |
5902 | gfc_add_modify (block, count1, gfc_index_zero_node); |
5903 | gfc_add_modify (block, count2, gfc_index_zero_node); |
5904 | |
5905 | tmp = gfc_trans_where_assign (expr1, expr2, |
5906 | mask: cmask, invert, |
5907 | count1, count2, |
5908 | cnext); |
5909 | gfc_add_expr_to_block (block, tmp); |
5910 | |
5911 | } |
5912 | break; |
5913 | |
5914 | /* WHERE or WHERE construct is part of a where-body-construct. */ |
5915 | case EXEC_WHERE: |
5916 | gfc_trans_where_2 (code: cnext, mask: cmask, invert, |
5917 | nested_forall_info, block); |
5918 | break; |
5919 | |
5920 | default: |
5921 | gcc_unreachable (); |
5922 | } |
5923 | |
5924 | /* The next statement within the same where-body-construct. */ |
5925 | cnext = cnext->next; |
5926 | } |
5927 | /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */ |
5928 | cblock = cblock->block; |
5929 | if (mask == NULL_TREE) |
5930 | { |
5931 | /* If we're the initial WHERE, we can simply invert the sense |
5932 | of the current mask to obtain the "mask" for the remaining |
5933 | ELSEWHEREs. */ |
5934 | invert = true; |
5935 | mask = cmask; |
5936 | } |
5937 | else |
5938 | { |
5939 | /* Otherwise, for nested WHERE's we need to use the pending mask. */ |
5940 | invert = false; |
5941 | mask = pmask; |
5942 | } |
5943 | } |
5944 | |
5945 | /* If we allocated a pending mask array, deallocate it now. */ |
5946 | if (ppmask) |
5947 | { |
5948 | tmp = gfc_call_free (ppmask); |
5949 | gfc_add_expr_to_block (block, tmp); |
5950 | } |
5951 | |
5952 | /* If we allocated a current mask array, deallocate it now. */ |
5953 | if (pcmask) |
5954 | { |
5955 | tmp = gfc_call_free (pcmask); |
5956 | gfc_add_expr_to_block (block, tmp); |
5957 | } |
5958 | } |
5959 | |
5960 | /* Translate a simple WHERE construct or statement without dependencies. |
5961 | CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR |
5962 | is the mask condition, and EBLOCK if non-NULL is the "else" clause. |
5963 | Currently both CBLOCK and EBLOCK are restricted to single assignments. */ |
5964 | |
5965 | static tree |
5966 | gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock) |
5967 | { |
5968 | stmtblock_t block, body; |
5969 | gfc_expr *cond, *tdst, *tsrc, *edst, *esrc; |
5970 | tree tmp, cexpr, tstmt, estmt; |
5971 | gfc_ss *css, *tdss, *tsss; |
5972 | gfc_se cse, tdse, tsse, edse, esse; |
5973 | gfc_loopinfo loop; |
5974 | gfc_ss *edss = 0; |
5975 | gfc_ss *esss = 0; |
5976 | bool maybe_workshare = false; |
5977 | |
5978 | /* Allow the scalarizer to workshare simple where loops. */ |
5979 | if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_BODY)) |
5980 | == OMPWS_WORKSHARE_FLAG) |
5981 | { |
5982 | maybe_workshare = true; |
5983 | ompws_flags |= OMPWS_SCALARIZER_WS | OMPWS_SCALARIZER_BODY; |
5984 | } |
5985 | |
5986 | cond = cblock->expr1; |
5987 | tdst = cblock->next->expr1; |
5988 | tsrc = cblock->next->expr2; |
5989 | edst = eblock ? eblock->next->expr1 : NULL; |
5990 | esrc = eblock ? eblock->next->expr2 : NULL; |
5991 | |
5992 | gfc_start_block (&block); |
5993 | gfc_init_loopinfo (&loop); |
5994 | |
5995 | /* Handle the condition. */ |
5996 | gfc_init_se (&cse, NULL); |
5997 | css = gfc_walk_expr (cond); |
5998 | gfc_add_ss_to_loop (&loop, css); |
5999 | |
6000 | /* Handle the then-clause. */ |
6001 | gfc_init_se (&tdse, NULL); |
6002 | gfc_init_se (&tsse, NULL); |
6003 | tdss = gfc_walk_expr (tdst); |
6004 | tsss = gfc_walk_expr (tsrc); |
6005 | if (tsss == gfc_ss_terminator) |
6006 | { |
6007 | tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc); |
6008 | tsss->info->where = 1; |
6009 | } |
6010 | gfc_add_ss_to_loop (&loop, tdss); |
6011 | gfc_add_ss_to_loop (&loop, tsss); |
6012 | |
6013 | if (eblock) |
6014 | { |
6015 | /* Handle the else clause. */ |
6016 | gfc_init_se (&edse, NULL); |
6017 | gfc_init_se (&esse, NULL); |
6018 | edss = gfc_walk_expr (edst); |
6019 | esss = gfc_walk_expr (esrc); |
6020 | if (esss == gfc_ss_terminator) |
6021 | { |
6022 | esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc); |
6023 | esss->info->where = 1; |
6024 | } |
6025 | gfc_add_ss_to_loop (&loop, edss); |
6026 | gfc_add_ss_to_loop (&loop, esss); |
6027 | } |
6028 | |
6029 | gfc_conv_ss_startstride (&loop); |
6030 | gfc_conv_loop_setup (&loop, &tdst->where); |
6031 | |
6032 | gfc_mark_ss_chain_used (css, 1); |
6033 | gfc_mark_ss_chain_used (tdss, 1); |
6034 | gfc_mark_ss_chain_used (tsss, 1); |
6035 | if (eblock) |
6036 | { |
6037 | gfc_mark_ss_chain_used (edss, 1); |
6038 | gfc_mark_ss_chain_used (esss, 1); |
6039 | } |
6040 | |
6041 | gfc_start_scalarized_body (&loop, &body); |
6042 | |
6043 | gfc_copy_loopinfo_to_se (&cse, &loop); |
6044 | gfc_copy_loopinfo_to_se (&tdse, &loop); |
6045 | gfc_copy_loopinfo_to_se (&tsse, &loop); |
6046 | cse.ss = css; |
6047 | tdse.ss = tdss; |
6048 | tsse.ss = tsss; |
6049 | if (eblock) |
6050 | { |
6051 | gfc_copy_loopinfo_to_se (&edse, &loop); |
6052 | gfc_copy_loopinfo_to_se (&esse, &loop); |
6053 | edse.ss = edss; |
6054 | esse.ss = esss; |
6055 | } |
6056 | |
6057 | gfc_conv_expr (se: &cse, expr: cond); |
6058 | gfc_add_block_to_block (&body, &cse.pre); |
6059 | cexpr = cse.expr; |
6060 | |
6061 | gfc_conv_expr (se: &tsse, expr: tsrc); |
6062 | if (tdss != gfc_ss_terminator && loop.temp_ss != NULL) |
6063 | gfc_conv_tmp_array_ref (se: &tdse); |
6064 | else |
6065 | gfc_conv_expr (se: &tdse, expr: tdst); |
6066 | |
6067 | if (eblock) |
6068 | { |
6069 | gfc_conv_expr (se: &esse, expr: esrc); |
6070 | if (edss != gfc_ss_terminator && loop.temp_ss != NULL) |
6071 | gfc_conv_tmp_array_ref (se: &edse); |
6072 | else |
6073 | gfc_conv_expr (se: &edse, expr: edst); |
6074 | } |
6075 | |
6076 | tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, true); |
6077 | estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, |
6078 | false, true) |
6079 | : build_empty_stmt (input_location); |
6080 | tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt); |
6081 | gfc_add_expr_to_block (&body, tmp); |
6082 | gfc_add_block_to_block (&body, &cse.post); |
6083 | |
6084 | if (maybe_workshare) |
6085 | ompws_flags &= ~OMPWS_SCALARIZER_BODY; |
6086 | gfc_trans_scalarizing_loops (&loop, &body); |
6087 | gfc_add_block_to_block (&block, &loop.pre); |
6088 | gfc_add_block_to_block (&block, &loop.post); |
6089 | gfc_cleanup_loop (&loop); |
6090 | |
6091 | return gfc_finish_block (&block); |
6092 | } |
6093 | |
6094 | /* As the WHERE or WHERE construct statement can be nested, we call |
6095 | gfc_trans_where_2 to do the translation, and pass the initial |
6096 | NULL values for both the control mask and the pending control mask. */ |
6097 | |
6098 | tree |
6099 | gfc_trans_where (gfc_code * code) |
6100 | { |
6101 | stmtblock_t block; |
6102 | gfc_code *cblock; |
6103 | gfc_code *eblock; |
6104 | |
6105 | cblock = code->block; |
6106 | if (cblock->next |
6107 | && cblock->next->op == EXEC_ASSIGN |
6108 | && !cblock->next->next) |
6109 | { |
6110 | eblock = cblock->block; |
6111 | if (!eblock) |
6112 | { |
6113 | /* A simple "WHERE (cond) x = y" statement or block is |
6114 | dependence free if cond is not dependent upon writing x, |
6115 | and the source y is unaffected by the destination x. */ |
6116 | if (!gfc_check_dependency (cblock->next->expr1, |
6117 | cblock->expr1, 0) |
6118 | && !gfc_check_dependency (cblock->next->expr1, |
6119 | cblock->next->expr2, 0)) |
6120 | return gfc_trans_where_3 (cblock, NULL); |
6121 | } |
6122 | else if (!eblock->expr1 |
6123 | && !eblock->block |
6124 | && eblock->next |
6125 | && eblock->next->op == EXEC_ASSIGN |
6126 | && !eblock->next->next) |
6127 | { |
6128 | /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE" |
6129 | block is dependence free if cond is not dependent on writes |
6130 | to x1 and x2, y1 is not dependent on writes to x2, and y2 |
6131 | is not dependent on writes to x1, and both y's are not |
6132 | dependent upon their own x's. In addition to this, the |
6133 | final two dependency checks below exclude all but the same |
6134 | array reference if the where and elswhere destinations |
6135 | are the same. In short, this is VERY conservative and this |
6136 | is needed because the two loops, required by the standard |
6137 | are coalesced in gfc_trans_where_3. */ |
6138 | if (!gfc_check_dependency (cblock->next->expr1, |
6139 | cblock->expr1, 0) |
6140 | && !gfc_check_dependency (eblock->next->expr1, |
6141 | cblock->expr1, 0) |
6142 | && !gfc_check_dependency (cblock->next->expr1, |
6143 | eblock->next->expr2, 1) |
6144 | && !gfc_check_dependency (eblock->next->expr1, |
6145 | cblock->next->expr2, 1) |
6146 | && !gfc_check_dependency (cblock->next->expr1, |
6147 | cblock->next->expr2, 1) |
6148 | && !gfc_check_dependency (eblock->next->expr1, |
6149 | eblock->next->expr2, 1) |
6150 | && !gfc_check_dependency (cblock->next->expr1, |
6151 | eblock->next->expr1, 0) |
6152 | && !gfc_check_dependency (eblock->next->expr1, |
6153 | cblock->next->expr1, 0)) |
6154 | return gfc_trans_where_3 (cblock, eblock); |
6155 | } |
6156 | } |
6157 | |
6158 | gfc_start_block (&block); |
6159 | |
6160 | gfc_trans_where_2 (code, NULL, invert: false, NULL, block: &block); |
6161 | |
6162 | return gfc_finish_block (&block); |
6163 | } |
6164 | |
6165 | |
6166 | /* CYCLE a DO loop. The label decl has already been created by |
6167 | gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code |
6168 | node at the head of the loop. We must mark the label as used. */ |
6169 | |
6170 | tree |
6171 | gfc_trans_cycle (gfc_code * code) |
6172 | { |
6173 | tree cycle_label; |
6174 | |
6175 | cycle_label = code->ext.which_construct->cycle_label; |
6176 | gcc_assert (cycle_label); |
6177 | |
6178 | TREE_USED (cycle_label) = 1; |
6179 | return build1_v (GOTO_EXPR, cycle_label); |
6180 | } |
6181 | |
6182 | |
6183 | /* EXIT a DO loop. Similar to CYCLE, but now the label is in |
6184 | TREE_VALUE (backend_decl) of the gfc_code node at the head of the |
6185 | loop. */ |
6186 | |
6187 | tree |
6188 | gfc_trans_exit (gfc_code * code) |
6189 | { |
6190 | tree exit_label; |
6191 | |
6192 | exit_label = code->ext.which_construct->exit_label; |
6193 | gcc_assert (exit_label); |
6194 | |
6195 | TREE_USED (exit_label) = 1; |
6196 | return build1_v (GOTO_EXPR, exit_label); |
6197 | } |
6198 | |
6199 | |
6200 | /* Get the initializer expression for the code and expr of an allocate. |
6201 | When no initializer is needed return NULL. */ |
6202 | |
6203 | static gfc_expr * |
6204 | allocate_get_initializer (gfc_code * code, gfc_expr * expr) |
6205 | { |
6206 | if (!gfc_bt_struct (expr->ts.type) && expr->ts.type != BT_CLASS) |
6207 | return NULL; |
6208 | |
6209 | /* An explicit type was given in allocate ( T:: object). */ |
6210 | if (code->ext.alloc.ts.type == BT_DERIVED |
6211 | && (code->ext.alloc.ts.u.derived->attr.alloc_comp |
6212 | || gfc_has_default_initializer (code->ext.alloc.ts.u.derived))) |
6213 | return gfc_default_initializer (&code->ext.alloc.ts); |
6214 | |
6215 | if (gfc_bt_struct (expr->ts.type) |
6216 | && (expr->ts.u.derived->attr.alloc_comp |
6217 | || gfc_has_default_initializer (expr->ts.u.derived))) |
6218 | return gfc_default_initializer (&expr->ts); |
6219 | |
6220 | if (expr->ts.type == BT_CLASS |
6221 | && (CLASS_DATA (expr)->ts.u.derived->attr.alloc_comp |
6222 | || gfc_has_default_initializer (CLASS_DATA (expr)->ts.u.derived))) |
6223 | return gfc_default_initializer (&CLASS_DATA (expr)->ts); |
6224 | |
6225 | return NULL; |
6226 | } |
6227 | |
6228 | /* Translate the ALLOCATE statement. */ |
6229 | |
6230 | tree |
6231 | gfc_trans_allocate (gfc_code * code) |
6232 | { |
6233 | gfc_alloc *al; |
6234 | gfc_expr *expr, *e3rhs = NULL, *init_expr; |
6235 | gfc_se se, se_sz; |
6236 | tree tmp; |
6237 | tree parm; |
6238 | tree stat; |
6239 | tree errmsg; |
6240 | tree errlen; |
6241 | tree label_errmsg; |
6242 | tree label_finish; |
6243 | tree memsz; |
6244 | tree al_vptr, al_len; |
6245 | /* If an expr3 is present, then store the tree for accessing its |
6246 | _vptr, and _len components in the variables, respectively. The |
6247 | element size, i.e. _vptr%size, is stored in expr3_esize. Any of |
6248 | the trees may be the NULL_TREE indicating that this is not |
6249 | available for expr3's type. */ |
6250 | tree expr3, expr3_vptr, expr3_len, expr3_esize; |
6251 | /* Classify what expr3 stores. */ |
6252 | enum { E3_UNSET = 0, E3_SOURCE, E3_MOLD, E3_DESC } e3_is; |
6253 | stmtblock_t block; |
6254 | stmtblock_t post; |
6255 | stmtblock_t final_block; |
6256 | tree nelems; |
6257 | bool upoly_expr, tmp_expr3_len_flag = false, al_len_needs_set, is_coarray; |
6258 | bool needs_caf_sync, caf_refs_comp; |
6259 | bool e3_has_nodescriptor = false; |
6260 | gfc_symtree *newsym = NULL; |
6261 | symbol_attribute caf_attr; |
6262 | gfc_actual_arglist *param_list; |
6263 | |
6264 | if (!code->ext.alloc.list) |
6265 | return NULL_TREE; |
6266 | |
6267 | stat = tmp = memsz = al_vptr = al_len = NULL_TREE; |
6268 | expr3 = expr3_vptr = expr3_len = expr3_esize = NULL_TREE; |
6269 | label_errmsg = label_finish = errmsg = errlen = NULL_TREE; |
6270 | e3_is = E3_UNSET; |
6271 | is_coarray = needs_caf_sync = false; |
6272 | |
6273 | gfc_init_block (&block); |
6274 | gfc_init_block (&post); |
6275 | gfc_init_block (&final_block); |
6276 | |
6277 | /* STAT= (and maybe ERRMSG=) is present. */ |
6278 | if (code->expr1) |
6279 | { |
6280 | /* STAT=. */ |
6281 | tree gfc_int4_type_node = gfc_get_int_type (4); |
6282 | stat = gfc_create_var (gfc_int4_type_node, "stat" ); |
6283 | |
6284 | /* ERRMSG= only makes sense with STAT=. */ |
6285 | if (code->expr2) |
6286 | { |
6287 | gfc_init_se (&se, NULL); |
6288 | se.want_pointer = 1; |
6289 | gfc_conv_expr_lhs (se: &se, expr: code->expr2); |
6290 | errmsg = se.expr; |
6291 | errlen = se.string_length; |
6292 | } |
6293 | else |
6294 | { |
6295 | errmsg = null_pointer_node; |
6296 | errlen = build_int_cst (gfc_charlen_type_node, 0); |
6297 | } |
6298 | |
6299 | /* GOTO destinations. */ |
6300 | label_errmsg = gfc_build_label_decl (NULL_TREE); |
6301 | label_finish = gfc_build_label_decl (NULL_TREE); |
6302 | TREE_USED (label_finish) = 0; |
6303 | } |
6304 | |
6305 | /* When an expr3 is present evaluate it only once. The standards prevent a |
6306 | dependency of expr3 on the objects in the allocate list. An expr3 can |
6307 | be pre-evaluated in all cases. One just has to make sure, to use the |
6308 | correct way, i.e., to get the descriptor or to get a reference |
6309 | expression. */ |
6310 | if (code->expr3) |
6311 | { |
6312 | bool vtab_needed = false, temp_var_needed = false, |
6313 | temp_obj_created = false; |
6314 | |
6315 | is_coarray = gfc_is_coarray (code->expr3); |
6316 | |
6317 | if (code->expr3->expr_type == EXPR_FUNCTION && !code->expr3->mold |
6318 | && (gfc_is_class_array_function (code->expr3) |
6319 | || gfc_is_alloc_class_scalar_function (code->expr3))) |
6320 | code->expr3->must_finalize = 1; |
6321 | |
6322 | /* Figure whether we need the vtab from expr3. */ |
6323 | for (al = code->ext.alloc.list; !vtab_needed && al != NULL; |
6324 | al = al->next) |
6325 | vtab_needed = (al->expr->ts.type == BT_CLASS); |
6326 | |
6327 | gfc_init_se (&se, NULL); |
6328 | /* When expr3 is a variable, i.e., a very simple expression, |
6329 | then convert it once here. */ |
6330 | if (code->expr3->expr_type == EXPR_VARIABLE |
6331 | || code->expr3->expr_type == EXPR_ARRAY |
6332 | || code->expr3->expr_type == EXPR_CONSTANT) |
6333 | { |
6334 | if (!code->expr3->mold |
6335 | || code->expr3->ts.type == BT_CHARACTER |
6336 | || vtab_needed |
6337 | || code->ext.alloc.arr_spec_from_expr3) |
6338 | { |
6339 | /* Convert expr3 to a tree. For all "simple" expression just |
6340 | get the descriptor or the reference, respectively, depending |
6341 | on the rank of the expr. */ |
6342 | if (code->ext.alloc.arr_spec_from_expr3 || code->expr3->rank != 0) |
6343 | gfc_conv_expr_descriptor (&se, code->expr3); |
6344 | else |
6345 | { |
6346 | gfc_conv_expr_reference (se: &se, expr: code->expr3); |
6347 | |
6348 | /* gfc_conv_expr_reference wraps POINTER_PLUS_EXPR in a |
6349 | NOP_EXPR, which prevents gfortran from getting the vptr |
6350 | from the source=-expression. Remove the NOP_EXPR and go |
6351 | with the POINTER_PLUS_EXPR in this case. */ |
6352 | if (code->expr3->ts.type == BT_CLASS |
6353 | && TREE_CODE (se.expr) == NOP_EXPR |
6354 | && (TREE_CODE (TREE_OPERAND (se.expr, 0)) |
6355 | == POINTER_PLUS_EXPR |
6356 | || is_coarray)) |
6357 | se.expr = TREE_OPERAND (se.expr, 0); |
6358 | } |
6359 | /* Create a temp variable only for component refs to prevent |
6360 | having to go through the full deref-chain each time and to |
6361 | simplify computation of array properties. */ |
6362 | temp_var_needed = TREE_CODE (se.expr) == COMPONENT_REF; |
6363 | } |
6364 | } |
6365 | else |
6366 | { |
6367 | /* In all other cases evaluate the expr3. */ |
6368 | symbol_attribute attr; |
6369 | /* Get the descriptor for all arrays, that are not allocatable or |
6370 | pointer, because the latter are descriptors already. |
6371 | The exception are function calls returning a class object: |
6372 | The descriptor is stored in their results _data component, which |
6373 | is easier to access, when first a temporary variable for the |
6374 | result is created and the descriptor retrieved from there. */ |
6375 | attr = gfc_expr_attr (code->expr3); |
6376 | if (code->expr3->rank != 0 |
6377 | && ((!attr.allocatable && !attr.pointer) |
6378 | || (code->expr3->expr_type == EXPR_FUNCTION |
6379 | && (code->expr3->ts.type != BT_CLASS |
6380 | || (code->expr3->value.function.isym |
6381 | && code->expr3->value.function.isym |
6382 | ->transformational))))) |
6383 | gfc_conv_expr_descriptor (&se, code->expr3); |
6384 | else |
6385 | gfc_conv_expr_reference (se: &se, expr: code->expr3); |
6386 | if (code->expr3->ts.type == BT_CLASS) |
6387 | gfc_conv_class_to_class (&se, code->expr3, |
6388 | code->expr3->ts, |
6389 | false, true, |
6390 | false, false); |
6391 | temp_obj_created = temp_var_needed = !VAR_P (se.expr); |
6392 | } |
6393 | gfc_add_block_to_block (&block, &se.pre); |
6394 | if (code->expr3->must_finalize) |
6395 | { |
6396 | gfc_add_block_to_block (&final_block, &se.finalblock); |
6397 | gfc_add_block_to_block (&final_block, &se.post); |
6398 | } |
6399 | else |
6400 | gfc_add_block_to_block (&post, &se.post); |
6401 | |
6402 | /* Special case when string in expr3 is zero. */ |
6403 | if (code->expr3->ts.type == BT_CHARACTER |
6404 | && integer_zerop (se.string_length)) |
6405 | { |
6406 | gfc_init_se (&se, NULL); |
6407 | temp_var_needed = false; |
6408 | expr3_len = build_zero_cst (gfc_charlen_type_node); |
6409 | e3_is = E3_MOLD; |
6410 | } |
6411 | /* Prevent aliasing, i.e., se.expr may be already a |
6412 | variable declaration. */ |
6413 | else if (se.expr != NULL_TREE && temp_var_needed) |
6414 | { |
6415 | tree var, desc; |
6416 | tmp = GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)) || is_coarray ? |
6417 | se.expr |
6418 | : build_fold_indirect_ref_loc (input_location, se.expr); |
6419 | |
6420 | /* Get the array descriptor and prepare it to be assigned to the |
6421 | temporary variable var. For classes the array descriptor is |
6422 | in the _data component and the object goes into the |
6423 | GFC_DECL_SAVED_DESCRIPTOR. */ |
6424 | if (code->expr3->ts.type == BT_CLASS |
6425 | && code->expr3->rank != 0) |
6426 | { |
6427 | /* When an array_ref was in expr3, then the descriptor is the |
6428 | first operand. */ |
6429 | if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (tmp)) || is_coarray) |
6430 | { |
6431 | desc = TREE_OPERAND (tmp, 0); |
6432 | } |
6433 | else |
6434 | { |
6435 | desc = tmp; |
6436 | tmp = gfc_class_data_get (tmp); |
6437 | } |
6438 | if (code->ext.alloc.arr_spec_from_expr3) |
6439 | e3_is = E3_DESC; |
6440 | } |
6441 | else |
6442 | desc = !is_coarray ? se.expr |
6443 | : TREE_OPERAND (TREE_OPERAND (se.expr, 0), 0); |
6444 | /* We need a regular (non-UID) symbol here, therefore give a |
6445 | prefix. */ |
6446 | var = gfc_create_var (TREE_TYPE (tmp), "source" ); |
6447 | if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (tmp)) || is_coarray) |
6448 | { |
6449 | gfc_allocate_lang_decl (var); |
6450 | GFC_DECL_SAVED_DESCRIPTOR (var) = desc; |
6451 | } |
6452 | gfc_add_modify_loc (input_location, &block, var, tmp); |
6453 | |
6454 | expr3 = var; |
6455 | if (se.string_length) |
6456 | /* Evaluate it assuming that it also is complicated like expr3. */ |
6457 | expr3_len = gfc_evaluate_now (se.string_length, &block); |
6458 | } |
6459 | else |
6460 | { |
6461 | expr3 = se.expr; |
6462 | expr3_len = se.string_length; |
6463 | } |
6464 | |
6465 | /* Deallocate any allocatable components in expressions that use a |
	 temporary object, i.e. are not a simple alias of an EXPR_VARIABLE.
6467 | E.g. temporaries of a function call need freeing of their components |
6468 | here. Explicit derived type allocation of class entities uses expr3 |
6469 | to carry the default initializer. This must not be deallocated or |
6470 | finalized. */ |
6471 | if ((code->expr3->ts.type == BT_DERIVED |
6472 | || code->expr3->ts.type == BT_CLASS) |
6473 | && (code->expr3->expr_type != EXPR_VARIABLE || temp_obj_created) |
6474 | && code->expr3->ts.u.derived->attr.alloc_comp |
6475 | && !code->expr3->must_finalize |
6476 | && !code->ext.alloc.expr3_not_explicit) |
6477 | { |
6478 | tmp = gfc_deallocate_alloc_comp (code->expr3->ts.u.derived, |
6479 | expr3, code->expr3->rank); |
6480 | gfc_prepend_expr_to_block (&post, tmp); |
6481 | } |
6482 | |
6483 | /* Store what the expr3 is to be used for. */ |
6484 | if (e3_is == E3_UNSET) |
6485 | e3_is = expr3 != NULL_TREE ? |
6486 | (code->ext.alloc.arr_spec_from_expr3 ? |
6487 | E3_DESC |
6488 | : (code->expr3->mold ? E3_MOLD : E3_SOURCE)) |
6489 | : E3_UNSET; |
6490 | |
6491 | /* Figure how to get the _vtab entry. This also obtains the tree |
6492 | expression for accessing the _len component, because only |
6493 | unlimited polymorphic objects, which are a subcategory of class |
6494 | types, have a _len component. */ |
6495 | if (code->expr3->ts.type == BT_CLASS) |
6496 | { |
6497 | gfc_expr *rhs; |
6498 | tmp = expr3 != NULL_TREE && POINTER_TYPE_P (TREE_TYPE (expr3)) ? |
6499 | build_fold_indirect_ref (expr3): expr3; |
6500 | /* Polymorphic SOURCE: VPTR must be determined at run time. |
6501 | expr3 may be a temporary array declaration, therefore check for |
6502 | GFC_CLASS_TYPE_P before trying to get the _vptr component. */ |
6503 | if (tmp != NULL_TREE |
6504 | && (e3_is == E3_DESC |
6505 | || (GFC_CLASS_TYPE_P (TREE_TYPE (tmp)) |
6506 | && (VAR_P (tmp) || !code->expr3->ref)) |
6507 | || (VAR_P (tmp) && DECL_LANG_SPECIFIC (tmp)))) |
6508 | tmp = gfc_class_vptr_get (expr3); |
6509 | else |
6510 | { |
6511 | rhs = gfc_find_and_cut_at_last_class_ref (code->expr3); |
6512 | gfc_add_vptr_component (rhs); |
6513 | gfc_init_se (&se, NULL); |
6514 | se.want_pointer = 1; |
6515 | gfc_conv_expr (se: &se, expr: rhs); |
6516 | tmp = se.expr; |
6517 | gfc_free_expr (rhs); |
6518 | } |
6519 | /* Set the element size. */ |
6520 | expr3_esize = gfc_vptr_size_get (tmp); |
6521 | if (vtab_needed) |
6522 | expr3_vptr = tmp; |
6523 | /* Initialize the ref to the _len component. */ |
6524 | if (expr3_len == NULL_TREE && UNLIMITED_POLY (code->expr3)) |
6525 | { |
6526 | /* Same like for retrieving the _vptr. */ |
6527 | if (expr3 != NULL_TREE && !code->expr3->ref) |
6528 | expr3_len = gfc_class_len_get (expr3); |
6529 | else |
6530 | { |
6531 | rhs = gfc_find_and_cut_at_last_class_ref (code->expr3); |
6532 | gfc_add_len_component (rhs); |
6533 | gfc_init_se (&se, NULL); |
6534 | gfc_conv_expr (se: &se, expr: rhs); |
6535 | expr3_len = se.expr; |
6536 | gfc_free_expr (rhs); |
6537 | } |
6538 | } |
6539 | } |
6540 | else |
6541 | { |
6542 | /* When the object to allocate is polymorphic type, then it |
6543 | needs its vtab set correctly, so deduce the required _vtab |
6544 | and _len from the source expression. */ |
6545 | if (vtab_needed) |
6546 | { |
6547 | /* VPTR is fixed at compile time. */ |
6548 | gfc_symbol *vtab; |
6549 | |
6550 | vtab = gfc_find_vtab (&code->expr3->ts); |
6551 | gcc_assert (vtab); |
6552 | expr3_vptr = gfc_get_symbol_decl (vtab); |
6553 | expr3_vptr = gfc_build_addr_expr (NULL_TREE, |
6554 | expr3_vptr); |
6555 | } |
6556 | /* _len component needs to be set, when ts is a character |
6557 | array. */ |
6558 | if (expr3_len == NULL_TREE |
6559 | && code->expr3->ts.type == BT_CHARACTER) |
6560 | { |
6561 | if (code->expr3->ts.u.cl |
6562 | && code->expr3->ts.u.cl->length) |
6563 | { |
6564 | gfc_init_se (&se, NULL); |
6565 | gfc_conv_expr (se: &se, expr: code->expr3->ts.u.cl->length); |
6566 | gfc_add_block_to_block (&block, &se.pre); |
6567 | expr3_len = gfc_evaluate_now (se.expr, &block); |
6568 | } |
6569 | gcc_assert (expr3_len); |
6570 | } |
6571 | /* For character arrays only the kind's size is needed, because |
6572 | the array mem_size is _len * (elem_size = kind_size). |
6573 | For all other get the element size in the normal way. */ |
6574 | if (code->expr3->ts.type == BT_CHARACTER) |
6575 | expr3_esize = TYPE_SIZE_UNIT ( |
6576 | gfc_get_char_type (code->expr3->ts.kind)); |
6577 | else |
6578 | expr3_esize = TYPE_SIZE_UNIT ( |
6579 | gfc_typenode_for_spec (&code->expr3->ts)); |
6580 | } |
6581 | gcc_assert (expr3_esize); |
6582 | expr3_esize = fold_convert (sizetype, expr3_esize); |
6583 | if (e3_is == E3_MOLD) |
6584 | /* The expr3 is no longer valid after this point. */ |
6585 | expr3 = NULL_TREE; |
6586 | } |
6587 | else if (code->ext.alloc.ts.type != BT_UNKNOWN) |
6588 | { |
6589 | /* Compute the explicit typespec given only once for all objects |
6590 | to allocate. */ |
6591 | if (code->ext.alloc.ts.type != BT_CHARACTER) |
6592 | expr3_esize = TYPE_SIZE_UNIT ( |
6593 | gfc_typenode_for_spec (&code->ext.alloc.ts)); |
6594 | else if (code->ext.alloc.ts.u.cl->length != NULL) |
6595 | { |
6596 | gfc_expr *sz; |
6597 | sz = gfc_copy_expr (code->ext.alloc.ts.u.cl->length); |
6598 | gfc_init_se (&se_sz, NULL); |
6599 | gfc_conv_expr (se: &se_sz, expr: sz); |
6600 | gfc_free_expr (sz); |
6601 | tmp = gfc_get_char_type (code->ext.alloc.ts.kind); |
6602 | tmp = TYPE_SIZE_UNIT (tmp); |
6603 | tmp = fold_convert (TREE_TYPE (se_sz.expr), tmp); |
6604 | gfc_add_block_to_block (&block, &se_sz.pre); |
6605 | expr3_esize = fold_build2_loc (input_location, MULT_EXPR, |
6606 | TREE_TYPE (se_sz.expr), |
6607 | tmp, se_sz.expr); |
6608 | expr3_esize = gfc_evaluate_now (expr3_esize, &block); |
6609 | } |
6610 | else |
6611 | expr3_esize = NULL_TREE; |
6612 | } |
6613 | |
6614 | /* The routine gfc_trans_assignment () already implements all |
6615 | techniques needed. Unfortunately we may have a temporary |
6616 | variable for the source= expression here. When that is the |
6617 | case convert this variable into a temporary gfc_expr of type |
     EXPR_VARIABLE and use it as rhs for the assignment.  The
6619 | advantage is, that we get scalarizer support for free, |
6620 | don't have to take care about scalar to array treatment and |
     will benefit from every enhancement gfc_trans_assignment ()
6622 | gets. |
6623 | No need to check whether e3_is is E3_UNSET, because that is |
6624 | done by expr3 != NULL_TREE. |
6625 | Exclude variables since the following block does not handle |
6626 | array sections. In any case, there is no harm in sending |
6627 | variables to gfc_trans_assignment because there is no |
6628 | evaluation of variables. */ |
6629 | if (code->expr3) |
6630 | { |
6631 | if (code->expr3->expr_type != EXPR_VARIABLE |
6632 | && e3_is != E3_MOLD && expr3 != NULL_TREE |
6633 | && DECL_P (expr3) && DECL_ARTIFICIAL (expr3)) |
6634 | { |
6635 | /* Build a temporary symtree and symbol. Do not add it to the current |
	     namespace to prevent accidentally modifying a colliding
6637 | symbol's as. */ |
6638 | newsym = XCNEW (gfc_symtree); |
6639 | /* The name of the symtree should be unique, because gfc_create_var () |
6640 | took care about generating the identifier. */ |
6641 | newsym->name |
6642 | = gfc_get_string ("%s" , IDENTIFIER_POINTER (DECL_NAME (expr3))); |
6643 | newsym->n.sym = gfc_new_symbol (newsym->name, NULL); |
6644 | /* The backend_decl is known. It is expr3, which is inserted |
6645 | here. */ |
6646 | newsym->n.sym->backend_decl = expr3; |
6647 | e3rhs = gfc_get_expr (); |
6648 | e3rhs->rank = code->expr3->rank; |
6649 | e3rhs->symtree = newsym; |
6650 | /* Mark the symbol referenced or gfc_trans_assignment will bug. */ |
6651 | newsym->n.sym->attr.referenced = 1; |
6652 | e3rhs->expr_type = EXPR_VARIABLE; |
6653 | e3rhs->where = code->expr3->where; |
	  /* Set the symbol's type; up to now it was BT_UNKNOWN.  */
6655 | if (IS_CLASS_ARRAY (code->expr3) |
6656 | && code->expr3->expr_type == EXPR_FUNCTION |
6657 | && code->expr3->value.function.isym |
6658 | && code->expr3->value.function.isym->transformational) |
6659 | { |
6660 | e3rhs->ts = CLASS_DATA (code->expr3)->ts; |
6661 | } |
6662 | else if (code->expr3->ts.type == BT_CLASS |
6663 | && !GFC_CLASS_TYPE_P (TREE_TYPE (expr3))) |
6664 | e3rhs->ts = CLASS_DATA (code->expr3)->ts; |
6665 | else |
6666 | e3rhs->ts = code->expr3->ts; |
6667 | newsym->n.sym->ts = e3rhs->ts; |
6668 | /* Check whether the expr3 is array valued. */ |
6669 | if (e3rhs->rank) |
6670 | { |
6671 | gfc_array_spec *arr; |
6672 | arr = gfc_get_array_spec (); |
6673 | arr->rank = e3rhs->rank; |
6674 | arr->type = AS_DEFERRED; |
6675 | /* Set the dimension and pointer attribute for arrays |
6676 | to be on the safe side. */ |
6677 | newsym->n.sym->attr.dimension = 1; |
6678 | newsym->n.sym->attr.pointer = 1; |
6679 | newsym->n.sym->as = arr; |
6680 | if (IS_CLASS_ARRAY (code->expr3) |
6681 | && code->expr3->expr_type == EXPR_FUNCTION |
6682 | && code->expr3->value.function.isym |
6683 | && code->expr3->value.function.isym->transformational) |
6684 | { |
6685 | gfc_array_spec *tarr; |
6686 | tarr = gfc_get_array_spec (); |
6687 | *tarr = *arr; |
6688 | e3rhs->ts.u.derived->as = tarr; |
6689 | } |
6690 | gfc_add_full_array_ref (e3rhs, arr); |
6691 | } |
6692 | else if (POINTER_TYPE_P (TREE_TYPE (expr3))) |
6693 | newsym->n.sym->attr.pointer = 1; |
6694 | /* The string length is known, too. Set it for char arrays. */ |
6695 | if (e3rhs->ts.type == BT_CHARACTER) |
6696 | newsym->n.sym->ts.u.cl->backend_decl = expr3_len; |
6697 | gfc_commit_symbol (newsym->n.sym); |
6698 | } |
6699 | else |
6700 | e3rhs = gfc_copy_expr (code->expr3); |
6701 | |
6702 | // We need to propagate the bounds of the expr3 for source=/mold=. |
6703 | // However, for non-named arrays, the lbound has to be 1 and neither the |
6704 | // bound used inside the called function even when returning an |
6705 | // allocatable/pointer nor the zero used internally. |
6706 | if (e3_is == E3_DESC |
6707 | && code->expr3->expr_type != EXPR_VARIABLE) |
6708 | e3_has_nodescriptor = true; |
6709 | } |
6710 | |
6711 | /* Loop over all objects to allocate. */ |
6712 | for (al = code->ext.alloc.list; al != NULL; al = al->next) |
6713 | { |
6714 | expr = gfc_copy_expr (al->expr); |
6715 | /* UNLIMITED_POLY () needs the _data component to be set, when |
6716 | expr is a unlimited polymorphic object. But the _data component |
6717 | has not been set yet, so check the derived type's attr for the |
6718 | unlimited polymorphic flag to be safe. */ |
6719 | upoly_expr = UNLIMITED_POLY (expr) |
6720 | || (expr->ts.type == BT_DERIVED |
6721 | && expr->ts.u.derived->attr.unlimited_polymorphic); |
6722 | gfc_init_se (&se, NULL); |
6723 | |
6724 | /* For class types prepare the expressions to ref the _vptr |
6725 | and the _len component. The latter for unlimited polymorphic |
6726 | types only. */ |
6727 | if (expr->ts.type == BT_CLASS) |
6728 | { |
6729 | gfc_expr *expr_ref_vptr, *expr_ref_len; |
6730 | gfc_add_data_component (expr); |
6731 | /* Prep the vptr handle. */ |
6732 | expr_ref_vptr = gfc_copy_expr (al->expr); |
6733 | gfc_add_vptr_component (expr_ref_vptr); |
6734 | se.want_pointer = 1; |
6735 | gfc_conv_expr (se: &se, expr: expr_ref_vptr); |
6736 | al_vptr = se.expr; |
6737 | se.want_pointer = 0; |
6738 | gfc_free_expr (expr_ref_vptr); |
6739 | /* Allocated unlimited polymorphic objects always have a _len |
6740 | component. */ |
6741 | if (upoly_expr) |
6742 | { |
6743 | expr_ref_len = gfc_copy_expr (al->expr); |
6744 | gfc_add_len_component (expr_ref_len); |
6745 | gfc_conv_expr (se: &se, expr: expr_ref_len); |
6746 | al_len = se.expr; |
6747 | gfc_free_expr (expr_ref_len); |
6748 | } |
6749 | else |
6750 | /* In a loop ensure that all loop variable dependent variables |
6751 | are initialized at the same spot in all execution paths. */ |
6752 | al_len = NULL_TREE; |
6753 | } |
6754 | else |
6755 | al_vptr = al_len = NULL_TREE; |
6756 | |
6757 | se.want_pointer = 1; |
6758 | se.descriptor_only = 1; |
6759 | |
6760 | gfc_conv_expr (se: &se, expr); |
6761 | if (expr->ts.type == BT_CHARACTER && expr->ts.deferred) |
6762 | /* se.string_length now stores the .string_length variable of expr |
6763 | needed to allocate character(len=:) arrays. */ |
6764 | al_len = se.string_length; |
6765 | |
6766 | al_len_needs_set = al_len != NULL_TREE; |
6767 | /* When allocating an array one cannot use much of the |
6768 | pre-evaluated expr3 expressions, because for most of them the |
6769 | scalarizer is needed which is not available in the pre-evaluation |
6770 | step. Therefore gfc_array_allocate () is responsible (and able) |
6771 | to handle the complete array allocation. Only the element size |
6772 | needs to be provided, which is done most of the time by the |
6773 | pre-evaluation step. */ |
6774 | nelems = NULL_TREE; |
6775 | if (expr3_len && (code->expr3->ts.type == BT_CHARACTER |
6776 | || code->expr3->ts.type == BT_CLASS)) |
6777 | { |
6778 | /* When al is an array, then the element size for each element |
6779 | in the array is needed, which is the product of the len and |
6780 | esize for char arrays. For unlimited polymorphics len can be |
6781 | zero, therefore take the maximum of len and one. */ |
6782 | tmp = fold_build2_loc (input_location, MAX_EXPR, |
6783 | TREE_TYPE (expr3_len), |
6784 | expr3_len, fold_convert (TREE_TYPE (expr3_len), |
6785 | integer_one_node)); |
6786 | tmp = fold_build2_loc (input_location, MULT_EXPR, |
6787 | TREE_TYPE (expr3_esize), expr3_esize, |
6788 | fold_convert (TREE_TYPE (expr3_esize), tmp)); |
6789 | } |
6790 | else |
6791 | tmp = expr3_esize; |
6792 | |
6793 | if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, |
6794 | label_finish, tmp, &nelems, |
6795 | e3rhs ? e3rhs : code->expr3, |
6796 | e3_is == E3_DESC ? expr3 : NULL_TREE, |
6797 | e3_has_nodescriptor)) |
6798 | { |
6799 | /* A scalar or derived type. First compute the size to |
6800 | allocate. |
6801 | |
6802 | expr3_len is set when expr3 is an unlimited polymorphic |
6803 | object or a deferred length string. */ |
6804 | if (expr3_len != NULL_TREE) |
6805 | { |
6806 | tmp = fold_convert (TREE_TYPE (expr3_esize), expr3_len); |
6807 | tmp = fold_build2_loc (input_location, MULT_EXPR, |
6808 | TREE_TYPE (expr3_esize), |
6809 | expr3_esize, tmp); |
6810 | if (code->expr3->ts.type != BT_CLASS) |
6811 | /* expr3 is a deferred length string, i.e., we are |
6812 | done. */ |
6813 | memsz = tmp; |
6814 | else |
6815 | { |
		  /* For unlimited polymorphic entities build
6817 | (len > 0) ? element_size * len : element_size |
6818 | to compute the number of bytes to allocate. |
6819 | This allows the allocation of unlimited polymorphic |
6820 | objects from an expr3 that is also unlimited |
6821 | polymorphic and stores a _len dependent object, |
6822 | e.g., a string. */ |
6823 | memsz = fold_build2_loc (input_location, GT_EXPR, |
6824 | logical_type_node, expr3_len, |
6825 | build_zero_cst |
6826 | (TREE_TYPE (expr3_len))); |
6827 | memsz = fold_build3_loc (input_location, COND_EXPR, |
6828 | TREE_TYPE (expr3_esize), |
6829 | memsz, tmp, expr3_esize); |
6830 | } |
6831 | } |
6832 | else if (expr3_esize != NULL_TREE) |
6833 | /* Any other object in expr3 just needs element size in |
6834 | bytes. */ |
6835 | memsz = expr3_esize; |
6836 | else if ((expr->ts.type == BT_CHARACTER && expr->ts.deferred) |
6837 | || (upoly_expr |
6838 | && code->ext.alloc.ts.type == BT_CHARACTER)) |
6839 | { |
6840 | /* Allocating deferred length char arrays need the length |
6841 | to allocate in the alloc_type_spec. But also unlimited |
6842 | polymorphic objects may be allocated as char arrays. |
6843 | Both are handled here. */ |
6844 | gfc_init_se (&se_sz, NULL); |
6845 | gfc_conv_expr (se: &se_sz, expr: code->ext.alloc.ts.u.cl->length); |
6846 | gfc_add_block_to_block (&se.pre, &se_sz.pre); |
6847 | se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre); |
6848 | gfc_add_block_to_block (&se.pre, &se_sz.post); |
6849 | expr3_len = se_sz.expr; |
6850 | tmp_expr3_len_flag = true; |
6851 | tmp = TYPE_SIZE_UNIT ( |
6852 | gfc_get_char_type (code->ext.alloc.ts.kind)); |
6853 | memsz = fold_build2_loc (input_location, MULT_EXPR, |
6854 | TREE_TYPE (tmp), |
6855 | fold_convert (TREE_TYPE (tmp), |
6856 | expr3_len), |
6857 | tmp); |
6858 | } |
6859 | else if (expr->ts.type == BT_CHARACTER) |
6860 | { |
6861 | /* Compute the number of bytes needed to allocate a fixed |
6862 | length char array. */ |
6863 | gcc_assert (se.string_length != NULL_TREE); |
6864 | tmp = TYPE_SIZE_UNIT (gfc_get_char_type (expr->ts.kind)); |
6865 | memsz = fold_build2_loc (input_location, MULT_EXPR, |
6866 | TREE_TYPE (tmp), tmp, |
6867 | fold_convert (TREE_TYPE (tmp), |
6868 | se.string_length)); |
6869 | } |
6870 | else if (code->ext.alloc.ts.type != BT_UNKNOWN) |
6871 | /* Handle all types, where the alloc_type_spec is set. */ |
6872 | memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts)); |
6873 | else |
6874 | /* Handle size computation of the type declared to alloc. */ |
6875 | memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr))); |
6876 | |
6877 | /* Store the caf-attributes for latter use. */ |
6878 | if (flag_coarray == GFC_FCOARRAY_LIB |
6879 | && (caf_attr = gfc_caf_attr (expr, i: true, r: &caf_refs_comp)) |
6880 | .codimension) |
6881 | { |
6882 | /* Scalar allocatable components in coarray'ed derived types make |
6883 | it here and are treated now. */ |
6884 | tree caf_decl, token; |
6885 | gfc_se caf_se; |
6886 | |
6887 | is_coarray = true; |
6888 | /* Set flag, to add synchronize after the allocate. */ |
6889 | needs_caf_sync = needs_caf_sync |
6890 | || caf_attr.coarray_comp || !caf_refs_comp; |
6891 | |
6892 | gfc_init_se (&caf_se, NULL); |
6893 | |
6894 | caf_decl = gfc_get_tree_for_caf_expr (expr); |
6895 | gfc_get_caf_token_offset (&caf_se, &token, NULL, caf_decl, |
6896 | NULL_TREE, NULL); |
6897 | gfc_add_block_to_block (&se.pre, &caf_se.pre); |
6898 | gfc_allocate_allocatable (&se.pre, se.expr, memsz, |
6899 | gfc_build_addr_expr (NULL_TREE, token), |
6900 | NULL_TREE, NULL_TREE, NULL_TREE, |
6901 | label_finish, expr, 1); |
6902 | } |
6903 | /* Allocate - for non-pointers with re-alloc checking. */ |
6904 | else if (gfc_expr_attr (expr).allocatable) |
6905 | gfc_allocate_allocatable (&se.pre, se.expr, memsz, |
6906 | NULL_TREE, stat, errmsg, errlen, |
6907 | label_finish, expr, 0); |
6908 | else |
6909 | gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat); |
6910 | } |
6911 | else |
6912 | { |
6913 | /* Allocating coarrays needs a sync after the allocate executed. |
6914 | Set the flag to add the sync after all objects are allocated. */ |
6915 | if (flag_coarray == GFC_FCOARRAY_LIB |
6916 | && (caf_attr = gfc_caf_attr (expr, i: true, r: &caf_refs_comp)) |
6917 | .codimension) |
6918 | { |
6919 | is_coarray = true; |
6920 | needs_caf_sync = needs_caf_sync |
6921 | || caf_attr.coarray_comp || !caf_refs_comp; |
6922 | } |
6923 | |
6924 | if (expr->ts.type == BT_CHARACTER && al_len != NULL_TREE |
6925 | && expr3_len != NULL_TREE) |
6926 | { |
6927 | /* Arrays need to have a _len set before the array |
6928 | descriptor is filled. */ |
6929 | gfc_add_modify (&block, al_len, |
6930 | fold_convert (TREE_TYPE (al_len), expr3_len)); |
6931 | /* Prevent setting the length twice. */ |
6932 | al_len_needs_set = false; |
6933 | } |
6934 | else if (expr->ts.type == BT_CHARACTER && al_len != NULL_TREE |
6935 | && code->ext.alloc.ts.u.cl->length) |
6936 | { |
6937 | /* Cover the cases where a string length is explicitly |
6938 | specified by a type spec for deferred length character |
6939 | arrays or unlimited polymorphic objects without a |
6940 | source= or mold= expression. */ |
6941 | gfc_init_se (&se_sz, NULL); |
6942 | gfc_conv_expr (se: &se_sz, expr: code->ext.alloc.ts.u.cl->length); |
6943 | gfc_add_block_to_block (&block, &se_sz.pre); |
6944 | gfc_add_modify (&block, al_len, |
6945 | fold_convert (TREE_TYPE (al_len), |
6946 | se_sz.expr)); |
6947 | al_len_needs_set = false; |
6948 | } |
6949 | } |
6950 | |
6951 | gfc_add_block_to_block (&block, &se.pre); |
6952 | |
6953 | /* Error checking -- Note: ERRMSG only makes sense with STAT. */ |
6954 | if (code->expr1) |
6955 | { |
6956 | tmp = build1_v (GOTO_EXPR, label_errmsg); |
6957 | parm = fold_build2_loc (input_location, NE_EXPR, |
6958 | logical_type_node, stat, |
6959 | build_int_cst (TREE_TYPE (stat), 0)); |
6960 | tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, |
6961 | gfc_unlikely (parm, PRED_FORTRAN_FAIL_ALLOC), |
6962 | tmp, build_empty_stmt (input_location)); |
6963 | gfc_add_expr_to_block (&block, tmp); |
6964 | } |
6965 | |
6966 | /* Set the vptr only when no source= is set. When source= is set, then |
6967 | the trans_assignment below will set the vptr. */ |
6968 | if (al_vptr != NULL_TREE && (!code->expr3 || code->expr3->mold)) |
6969 | { |
6970 | if (expr3_vptr != NULL_TREE) |
6971 | /* The vtab is already known, so just assign it. */ |
6972 | gfc_add_modify (&block, al_vptr, |
6973 | fold_convert (TREE_TYPE (al_vptr), expr3_vptr)); |
6974 | else |
6975 | { |
6976 | /* VPTR is fixed at compile time. */ |
6977 | gfc_symbol *vtab; |
6978 | gfc_typespec *ts; |
6979 | |
6980 | if (code->expr3) |
6981 | /* Although expr3 is pre-evaluated above, it may happen, |
6982 | that for arrays or in mold= cases the pre-evaluation |
6983 | was not successful. In these rare cases take the vtab |
6984 | from the typespec of expr3 here. */ |
6985 | ts = &code->expr3->ts; |
6986 | else if (code->ext.alloc.ts.type == BT_DERIVED || upoly_expr) |
6987 | /* The alloc_type_spec gives the type to allocate or the |
6988 | al is unlimited polymorphic, which enforces the use of |
6989 | an alloc_type_spec that is not necessarily a BT_DERIVED. */ |
6990 | ts = &code->ext.alloc.ts; |
6991 | else |
6992 | /* Prepare for setting the vtab as declared. */ |
6993 | ts = &expr->ts; |
6994 | |
6995 | vtab = gfc_find_vtab (ts); |
6996 | gcc_assert (vtab); |
6997 | tmp = gfc_build_addr_expr (NULL_TREE, |
6998 | gfc_get_symbol_decl (vtab)); |
6999 | gfc_add_modify (&block, al_vptr, |
7000 | fold_convert (TREE_TYPE (al_vptr), tmp)); |
7001 | } |
7002 | } |
7003 | |
7004 | /* Add assignment for string length. */ |
7005 | if (al_len != NULL_TREE && al_len_needs_set) |
7006 | { |
7007 | if (expr3_len != NULL_TREE) |
7008 | { |
7009 | gfc_add_modify (&block, al_len, |
7010 | fold_convert (TREE_TYPE (al_len), |
7011 | expr3_len)); |
7012 | /* When tmp_expr3_len_flag is set, then expr3_len is |
7013 | abused to carry the length information from the |
7014 | alloc_type. Clear it to prevent setting incorrect len |
7015 | information in future loop iterations. */ |
7016 | if (tmp_expr3_len_flag) |
7017 | /* No need to reset tmp_expr3_len_flag, because the |
		   presence of an expr3 cannot change within the
7019 | loop. */ |
7020 | expr3_len = NULL_TREE; |
7021 | } |
7022 | else if (code->ext.alloc.ts.type == BT_CHARACTER |
7023 | && code->ext.alloc.ts.u.cl->length) |
7024 | { |
7025 | /* Cover the cases where a string length is explicitly |
7026 | specified by a type spec for deferred length character |
7027 | arrays or unlimited polymorphic objects without a |
7028 | source= or mold= expression. */ |
7029 | if (expr3_esize == NULL_TREE || code->ext.alloc.ts.kind != 1) |
7030 | { |
7031 | gfc_init_se (&se_sz, NULL); |
7032 | gfc_conv_expr (se: &se_sz, expr: code->ext.alloc.ts.u.cl->length); |
7033 | gfc_add_block_to_block (&block, &se_sz.pre); |
7034 | gfc_add_modify (&block, al_len, |
7035 | fold_convert (TREE_TYPE (al_len), |
7036 | se_sz.expr)); |
7037 | } |
7038 | else |
7039 | gfc_add_modify (&block, al_len, |
7040 | fold_convert (TREE_TYPE (al_len), |
7041 | expr3_esize)); |
7042 | } |
7043 | else |
7044 | /* No length information needed, because type to allocate |
7045 | has no length. Set _len to 0. */ |
7046 | gfc_add_modify (&block, al_len, |
7047 | fold_convert (TREE_TYPE (al_len), |
7048 | integer_zero_node)); |
7049 | } |
7050 | |
7051 | init_expr = NULL; |
7052 | if (code->expr3 && !code->expr3->mold && e3_is != E3_MOLD) |
7053 | { |
7054 | /* Initialization via SOURCE block (or static default initializer). |
7055 | Switch off automatic reallocation since we have just done the |
7056 | ALLOCATE. */ |
7057 | int realloc_lhs = flag_realloc_lhs; |
7058 | gfc_expr *init_expr = gfc_expr_to_initialize (expr); |
7059 | gfc_expr *rhs = e3rhs ? e3rhs : gfc_copy_expr (code->expr3); |
7060 | flag_realloc_lhs = 0; |
7061 | |
7062 | /* Set the symbol to be artificial so that the result is not finalized. */ |
7063 | init_expr->symtree->n.sym->attr.artificial = 1; |
7064 | tmp = gfc_trans_assignment (init_expr, rhs, true, false, p: true, |
7065 | a: false); |
7066 | init_expr->symtree->n.sym->attr.artificial = 0; |
7067 | |
7068 | flag_realloc_lhs = realloc_lhs; |
7069 | /* Free the expression allocated for init_expr. */ |
7070 | gfc_free_expr (init_expr); |
7071 | if (rhs != e3rhs) |
7072 | gfc_free_expr (rhs); |
7073 | gfc_add_expr_to_block (&block, tmp); |
7074 | } |
7075 | /* Set KIND and LEN PDT components and allocate those that are |
7076 | parameterized. */ |
7077 | else if (expr->ts.type == BT_DERIVED |
7078 | && expr->ts.u.derived->attr.pdt_type) |
7079 | { |
7080 | if (code->expr3 && code->expr3->param_list) |
7081 | param_list = code->expr3->param_list; |
7082 | else if (expr->param_list) |
7083 | param_list = expr->param_list; |
7084 | else |
7085 | param_list = expr->symtree->n.sym->param_list; |
7086 | tmp = gfc_allocate_pdt_comp (expr->ts.u.derived, se.expr, |
7087 | expr->rank, param_list); |
7088 | gfc_add_expr_to_block (&block, tmp); |
7089 | } |
7090 | /* Ditto for CLASS expressions. */ |
7091 | else if (expr->ts.type == BT_CLASS |
7092 | && CLASS_DATA (expr)->ts.u.derived->attr.pdt_type) |
7093 | { |
7094 | if (code->expr3 && code->expr3->param_list) |
7095 | param_list = code->expr3->param_list; |
7096 | else if (expr->param_list) |
7097 | param_list = expr->param_list; |
7098 | else |
7099 | param_list = expr->symtree->n.sym->param_list; |
7100 | tmp = gfc_allocate_pdt_comp (CLASS_DATA (expr)->ts.u.derived, |
7101 | se.expr, expr->rank, param_list); |
7102 | gfc_add_expr_to_block (&block, tmp); |
7103 | } |
7104 | else if (code->expr3 && code->expr3->mold |
7105 | && code->expr3->ts.type == BT_CLASS) |
7106 | { |
7107 | /* Use class_init_assign to initialize expr. */ |
7108 | gfc_code *ini; |
7109 | ini = gfc_get_code (EXEC_INIT_ASSIGN); |
7110 | ini->expr1 = gfc_find_and_cut_at_last_class_ref (expr, is_mold: true); |
7111 | tmp = gfc_trans_class_init_assign (ini); |
7112 | gfc_free_statements (ini); |
7113 | gfc_add_expr_to_block (&block, tmp); |
7114 | } |
7115 | else if ((init_expr = allocate_get_initializer (code, expr))) |
7116 | { |
7117 | /* Use class_init_assign to initialize expr. */ |
7118 | gfc_code *ini; |
7119 | int realloc_lhs = flag_realloc_lhs; |
7120 | ini = gfc_get_code (EXEC_INIT_ASSIGN); |
7121 | ini->expr1 = gfc_expr_to_initialize (expr); |
7122 | ini->expr2 = init_expr; |
7123 | flag_realloc_lhs = 0; |
7124 | tmp= gfc_trans_init_assign (ini); |
7125 | flag_realloc_lhs = realloc_lhs; |
7126 | gfc_free_statements (ini); |
	  /* Init_expr is freed by the above free_statements; just need to null
7128 | it here. */ |
7129 | init_expr = NULL; |
7130 | gfc_add_expr_to_block (&block, tmp); |
7131 | } |
7132 | |
7133 | /* Nullify all pointers in derived type coarrays. This registers a |
7134 | token for them which allows their allocation. */ |
7135 | if (is_coarray) |
7136 | { |
7137 | gfc_symbol *type = NULL; |
7138 | symbol_attribute caf_attr; |
7139 | int rank = 0; |
7140 | if (code->ext.alloc.ts.type == BT_DERIVED |
7141 | && code->ext.alloc.ts.u.derived->attr.pointer_comp) |
7142 | { |
7143 | type = code->ext.alloc.ts.u.derived; |
7144 | rank = type->attr.dimension ? type->as->rank : 0; |
7145 | gfc_clear_attr (&caf_attr); |
7146 | } |
7147 | else if (expr->ts.type == BT_DERIVED |
7148 | && expr->ts.u.derived->attr.pointer_comp) |
7149 | { |
7150 | type = expr->ts.u.derived; |
7151 | rank = expr->rank; |
7152 | caf_attr = gfc_caf_attr (expr, i: true); |
7153 | } |
7154 | |
7155 | /* Initialize the tokens of pointer components in derived type |
7156 | coarrays. */ |
7157 | if (type) |
7158 | { |
7159 | tmp = (caf_attr.codimension && !caf_attr.dimension) |
7160 | ? gfc_conv_descriptor_data_get (se.expr) : se.expr; |
7161 | tmp = gfc_nullify_alloc_comp (type, tmp, rank, |
7162 | cm: GFC_STRUCTURE_CAF_MODE_IN_COARRAY); |
7163 | gfc_add_expr_to_block (&block, tmp); |
7164 | } |
7165 | } |
7166 | |
7167 | gfc_free_expr (expr); |
7168 | } // for-loop |
7169 | |
7170 | if (e3rhs) |
7171 | { |
7172 | if (newsym) |
7173 | { |
7174 | gfc_free_symbol (newsym->n.sym); |
7175 | XDELETE (newsym); |
7176 | } |
7177 | gfc_free_expr (e3rhs); |
7178 | } |
7179 | /* STAT. */ |
7180 | if (code->expr1) |
7181 | { |
7182 | tmp = build1_v (LABEL_EXPR, label_errmsg); |
7183 | gfc_add_expr_to_block (&block, tmp); |
7184 | } |
7185 | |
7186 | /* ERRMSG - only useful if STAT is present. */ |
7187 | if (code->expr1 && code->expr2) |
7188 | { |
7189 | const char *msg = "Attempt to allocate an allocated object" ; |
7190 | const char *oommsg = "Insufficient virtual memory" ; |
7191 | tree slen, dlen, errmsg_str, oom_str, oom_loc; |
7192 | stmtblock_t errmsg_block; |
7193 | |
7194 | gfc_init_block (&errmsg_block); |
7195 | |
7196 | errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG" ); |
7197 | gfc_add_modify (&errmsg_block, errmsg_str, |
7198 | gfc_build_addr_expr (pchar_type_node, |
7199 | gfc_build_localized_cstring_const (msg))); |
7200 | |
7201 | slen = build_int_cst (gfc_charlen_type_node, strlen (s: msg)); |
7202 | dlen = gfc_get_expr_charlen (code->expr2); |
7203 | slen = fold_build2_loc (input_location, MIN_EXPR, |
7204 | TREE_TYPE (slen), dlen, slen); |
7205 | |
7206 | gfc_trans_string_copy (&errmsg_block, dlen, errmsg, |
7207 | code->expr2->ts.kind, |
7208 | slen, errmsg_str, |
7209 | gfc_default_character_kind); |
7210 | dlen = gfc_finish_block (&errmsg_block); |
7211 | |
7212 | tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, |
7213 | stat, build_int_cst (TREE_TYPE (stat), |
7214 | LIBERROR_ALLOCATION)); |
7215 | |
7216 | tmp = build3_v (COND_EXPR, tmp, |
7217 | dlen, build_empty_stmt (input_location)); |
7218 | |
7219 | gfc_add_expr_to_block (&block, tmp); |
7220 | |
7221 | oom_str = gfc_create_var (pchar_type_node, "OOMMSG" ); |
7222 | oom_loc = gfc_build_localized_cstring_const (oommsg); |
7223 | gfc_add_modify (&errmsg_block, oom_str, |
7224 | gfc_build_addr_expr (pchar_type_node, oom_loc)); |
7225 | |
7226 | slen = build_int_cst (gfc_charlen_type_node, strlen (s: oommsg)); |
7227 | dlen = gfc_get_expr_charlen (code->expr2); |
7228 | slen = fold_build2_loc (input_location, MIN_EXPR, |
7229 | TREE_TYPE (slen), dlen, slen); |
7230 | |
7231 | gfc_trans_string_copy (&errmsg_block, dlen, errmsg, |
7232 | code->expr2->ts.kind, |
7233 | slen, oom_str, |
7234 | gfc_default_character_kind); |
7235 | dlen = gfc_finish_block (&errmsg_block); |
7236 | |
7237 | tmp = fold_build2_loc (input_location, EQ_EXPR, logical_type_node, |
7238 | stat, build_int_cst (TREE_TYPE (stat), |
7239 | LIBERROR_NO_MEMORY)); |
7240 | |
7241 | tmp = build3_v (COND_EXPR, tmp, |
7242 | dlen, build_empty_stmt (input_location)); |
7243 | |
7244 | gfc_add_expr_to_block (&block, tmp); |
7245 | } |
7246 | |
7247 | /* STAT block. */ |
7248 | if (code->expr1) |
7249 | { |
7250 | if (TREE_USED (label_finish)) |
7251 | { |
7252 | tmp = build1_v (LABEL_EXPR, label_finish); |
7253 | gfc_add_expr_to_block (&block, tmp); |
7254 | } |
7255 | |
7256 | gfc_init_se (&se, NULL); |
7257 | gfc_conv_expr_lhs (se: &se, expr: code->expr1); |
7258 | tmp = convert (TREE_TYPE (se.expr), stat); |
7259 | gfc_add_modify (&block, se.expr, tmp); |
7260 | } |
7261 | |
7262 | if (needs_caf_sync) |
7263 | { |
7264 | /* Add a sync all after the allocation has been executed. */ |
7265 | tree zero_size = build_zero_cst (size_type_node); |
7266 | tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all, |
7267 | 3, null_pointer_node, null_pointer_node, |
7268 | zero_size); |
7269 | gfc_add_expr_to_block (&post, tmp); |
7270 | } |
7271 | |
7272 | gfc_add_block_to_block (&block, &se.post); |
7273 | gfc_add_block_to_block (&block, &post); |
7274 | if (code->expr3 && code->expr3->must_finalize) |
7275 | gfc_add_block_to_block (&block, &final_block); |
7276 | |
7277 | return gfc_finish_block (&block); |
7278 | } |
7279 | |
7280 | |
/* Translate a DEALLOCATE statement.  For every object on the statement's
   allocation list, emit the runtime deallocation (array descriptor or
   scalar), the recursive deallocation of allocatable/PDT components, and
   coarray deregistration where applicable.  When STAT= and/or ERRMSG= are
   present, also emit the error bookkeeping: a stat variable, goto labels,
   and the error-message copy.  Returns the finished statement block.  */

tree
gfc_trans_deallocate (gfc_code *code)
{
  gfc_se se;
  gfc_alloc *al;
  tree apstat, pstat, stat, errmsg, errlen, tmp;
  tree label_finish, label_errmsg;
  stmtblock_t block;

  pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
  label_finish = label_errmsg = NULL_TREE;

  gfc_start_block (&block);

  /* Count the number of failed deallocations.  If deallocate() was
     called with STAT= , then set STAT to the count.  If deallocate
     was called with ERRMSG, then set ERRMSG to a string.  */
  if (code->expr1)
    {
      tree gfc_int4_type_node = gfc_get_int_type (4);

      /* 'stat' receives the library status; 'pstat' is its address,
	 passed to the deallocation runtime calls.  */
      stat = gfc_create_var (gfc_int4_type_node, "stat" );
      pstat = gfc_build_addr_expr (NULL_TREE, stat);

      /* GOTO destinations.  label_finish is only emitted later if some
	 deallocation call actually marks it used.  */
      label_errmsg = gfc_build_label_decl (NULL_TREE);
      label_finish = gfc_build_label_decl (NULL_TREE);
      TREE_USED (label_finish) = 0;
    }

  /* Set ERRMSG - only needed if STAT is available.  Evaluate the ERRMSG
     variable once up front; errmsg/errlen are reused for every object.  */
  if (code->expr1 && code->expr2)
    {
      gfc_init_se (&se, NULL);
      se.want_pointer = 1;
      gfc_conv_expr_lhs (se: &se, expr: code->expr2);
      errmsg = se.expr;
      errlen = se.string_length;
    }

  /* Translate the deallocation of each object in the list.  */
  for (al = code->ext.alloc.list; al != NULL; al = al->next)
    {
      /* Work on a copy of the expression so that a _data component
	 reference can be appended for CLASS objects without disturbing
	 the original parse tree.  */
      gfc_expr *expr = gfc_copy_expr (al->expr);
      bool is_coarray = false, is_coarray_array = false;
      int caf_mode = 0;

      gcc_assert (expr->expr_type == EXPR_VARIABLE);

      if (expr->ts.type == BT_CLASS)
	gfc_add_data_component (expr);

      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (se: &se, expr);

      /* Deallocate PDT components that are parameterized.  */
      tmp = NULL;
      if (expr->ts.type == BT_DERIVED
	  && expr->ts.u.derived->attr.pdt_type
	  && expr->symtree->n.sym->param_list)
	tmp = gfc_deallocate_pdt_comp (expr->ts.u.derived, se.expr, expr->rank);
      else if (expr->ts.type == BT_CLASS
	       && CLASS_DATA (expr)->ts.u.derived->attr.pdt_type
	       && expr->symtree->n.sym->param_list)
	tmp = gfc_deallocate_pdt_comp (CLASS_DATA (expr)->ts.u.derived,
				       se.expr, expr->rank);

      if (tmp)
	gfc_add_expr_to_block (&block, tmp);

      /* Determine the coarray status of the object to select the
	 deallocate/deregister mode used below.  */
      if (flag_coarray == GFC_FCOARRAY_LIB
	  || flag_coarray == GFC_FCOARRAY_SINGLE)
	{
	  bool comp_ref;
	  symbol_attribute caf_attr = gfc_caf_attr (expr, i: false, r: &comp_ref);
	  if (caf_attr.codimension)
	    {
	      is_coarray = true;
	      is_coarray_array = caf_attr.dimension || !comp_ref
		  || caf_attr.coarray_comp;

	      if (flag_coarray == GFC_FCOARRAY_LIB)
		/* When the expression to deallocate is referencing a
		   component, then only deallocate it, but do not
		   deregister.  */
		caf_mode = GFC_STRUCTURE_CAF_MODE_IN_COARRAY
		    | (comp_ref && !caf_attr.coarray_comp
		       ? GFC_STRUCTURE_CAF_MODE_DEALLOC_ONLY : 0);
	    }
	}

      /* Array (or coarray treated as array) case: deallocate allocatable
	 components first, then the object itself via its descriptor.  */
      if (expr->rank || is_coarray_array)
	{
	  gfc_ref *ref;

	  if (gfc_bt_struct (expr->ts.type)
	      && expr->ts.u.derived->attr.alloc_comp
	      && !gfc_is_finalizable (expr->ts.u.derived, NULL))
	    {
	      gfc_ref *last = NULL;

	      /* Find the last component reference, if any.  */
	      for (ref = expr->ref; ref; ref = ref->next)
		if (ref->type == REF_COMPONENT)
		  last = ref;

	      /* Do not deallocate the components of a derived type
		 ultimate pointer component.  */
	      if (!(last && last->u.c.component->attr.pointer)
		  && !(!last && expr->symtree->n.sym->attr.pointer))
		{
		  if (is_coarray && expr->rank == 0
		      && (!last || !last->u.c.component->attr.dimension)
		      && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)))
		    {
		      /* Add the ref to the data member only, when this is not
			 a regular array or deallocate_alloc_comp will try to
			 add another one.  */
		      tmp = gfc_conv_descriptor_data_get (se.expr);
		    }
		  else
		    tmp = se.expr;
		  tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp,
						   expr->rank, cm: caf_mode);
		  gfc_add_expr_to_block (&se.pre, tmp);
		}
	    }

	  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)))
	    {
	      /* Standard descriptor-based deallocation; for coarrays
		 choose between deallocate-only and full deregister.  */
	      gfc_coarray_deregtype caf_dtype;

	      if (is_coarray)
		caf_dtype = gfc_caf_is_dealloc_only (caf_mode)
		    ? GFC_CAF_COARRAY_DEALLOCATE_ONLY
		    : GFC_CAF_COARRAY_DEREGISTER;
	      else
		caf_dtype = GFC_CAF_COARRAY_NOCOARRAY;
	      tmp = gfc_deallocate_with_status (se.expr, pstat, errmsg, errlen,
						label_finish, false, expr,
						caf_dtype);
	      gfc_add_expr_to_block (&se.pre, tmp);
	    }
	  else if (TREE_CODE (se.expr) == COMPONENT_REF
		   && TREE_CODE (TREE_TYPE (se.expr)) == ARRAY_TYPE
		   && TREE_CODE (TREE_TYPE (TREE_TYPE (se.expr)))
			== RECORD_TYPE)
	    {
	      /* class.cc(finalize_component) generates these, when a
		 finalizable entity has a non-allocatable derived type array
		 component, which has allocatable components. Obtain the
		 derived type of the array and deallocate the allocatable
		 components. */
	      for (ref = expr->ref; ref; ref = ref->next)
		{
		  if (ref->u.c.component->attr.dimension
		      && ref->u.c.component->ts.type == BT_DERIVED)
		    break;
		}

	      if (ref && ref->u.c.component->ts.u.derived->attr.alloc_comp
		  && !gfc_is_finalizable (ref->u.c.component->ts.u.derived,
					  NULL))
		{
		  tmp = gfc_deallocate_alloc_comp
					(ref->u.c.component->ts.u.derived,
					 se.expr, expr->rank);
		  gfc_add_expr_to_block (&se.pre, tmp);
		}
	    }

	  /* For CLASS objects, reset the _vptr (and _len for unlimited
	     polymorphics) so the dynamic type is well-defined afterwards.  */
	  if (al->expr->ts.type == BT_CLASS)
	    {
	      gfc_reset_vptr (&se.pre, al->expr);
	      if (UNLIMITED_POLY (al->expr)
		  || (al->expr->ts.type == BT_DERIVED
		      && al->expr->ts.u.derived->attr.unlimited_polymorphic))
		/* Clear _len, too.  */
		gfc_reset_len (&se.pre, al->expr);
	    }
	}
      else
	{
	  /* Scalar case.  */
	  tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, label_finish,
						   false, al->expr,
						   al->expr->ts, NULL_TREE,
						   c: is_coarray);
	  gfc_add_expr_to_block (&se.pre, tmp);

	  /* Set to zero after deallocation.  */
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
				 se.expr,
				 build_int_cst (TREE_TYPE (se.expr), 0));
	  gfc_add_expr_to_block (&se.pre, tmp);

	  if (al->expr->ts.type == BT_CLASS)
	    {
	      gfc_reset_vptr (&se.pre, al->expr);
	      if (UNLIMITED_POLY (al->expr)
		  || (al->expr->ts.type == BT_DERIVED
		      && al->expr->ts.u.derived->attr.unlimited_polymorphic))
		/* Clear _len, too.  */
		gfc_reset_len (&se.pre, al->expr);
	    }
	}

      /* With STAT= present, branch to the error-message handling as soon
	 as one deallocation reports a nonzero status, skipping the rest
	 of the list.  */
      if (code->expr1)
	{
	  tree cond;

	  cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node, stat,
				  build_int_cst (TREE_TYPE (stat), 0));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC),
				 build1_v (GOTO_EXPR, label_errmsg),
				 build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&se.pre, tmp);
	}

      tmp = gfc_finish_block (&se.pre);
      gfc_add_expr_to_block (&block, tmp);
      gfc_free_expr (expr);
    }

  /* Target of the early-exit GOTO emitted above for each object.  */
  if (code->expr1)
    {
      tmp = build1_v (LABEL_EXPR, label_errmsg);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Set ERRMSG - only needed if STAT is available.  Copy the fixed error
     string into the user's ERRMSG variable when stat != 0.  */
  if (code->expr1 && code->expr2)
    {
      const char *msg = "Attempt to deallocate an unallocated object" ;
      stmtblock_t errmsg_block;
      tree errmsg_str, slen, dlen, cond;

      gfc_init_block (&errmsg_block);

      errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG" );
      gfc_add_modify (&errmsg_block, errmsg_str,
		      gfc_build_addr_expr (pchar_type_node,
			gfc_build_localized_cstring_const (msg)));
      slen = build_int_cst (gfc_charlen_type_node, strlen (s: msg));
      dlen = gfc_get_expr_charlen (code->expr2);

      gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
			     slen, errmsg_str, gfc_default_character_kind);
      tmp = gfc_finish_block (&errmsg_block);

      cond = fold_build2_loc (input_location, NE_EXPR, logical_type_node, stat,
			      build_int_cst (TREE_TYPE (stat), 0));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			     gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC), tmp,
			     build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
    }

  /* Only emit label_finish when some deallocation call marked it used.  */
  if (code->expr1 && TREE_USED (label_finish))
    {
      tmp = build1_v (LABEL_EXPR, label_finish);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Set STAT.  */
  if (code->expr1)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (se: &se, expr: code->expr1);
      tmp = convert (TREE_TYPE (se.expr), stat);
      gfc_add_modify (&block, se.expr, tmp);
    }

  return gfc_finish_block (&block);
}
7561 | |
7562 | #include "gt-fortran-trans-stmt.h" |
7563 | |