/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2025 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"
#include "cgraph.h"
#include "tree-ssa.h"
#include "value-range.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "gimple-walk.h"
#include "attribs.h"
#include "diagnostic-core.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.cc:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement at the end of the function, with gotos to that
      unique return site replacing the duplicates (see the sketch
      below).  */
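
/* A sketch of point 3 (hedged; the label name is artificial and the
   real dump format differs):

     if (c) return x;   ...   return x;

   becomes

     if (c) goto <retlab>;   ...   goto <retlab>;
     <retlab>:
     return x;

   Returns are only merged when their return values are literally the
   same tree (see lower_gimple_return below).  */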

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

/* Bitmap of LABEL_DECL uids for user labels moved into assume outlined
   functions.  */
static bitmap assume_labels;

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
static void lower_builtin_assume_aligned (gimple_stmt_iterator *);


/* Helper function for lower_function_body, called via walk_gimple_seq.
   Diagnose uses of user labels defined inside of assume attribute
   expressions.  */
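
/* For example (a hedged C-level sketch, not a regression test): given

     [[assume (({ lab: n > 0; }))]];
     goto lab;

   the "goto lab" is diagnosed here, because "lab" was moved into the
   outlined assumption function and can no longer be a jump target in
   this function.  */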

static tree
diagnose_assume_labels (tree *tp, int *, void *data)
{
  if (TREE_CODE (*tp) == LABEL_DECL
      && !DECL_ARTIFICIAL (*tp)
      && DECL_NAME (*tp)
      && bitmap_bit_p (assume_labels, DECL_UID (*tp)))
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      auto_diagnostic_group d;
      error_at (gimple_location (gsi_stmt (wi->gsi)),
                "reference to label %qD defined inside of %<assume%> "
                "attribute expression from outside of the attribute", *tp);
      inform (DECL_SOURCE_LOCATION (*tp), "%qD defined here", *tp);
    }
  return NULL_TREE;
}


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */
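
/* In outline (a summary of the code below): the single outermost
   GIMPLE_BIND is flattened by lower_gimple_bind, a missing fallthru
   return is synthesized if needed, and the representative labelled
   returns collected in data.return_statements are appended at the
   end of the function.  */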

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it will
         most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
          || (gimple_return_retval (data.return_statements.last ().stmt)
              != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
        {
          /* Remove the line number from the representative return statement.
             It now fills in for the fallthru too.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (t.stmt, UNKNOWN_LOCATION);
          may_fallthru = false;
        }
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  if (assume_labels)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      walk_gimple_seq (lowered_body, NULL, diagnose_assume_labels, &wi);
      BITMAP_FREE (assume_labels);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return lower_function_body ();
  }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */
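
/* A sketch of the effect (not a literal dump): the directive's body is
   lowered and spliced into the enclosing sequence right after the
   directive statement itself, whose body is then cleared:

     GIMPLE_OMP_PARALLEL <body>     =>     GIMPLE_OMP_PARALLEL
                                           <lowered body>  */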

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Create an artificial FUNCTION_DECL for assumption at LOC.  */

static tree
create_assumption_fn (location_t loc)
{
  tree name = clone_function_name_numbered (current_function_decl, "_assume");
  /* Temporarily, until we determine all the arguments.  */
  tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
  tree decl = build_decl (loc, FUNCTION_DECL, name, type);
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  tree attributes = DECL_ATTRIBUTES (current_function_decl);
  if (lookup_attribute ("noipa", attributes) == NULL)
    {
      attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
      if (lookup_attribute ("noinline", attributes) == NULL)
        attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
      if (lookup_attribute ("noclone", attributes) == NULL)
        attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
      if (lookup_attribute ("no_icf", attributes) == NULL)
        attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
    }
  DECL_ATTRIBUTES (decl) = attributes;
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  tree t = build_decl (DECL_SOURCE_LOCATION (decl),
                       RESULT_DECL, NULL_TREE, boolean_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;
  push_struct_function (decl);
  cfun->function_end_locus = loc;
  init_tree_ssa (cfun);
  return decl;
}

struct lower_assumption_data
{
  copy_body_data id;
  tree return_false_label;
  tree guard_copy;
  auto_vec<tree> decls;
};

/* Helper function for lower_assumptions.  Find local vars and labels
   in the assumption sequence and remove debug stmts.  */

static tree
find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
                          struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
      data->id.decl_map->put (lhs, NULL_TREE);
      data->decls.safe_push (lhs);
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
           var; var = DECL_CHAIN (var))
        if (VAR_P (var)
            && !DECL_EXTERNAL (var)
            && DECL_CONTEXT (var) == data->id.src_fn)
          {
            data->id.decl_map->put (var, var);
            data->decls.safe_push (var);
          }
      break;
    case GIMPLE_LABEL:
      {
        tree label = gimple_label_label (as_a <glabel *> (stmt));
        data->id.decl_map->put (label, label);
        if (DECL_NAME (label) && !DECL_ARTIFICIAL (label))
          {
            if (assume_labels == NULL)
              assume_labels = BITMAP_ALLOC (NULL);
            bitmap_set_bit (assume_labels, DECL_UID (label));
          }
        break;
      }
    case GIMPLE_RETURN:
      /* A return inside the assumption would be undefined behavior if
         it were reached during the hypothetical evaluation, so turn
         such returns into return false;  */
      {
        gimple *g = gimple_build_assign (data->guard_copy, boolean_false_node);
        gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
        gimple_return_set_retval (as_a <greturn *> (stmt), data->guard_copy);
        break;
      }
    case GIMPLE_DEBUG:
      /* As assumptions won't be emitted, debug info stmts in them
         are useless.  */
      gsi_remove (gsi_p, true);
      wi->removed_stmt = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Create a new PARM_DECL that mirrors DECL, which can be a VAR_DECL,
   a PARM_DECL or a RESULT_DECL.  The original DECL must come from
   ID->src_fn and the copy will be part of ID->dst_fn.  */
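
/* For instance (a hedged sketch): a volatile automatic "int v" used in
   the condition is passed to the assumption function by address, as a
   TREE_READONLY "int *" parameter, and adjust_assumption_stmt_op below
   rewrites uses of v into dereferences of that parameter.  */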

static tree
assumption_copy_decl (tree decl, copy_body_data *id)
{
  tree type = TREE_TYPE (decl);

  if (is_global_var (decl))
    return decl;

  gcc_assert (VAR_P (decl)
              || TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);
  if (TREE_THIS_VOLATILE (decl))
    type = build_pointer_type (type);
  tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
                          PARM_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_THIS_VOLATILE (copy) = 0;
  if (TREE_THIS_VOLATILE (decl))
    TREE_READONLY (copy) = 1;
  else
    {
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
      DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
    }
  DECL_ARG_TYPE (copy) = type;
  ((lower_assumption_data *) id)->decls.safe_push (decl);
  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Transform gotos out of the assumption into return false.  */

static tree
adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
                          struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lab = NULL_TREE;
  unsigned int idx = 0;
  if (gimple_code (stmt) == GIMPLE_GOTO)
    lab = gimple_goto_dest (stmt);
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
    repeat:
      if (idx == 0)
        lab = gimple_cond_true_label (as_a <gcond *> (stmt));
      else
        lab = gimple_cond_false_label (as_a <gcond *> (stmt));
    }
  else if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (stmt));
      DECL_CONTEXT (label) = current_function_decl;
    }
  if (lab)
    {
      if (!data->id.decl_map->get (lab))
        {
          if (!data->return_false_label)
            data->return_false_label
              = create_artificial_label (UNKNOWN_LOCATION);
          if (gimple_code (stmt) == GIMPLE_GOTO)
            gimple_goto_set_dest (as_a <ggoto *> (stmt),
                                  data->return_false_label);
          else if (idx == 0)
            gimple_cond_set_true_label (as_a <gcond *> (stmt),
                                        data->return_false_label);
          else
            gimple_cond_set_false_label (as_a <gcond *> (stmt),
                                         data->return_false_label);
        }
      if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
        {
          idx = 1;
          goto repeat;
        }
    }
  return NULL_TREE;
}

/* Adjust trees in the assumption body.  Called through walk_tree.  */

static tree
adjust_assumption_stmt_op (tree *tp, int *, void *datap)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  tree t = *tp;
  tree *newt;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      newt = data->id.decl_map->get (t);
      /* There shouldn't be SSA_NAMEs other than ones defined in the
         assumption's body.  */
      gcc_assert (newt);
      *tp = *newt;
      break;
    case LABEL_DECL:
      newt = data->id.decl_map->get (t);
      if (newt)
        *tp = *newt;
      break;
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *tp = remap_decl (t, &data->id);
      if (TREE_THIS_VOLATILE (t) && *tp != t)
        {
          *tp = build_simple_mem_ref (*tp);
          TREE_THIS_NOTRAP (*tp) = 1;
        }
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Lower assumption.
   The gimplifier transformed:
   .ASSUME (cond);
   into:
   [[assume (guard)]]
   {
     guard = cond;
   }
   which we should transform into:
   .ASSUME (&artificial_fn, args...);
   where artificial_fn will look like:
   bool artificial_fn (args...)
   {
     guard = cond;
     return guard;
   }
   with any debug stmts in the block removed and jumps out of
   the block or return stmts replaced with return false;  */

static void
lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree guard = gimple_assume_guard (stmt);
  gimple *bind = gimple_assume_body (stmt);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_code (bind) == GIMPLE_BIND);

  lower_assumption_data lad;
  hash_map<tree, tree> decl_map;
  memset (&lad.id, 0, sizeof (lad.id));
  lad.return_false_label = NULL_TREE;
  lad.id.src_fn = current_function_decl;
  lad.id.dst_fn = create_assumption_fn (loc);
  lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
  lad.id.decl_map = &decl_map;
  lad.id.copy_decl = assumption_copy_decl;
  lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  lad.id.transform_parameter = true;
  lad.id.do_not_unshare = true;
  lad.id.do_not_fold = true;
  cfun->curr_properties = lad.id.src_cfun->curr_properties;
  lad.guard_copy = create_tmp_var (boolean_type_node);
  decl_map.put (lad.guard_copy, lad.guard_copy);
  decl_map.put (guard, lad.guard_copy);
  cfun->assume_function = 1;

  /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND.  */
  gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
  unsigned int sz = lad.decls.length ();
  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      tree newv;
      /* SSA_NAMEs defined in the assume condition should be replaced
         by new SSA_NAMEs in the artificial function.  */
      if (TREE_CODE (v) == SSA_NAME)
        {
          newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
          decl_map.put (v, newv);
        }
      /* Local vars should have context and type adjusted to the
         new artificial function.  */
      else if (VAR_P (v))
        {
          if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
            DECL_ASSEMBLER_NAME (v);
          TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
          DECL_CONTEXT (v) = current_function_decl;
        }
    }
  /* References to other automatic vars should be replaced by
     PARM_DECLs to the artificial function.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
                    adjust_assumption_stmt_op, &wi);

  /* At the start prepend guard = false;  */
  gimple_seq body = NULL;
  gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
  gimple_seq_add_stmt (&body, g);
  gimple_seq_add_stmt (&body, bind);
  /* At the end add return guard;  */
  greturn *gr = gimple_build_return (lad.guard_copy);
  gimple_seq_add_stmt (&body, gr);
  /* If there were any jumps to labels outside of the condition,
     replace them with a jump to
     return_false_label:
     guard = false;
     return guard;  */
  if (lad.return_false_label)
    {
      g = gimple_build_label (lad.return_false_label);
      gimple_seq_add_stmt (&body, g);
      g = gimple_build_assign (lad.guard_copy, boolean_false_node);
      gimple_seq_add_stmt (&body, g);
      gr = gimple_build_return (lad.guard_copy);
      gimple_seq_add_stmt (&body, gr);
    }
  bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
  body = NULL;
  gimple_seq_add_stmt (&body, bind);
  gimple_set_body (current_function_decl, body);
  pop_cfun ();

  tree parms = NULL_TREE;
  tree parmt = void_list_node;
  auto_vec<tree, 8> vargs;
  vargs.safe_grow (1 + (lad.decls.length () - sz), true);
  /* First argument to IFN_ASSUME will be address of the
     artificial function.  */
  vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
  for (unsigned i = lad.decls.length (); i > sz; --i)
    {
      tree *v = decl_map.get (lad.decls[i - 1]);
      gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
      DECL_CHAIN (*v) = parms;
      parms = *v;
      parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
      /* Remaining arguments will be the variables/parameters
         mentioned in the condition.  */
      vargs[i - sz] = lad.decls[i - 1];
      if (TREE_THIS_VOLATILE (lad.decls[i - 1]))
        {
          TREE_ADDRESSABLE (lad.decls[i - 1]) = 1;
          vargs[i - sz] = build_fold_addr_expr (lad.decls[i - 1]);
        }
      /* If they have gimple types, we might need to regimplify
         them to make the IFN_ASSUME call valid.  */
      if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
          && !is_gimple_val (vargs[i - sz]))
        {
          tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
          g = gimple_build_assign (t, vargs[i - sz]);
          gsi_insert_before (gsi, g, GSI_SAME_STMT);
          vargs[i - sz] = t;
        }
    }
  DECL_ARGUMENTS (lad.id.dst_fn) = parms;
  TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);

  cgraph_node::add_new_function (lad.id.dst_fn, false);

  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      if (TREE_CODE (v) == SSA_NAME)
        release_ssa_name (v);
    }

  data->cannot_fallthru = false;
  /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call.  */
  gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
  gimple_set_location (call, loc);
  gsi_replace (gsi, call, true);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
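
/* For instance (a hedged sketch): in

     goto <l>;
     return;

   the return statement is unreachable, so once the goto has set
   cannot_fallthru the return is simply removed rather than lowered.  */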

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
        lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
         first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
         disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
        gsi_remove (gsi, true);
      else
        gsi_next (gsi);
      return;

    case GIMPLE_OMP_STRUCTURED_BLOCK:
      /* These are supposed to be removed already in OMP lowering.  */
      gcc_unreachable ();

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_DISPATCH:
    case GIMPLE_OMP_INTEROP:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
          {
            if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
              {
                lower_builtin_setjmp (gsi);
                data->cannot_fallthru = false;
                return;
              }
            else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
                     && flag_tree_bit_ccp
                     && gimple_builtin_call_types_compatible_p (stmt, decl))
              {
                lower_builtin_posix_memalign (gsi);
                return;
              }
            else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
                     && !optimize)
              {
                lower_builtin_assume_aligned (gsi);
                data->cannot_fallthru = false;
                gsi_next (gsi);
                return;
              }
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }

        if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
          {
            tree base = gimple_call_arg (stmt, 1);
            gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
            tree decl = TREE_OPERAND (base, 0);
            if (VAR_P (decl) && TREE_STATIC (decl))
              {
                /* Don't poison a variable with static storage; it might have
                   gotten marked before gimplify_init_constructor promoted it
                   to static.  */
                gsi_remove (gsi, true);
                return;
              }
          }

        /* We delay folding of builtin calls from gimplification to
           here so the IL is in a consistent state for the diagnostic
           machinery's job.  */
        if (gimple_call_builtin_p (stmt))
          fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_ASSUME:
      lower_assumption (gsi, data);
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
                        as_a <gtransaction *> (stmt)),
                      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
                          data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (
                                         as_a <gcatch *> (gsi_stmt (i)))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
               gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
                || gimple_seq_may_fallthru (gimple_eh_else_e_body (
                                              eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
                                fold_build2 (MEM_REF, ptr_type_node, pptr,
                                             build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
                                    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
                            2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
                                           build_int_cst (ptr_type_node, 0)),
                              ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Lower calls to __builtin_assume_aligned when not optimizing.  */
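
/* For instance (a hedged sketch): for

     p_1 = __builtin_assume_aligned (q_2, 16);

   an (align 16, misalign 0) pair is recorded in p_1's ptr_info; the
   call itself stays in place and is elided at RTL expansion.  */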

static void
lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));

  tree lhs = gimple_call_lhs (call);
  if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
    return;

  tree align = gimple_call_arg (call, 1);
  tree misalign = (gimple_call_num_args (call) > 2
                   ? gimple_call_arg (call, 2) : NULL_TREE);
  if (!tree_fits_uhwi_p (align)
      || (misalign && !tree_fits_uhwi_p (misalign)))
    return;

  unsigned aligni = TREE_INT_CST_LOW (align);
  unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0
      || (misaligni & ~(aligni - 1)) != 0)
    return;

  /* For lowering we simply transfer alignment information to the
     result and leave the call otherwise unchanged, it will be elided
     at RTL expansion time.  */
  ptr_info_def *pi = get_ptr_info (lhs);
  set_ptr_info_alignment (pi, aligni, misaligni);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contain also function/type/constant declarations
         we don't need to care about.  */
      if (!VAR_P (var))
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}
