/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"
#include "cgraph.h"
#include "tree-ssa.h"
#include "value-range.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "gimple-walk.h"
#include "attribs.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (see tree-eh.cc:lower_eh_constructs).
   3- Multiple identical return statements are grouped into a single
      return, and duplicate returns are replaced with gotos to the
      unique return site.  */
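
/* As an illustrative sketch (the labels below are invented for the
   example, not taken from actual GCC output), a function such as

     int f (int c)
     {
       if (c)
	 return 1;
       return 1;
     }

   enters this pass with its body wrapped in a GIMPLE_BIND and with two
   GIMPLE_RETURN statements.  After lowering, the bind is gone and the
   duplicate returns have become gotos to the label of the single
   remaining return, roughly:

     if (c != 0) goto <D.1>; else goto <D.2>;
     <D.1>:
     goto <D.3>;
     <D.2>:
     goto <D.3>;
     <D.3>:
     return 1;  */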

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
static void lower_builtin_assume_aligned (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it will
	 most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return lower_function_body ();
  }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */
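
/* As a sketch (details elided), a directive such as

     #pragma omp parallel
       x = 1;

   arrives here as a GIMPLE_OMP_PARALLEL statement whose body holds the
   region.  The code below lowers that body, splices it into the
   enclosing sequence right after the directive, and clears the
   directive's own body, so the directive and its former body become
   consecutive statements.  */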

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}

/* Create an artificial FUNCTION_DECL for assumption at LOC.  */

static tree
create_assumption_fn (location_t loc)
{
  tree name = clone_function_name_numbered (current_function_decl, "_assume");
  /* Temporarily, until we determine all the arguments.  */
  tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
  tree decl = build_decl (loc, FUNCTION_DECL, name, type);
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  tree attributes = DECL_ATTRIBUTES (current_function_decl);
  if (lookup_attribute ("noipa", attributes) == NULL)
    {
      attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
      if (lookup_attribute ("noinline", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
      if (lookup_attribute ("noclone", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
      if (lookup_attribute ("no_icf", attributes) == NULL)
	attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
    }
  DECL_ATTRIBUTES (decl) = attributes;
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  tree t = build_decl (DECL_SOURCE_LOCATION (decl),
		       RESULT_DECL, NULL_TREE, boolean_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;
  push_struct_function (decl);
  cfun->function_end_locus = loc;
  init_tree_ssa (cfun);
  return decl;
}

struct lower_assumption_data
{
  copy_body_data id;
  tree return_false_label;
  tree guard_copy;
  auto_vec<tree> decls;
};

/* Helper function for lower_assumption.  Find local vars and labels
   in the assumption sequence and remove debug stmts.  */

static tree
find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
			  struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
      data->id.decl_map->put (lhs, NULL_TREE);
      data->decls.safe_push (lhs);
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
	   var; var = DECL_CHAIN (var))
	if (VAR_P (var)
	    && !DECL_EXTERNAL (var)
	    && DECL_CONTEXT (var) == data->id.src_fn)
	  {
	    data->id.decl_map->put (var, var);
	    data->decls.safe_push (var);
	  }
      break;
    case GIMPLE_LABEL:
      {
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	data->id.decl_map->put (label, label);
	break;
      }
    case GIMPLE_RETURN:
      /* If something in the assumption tries to return from the parent
	 function, reaching it in the hypothetical evaluation would be UB,
	 so transform such returns into return false;  */
      {
	gimple *g = gimple_build_assign (data->guard_copy, boolean_false_node);
	gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
	gimple_return_set_retval (as_a <greturn *> (stmt), data->guard_copy);
	break;
      }
    case GIMPLE_DEBUG:
      /* As assumptions won't be emitted, debug info stmts in them
	 are useless.  */
      gsi_remove (gsi_p, true);
      wi->removed_stmt = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Create a new PARM_DECL that is identical in all respects to DECL
   except that it is always a PARM_DECL, whereas DECL can be a VAR_DECL,
   a PARM_DECL or a RESULT_DECL.  The original DECL must come from
   ID->src_fn and the copy will be part of ID->dst_fn.  */

static tree
assumption_copy_decl (tree decl, copy_body_data *id)
{
  tree type = TREE_TYPE (decl);

  if (is_global_var (decl))
    return decl;

  gcc_assert (VAR_P (decl)
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);
  tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
			  PARM_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
  DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
  DECL_ARG_TYPE (copy) = type;
  ((lower_assumption_data *) id)->decls.safe_push (decl);
  return copy_decl_for_dup_finish (id, decl, copy);
}

/* Transform gotos out of the assumption into return false.  */
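
/* Sketch of the effect: a "goto out;" in the assumption body whose
   label is defined outside the body gets redirected to
   data->return_false_label, and lower_assumption later materializes
   that label at the end of the artificial function as

     return_false_label:
       guard = false;
       return guard;

   (using the names from this file).  */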

static tree
adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
			  struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lab = NULL_TREE;
  unsigned int idx = 0;
  if (gimple_code (stmt) == GIMPLE_GOTO)
    lab = gimple_goto_dest (stmt);
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
    repeat:
      if (idx == 0)
	lab = gimple_cond_true_label (as_a <gcond *> (stmt));
      else
	lab = gimple_cond_false_label (as_a <gcond *> (stmt));
    }
  else if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (stmt));
      DECL_CONTEXT (label) = current_function_decl;
    }
  if (lab)
    {
      if (!data->id.decl_map->get (lab))
	{
	  if (!data->return_false_label)
	    data->return_false_label
	      = create_artificial_label (UNKNOWN_LOCATION);
	  if (gimple_code (stmt) == GIMPLE_GOTO)
	    gimple_goto_set_dest (as_a <ggoto *> (stmt),
				  data->return_false_label);
	  else if (idx == 0)
	    gimple_cond_set_true_label (as_a <gcond *> (stmt),
					data->return_false_label);
	  else
	    gimple_cond_set_false_label (as_a <gcond *> (stmt),
					 data->return_false_label);
	}
      if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
	{
	  idx = 1;
	  goto repeat;
	}
    }
  return NULL_TREE;
}

/* Adjust trees in the assumption body.  Called through walk_tree.  */

static tree
adjust_assumption_stmt_op (tree *tp, int *, void *datap)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  tree t = *tp;
  tree *newt;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      newt = data->id.decl_map->get (t);
      /* There shouldn't be SSA_NAMEs other than ones defined in the
	 assumption's body.  */
      gcc_assert (newt);
      *tp = *newt;
      break;
    case LABEL_DECL:
      newt = data->id.decl_map->get (t);
      if (newt)
	*tp = *newt;
      break;
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *tp = remap_decl (t, &data->id);
      break;
    default:
      break;
    }
  return NULL_TREE;
}

/* Lower assumption.
   The gimplifier transformed:
   .ASSUME (cond);
   into:
   [[assume (guard)]]
   {
     guard = cond;
   }
   which we should transform into:
   .ASSUME (&artificial_fn, args...);
   where artificial_fn will look like:
   bool artificial_fn (args...)
   {
     guard = cond;
     return guard;
   }
   with any debug stmts in the block removed and jumps out of
   the block or return stmts replaced with return false;  */

static void
lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree guard = gimple_assume_guard (stmt);
  gimple *bind = gimple_assume_body (stmt);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_code (bind) == GIMPLE_BIND);

  lower_assumption_data lad;
  hash_map<tree, tree> decl_map;
  memset (&lad.id, 0, sizeof (lad.id));
  lad.return_false_label = NULL_TREE;
  lad.id.src_fn = current_function_decl;
  lad.id.dst_fn = create_assumption_fn (loc);
  lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
  lad.id.decl_map = &decl_map;
  lad.id.copy_decl = assumption_copy_decl;
  lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  lad.id.transform_parameter = true;
  lad.id.do_not_unshare = true;
  lad.id.do_not_fold = true;
  cfun->curr_properties = lad.id.src_cfun->curr_properties;
  lad.guard_copy = create_tmp_var (boolean_type_node);
  decl_map.put (lad.guard_copy, lad.guard_copy);
  decl_map.put (guard, lad.guard_copy);
  cfun->assume_function = 1;

  /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND.  */
  gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
  unsigned int sz = lad.decls.length ();
  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      tree newv;
      /* SSA_NAMEs defined in the assume condition should be replaced
	 by new SSA_NAMEs in the artificial function.  */
      if (TREE_CODE (v) == SSA_NAME)
	{
	  newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
	  decl_map.put (v, newv);
	}
      /* Local vars should have context and type adjusted to the
	 new artificial function.  */
      else if (VAR_P (v))
	{
	  if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
	    DECL_ASSEMBLER_NAME (v);
	  TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
	  DECL_CONTEXT (v) = current_function_decl;
	}
    }
  /* References to other automatic vars should be replaced by
     PARM_DECLs to the artificial function.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
		    adjust_assumption_stmt_op, &wi);

  /* At the start prepend guard = false;  */
  gimple_seq body = NULL;
  gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
  gimple_seq_add_stmt (&body, g);
  gimple_seq_add_stmt (&body, bind);
  /* At the end add return guard;  */
  greturn *gr = gimple_build_return (lad.guard_copy);
  gimple_seq_add_stmt (&body, gr);
  /* If there were any jumps to labels outside of the condition,
     replace them with a jump to
     return_false_label:
     guard = false;
     return guard;  */
  if (lad.return_false_label)
    {
      g = gimple_build_label (lad.return_false_label);
      gimple_seq_add_stmt (&body, g);
      g = gimple_build_assign (lad.guard_copy, boolean_false_node);
      gimple_seq_add_stmt (&body, g);
      gr = gimple_build_return (lad.guard_copy);
      gimple_seq_add_stmt (&body, gr);
    }
  bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
  body = NULL;
  gimple_seq_add_stmt (&body, bind);
  gimple_set_body (current_function_decl, body);
  pop_cfun ();

  tree parms = NULL_TREE;
  tree parmt = void_list_node;
  auto_vec<tree, 8> vargs;
  vargs.safe_grow (1 + (lad.decls.length () - sz), true);
  /* First argument to IFN_ASSUME will be address of the
     artificial function.  */
  vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
  for (unsigned i = lad.decls.length (); i > sz; --i)
    {
      tree *v = decl_map.get (lad.decls[i - 1]);
      gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
      DECL_CHAIN (*v) = parms;
      parms = *v;
      parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
      /* Remaining arguments will be the variables/parameters
	 mentioned in the condition.  */
      vargs[i - sz] = lad.decls[i - 1];
      /* If they have gimple types, we might need to regimplify
	 them to make the IFN_ASSUME call valid.  */
      if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
	  && !is_gimple_val (vargs[i - sz]))
	{
	  tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
	  g = gimple_build_assign (t, vargs[i - sz]);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  vargs[i - sz] = t;
	}
    }
  DECL_ARGUMENTS (lad.id.dst_fn) = parms;
  TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);

  cgraph_node::add_new_function (lad.id.dst_fn, false);

  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      if (TREE_CODE (v) == SSA_NAME)
	release_ssa_name (v);
    }

  data->cannot_fallthru = false;
  /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call.  */
  gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
  gimple_set_location (call, loc);
  gsi_replace (gsi, call, true);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate: simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering; we
	 first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
	 disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
	gsi_remove (gsi, true);
      else
	gsi_next (gsi);
      return;

    case GIMPLE_OMP_STRUCTURED_BLOCK:
      /* These are supposed to be removed already in OMP lowering.  */
      gcc_unreachable ();

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
		     && !optimize)
	      {
		lower_builtin_assume_aligned (gsi);
		data->cannot_fallthru = false;
		gsi_next (gsi);
		return;
	      }
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }

	/* We delay folding of builtin calls from gimplification to
	   here so the IL is in a consistent state for the diagnostic
	   machinery's job.  */
	if (gimple_call_builtin_p (stmt))
	  fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_ASSUME:
      lower_assumption (gsi, data);
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
			    as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */
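
/* A sketch of the rule implemented below: for

     try { BODY } catch (A) { H1 } catch (B) { H2 }

   the construct may fall through if BODY may fall through or if any
   of H1/H2 may fall through, while a cleanup-only try/catch never
   falls through, since its cleanup statements are implicitly followed
   by a GIMPLE_RESX.  */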

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
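
/* A sketch of the rewrite (label name invented for the example): two
   returns whose retval trees are pointer-identical,

     return x;
     ...
     return x;

   both become

     goto <retlab>;
     ...
     goto <retlab>;

   and the representative "<retlab>: return x;" is emitted once at the
   end of the function by lower_function_body.  */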

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

     {
       int D.1844;
       int D.2844;

       [...]

       __builtin_setjmp_setup (&buf, &<D1847>);
       D.1844 = 0;
       goto <D1846>;
       <D1847>:;
       __builtin_setjmp_receiver (&<D1847>);
       D.1844 = 1;
       <D1846>:;
       if (D.1844 == 0) goto <D1848>; else goto <D1849>;

       [...]

       __builtin_setjmp_setup (&buf, &<D2847>);
       D.2844 = 0;
       goto <D2846>;
       <D2847>:;
       __builtin_setjmp_receiver (&<D2847>);
       D.2844 = 1;
       <D2846>:;
       if (D.2844 == 0) goto <D2848>; else goto <D2849>;

       [...]

       <D3850>:;
       return;
     }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}

/* Lower calls to __builtin_assume_aligned when not optimizing.  */
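
/* For example (a sketch; the SSA names are invented), given

     p_1 = __builtin_assume_aligned (q_2, 16);

   the code below records alignment 16 and misalignment 0 in p_1's
   ptr_info and leaves the call in place, to be elided later at RTL
   expansion time.  */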

static void
lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));

  tree lhs = gimple_call_lhs (call);
  if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
    return;

  tree align = gimple_call_arg (call, 1);
  tree misalign = (gimple_call_num_args (call) > 2
		   ? gimple_call_arg (call, 2) : NULL_TREE);
  if (!tree_fits_uhwi_p (align)
      || (misalign && !tree_fits_uhwi_p (misalign)))
    return;

  unsigned aligni = TREE_INT_CST_LOW (align);
  unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0
      || (misaligni & ~(aligni - 1)) != 0)
    return;

  /* For lowering we simply transfer alignment information to the
     result and leave the call otherwise unchanged, it will be elided
     at RTL expansion time.  */
  ptr_info_def *pi = get_ptr_info (lhs);
  set_ptr_info_alignment (pi, aligni, misaligni);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (!VAR_P (var))
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}