1/* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set variants of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
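
/* As a purely illustrative sketch (hypothetical functions, not part of
   this pass), the analysis distinguishes e.g.:

     int sq (int x) { return x * x; }             // const
     int g;
     int read_g (void) { return g; }              // pure: reads global memory
     void bump_g (void) { g++; }                  // neither: writes global memory
     int spin (int x) { while (x); return x; }    // const, but possibly looping

   The looping variants must not be removed by DCE even though their
   results can still be CSEd.  */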
29
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
33
34#include "config.h"
35#include "system.h"
36#include "coretypes.h"
37#include "backend.h"
38#include "target.h"
39#include "tree.h"
40#include "gimple.h"
41#include "tree-pass.h"
42#include "tree-streamer.h"
43#include "cgraph.h"
44#include "diagnostic.h"
45#include "calls.h"
46#include "cfganal.h"
47#include "tree-eh.h"
48#include "gimple-iterator.h"
49#include "gimple-walk.h"
50#include "tree-cfg.h"
51#include "tree-ssa-loop-niter.h"
52#include "langhooks.h"
53#include "ipa-utils.h"
54#include "gimple-pretty-print.h"
55#include "cfgloop.h"
56#include "tree-scalar-evolution.h"
57#include "intl.h"
58#include "opts.h"
59#include "ssa.h"
60#include "alloc-pool.h"
61#include "symbol-summary.h"
62#include "ipa-prop.h"
63#include "ipa-fnsummary.h"
64#include "symtab-thunks.h"
65#include "dbgcnt.h"
66
67/* Lattice values for const and pure functions. Everything starts out
68 being const, then may drop to pure and then neither depending on
69 what is found. */
70enum pure_const_state_e
71{
72 IPA_CONST,
73 IPA_PURE,
74 IPA_NEITHER
75};
76
77static const char *pure_const_names[3] = {"const", "pure", "neither"};
78
79enum malloc_state_e
80{
81 STATE_MALLOC_TOP,
82 STATE_MALLOC,
83 STATE_MALLOC_BOTTOM
84};
85
86static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
87
88/* Holder for the const_state. There is one of these per function
89 decl. */
90class funct_state_d
91{
92public:
93 funct_state_d (): pure_const_state (IPA_NEITHER),
94 state_previously_known (IPA_NEITHER), looping_previously_known (true),
95 looping (true), can_throw (true), can_free (true),
96 malloc_state (STATE_MALLOC_BOTTOM) {}
97
98 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
99 state_previously_known (s.state_previously_known),
100 looping_previously_known (s.looping_previously_known),
101 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
102 malloc_state (s.malloc_state) {}
103
104 /* See above. */
105 enum pure_const_state_e pure_const_state;
  /* What the user declared here (e.g. via attributes); we can always
     be sure about this.  */
107 enum pure_const_state_e state_previously_known;
108 bool looping_previously_known;
109
  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to let DCE remove the call if there
     is a possibility that the call could loop forever, since doing so
     would be a behavioral change.  */
116 bool looping;
117
118 bool can_throw;
119
120 /* If function can call free, munmap or otherwise make previously
121 non-trapping memory accesses trapping. */
122 bool can_free;
123
124 enum malloc_state_e malloc_state;
125};
126
127typedef class funct_state_d * funct_state;
128
129/* The storage of the funct_state is abstracted because there is the
130 possibility that it may be desirable to move this to the cgraph
131 local info. */
132
133class funct_state_summary_t:
134 public fast_function_summary <funct_state_d *, va_heap>
135{
136public:
137 funct_state_summary_t (symbol_table *symtab):
138 fast_function_summary <funct_state_d *, va_heap> (symtab) {}
139
140 void insert (cgraph_node *, funct_state_d *state) final override;
141 void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
142 funct_state_d *src_data,
143 funct_state_d *dst_data) final override;
144};
145
146static funct_state_summary_t *funct_state_summaries = NULL;
147
148static bool gate_pure_const (void);
149
150namespace {
151
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
164
165class pass_ipa_pure_const : public ipa_opt_pass_d
166{
167public:
168 pass_ipa_pure_const(gcc::context *ctxt);
169
170 /* opt_pass methods: */
171 bool gate (function *) final override { return gate_pure_const (); }
172 unsigned int execute (function *fun) final override;
173
174 void register_hooks (void);
175
176private:
177 bool init_p;
178}; // class pass_ipa_pure_const
179
180} // anon namespace
181
/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */
185
186static bool
187function_always_visible_to_compiler_p (tree decl)
188{
189 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
190 || DECL_COMDAT (decl));
191}
192
/* Emit a suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */
198
199static hash_set<tree> *
200suggest_attribute (int option, tree decl, bool known_finite,
201 hash_set<tree> *warned_about,
202 const char * attrib_name)
203{
204 if (!option_enabled (opt_idx: option, lang_mask: lang_hooks.option_lang_mask (), opts: &global_options))
205 return warned_about;
206 if (TREE_THIS_VOLATILE (decl)
207 || (known_finite && function_always_visible_to_compiler_p (decl)))
208 return warned_about;
209
210 if (!warned_about)
211 warned_about = new hash_set<tree>;
212 if (warned_about->contains (k: decl))
213 return warned_about;
214 warned_about->add (k: decl);
215 warning_at (DECL_SOURCE_LOCATION (decl),
216 option,
217 known_finite
218 ? G_("function might be candidate for attribute %qs")
219 : G_("function might be candidate for attribute %qs"
220 " if it is known to return normally"), attrib_name);
221 return warned_about;
222}
223
/* Emit a suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */
226
227static void
228warn_function_pure (tree decl, bool known_finite)
229{
230 /* Declaring a void function pure makes no sense and is diagnosed
231 by -Wattributes because calling it would have no effect. */
232 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
233 return;
234
235 static hash_set<tree> *warned_about;
236 warned_about
237 = suggest_attribute (option: OPT_Wsuggest_attribute_pure, decl,
238 known_finite, warned_about, attrib_name: "pure");
239}
240
/* Emit a suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */
243
244static void
245warn_function_const (tree decl, bool known_finite)
246{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
249 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
250 return;
251
252 static hash_set<tree> *warned_about;
253 warned_about
254 = suggest_attribute (option: OPT_Wsuggest_attribute_const, decl,
255 known_finite, warned_about, attrib_name: "const");
256}
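
/* For illustration only: compiling something like the following
   (hypothetical user code) with -O2 -Wsuggest-attribute=const would
   typically trigger the suggestion above, because the body never reads
   or writes global memory but the declaration lacks the attribute:

     int twice (int x);                    // public, no attribute
     int twice (int x) { return 2 * x; }
*/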
257
258/* Emit suggestion about __attribute__((malloc)) for DECL. */
259
260static void
261warn_function_malloc (tree decl)
262{
263 static hash_set<tree> *warned_about;
264 warned_about
265 = suggest_attribute (option: OPT_Wsuggest_attribute_malloc, decl,
266 known_finite: true, warned_about, attrib_name: "malloc");
267}
268
269/* Emit suggestion about __attribute__((noreturn)) for DECL. */
270
271static void
272warn_function_noreturn (tree decl)
273{
274 tree original_decl = decl;
275
276 static hash_set<tree> *warned_about;
277 if (!lang_hooks.missing_noreturn_ok_p (decl)
278 && targetm.warn_func_return (decl))
279 warned_about
280 = suggest_attribute (option: OPT_Wsuggest_attribute_noreturn, decl: original_decl,
281 known_finite: true, warned_about, attrib_name: "noreturn");
282}
283
284void
285warn_function_cold (tree decl)
286{
287 tree original_decl = decl;
288
289 static hash_set<tree> *warned_about;
290 warned_about
291 = suggest_attribute (option: OPT_Wsuggest_attribute_cold, decl: original_decl,
292 known_finite: true, warned_about, attrib_name: "cold");
293}
294
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */
297
298static inline void
299check_decl (funct_state local,
300 tree t, bool checking_write, bool ipa)
301{
302 /* Do not want to do anything with volatile except mark any
303 function that uses one to be not const or pure. */
304 if (TREE_THIS_VOLATILE (t))
305 {
306 local->pure_const_state = IPA_NEITHER;
307 if (dump_file)
308 fprintf (stream: dump_file, format: " Volatile operand is not const/pure\n");
309 return;
310 }
311
312 /* Do not care about a local automatic that is not static. */
313 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
314 return;
315
  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
318 if (DECL_PRESERVE_P (t))
319 {
320 local->pure_const_state = IPA_NEITHER;
321 if (dump_file)
322 fprintf (stream: dump_file, format: " Used static/global variable is not const/pure\n");
323 return;
324 }
325
326 /* In IPA mode we are not interested in checking actual loads and stores;
327 they will be processed at propagation time using ipa_ref. */
328 if (ipa)
329 return;
330
331 /* Since we have dealt with the locals and params cases above, if we
332 are CHECKING_WRITE, this cannot be a pure or constant
333 function. */
334 if (checking_write)
335 {
336 local->pure_const_state = IPA_NEITHER;
337 if (dump_file)
338 fprintf (stream: dump_file, format: " static/global memory write is not const/pure\n");
339 return;
340 }
341
342 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
343 {
344 /* Readonly reads are safe. */
345 if (TREE_READONLY (t))
346 return; /* Read of a constant, do not change the function state. */
347 else
348 {
349 if (dump_file)
350 fprintf (stream: dump_file, format: " global memory read is not const\n");
351 /* Just a regular read. */
352 if (local->pure_const_state == IPA_CONST)
353 local->pure_const_state = IPA_PURE;
354 }
355 }
356 else
357 {
358 /* Compilation level statics can be read if they are readonly
359 variables. */
360 if (TREE_READONLY (t))
361 return;
362
363 if (dump_file)
364 fprintf (stream: dump_file, format: " static memory read is not const\n");
365 /* Just a regular read. */
366 if (local->pure_const_state == IPA_CONST)
367 local->pure_const_state = IPA_PURE;
368 }
369}
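
/* Purely as an illustration of the rules above, consider this
   hypothetical code:

     static int counter;
     int peek (void) { return counter; }   // read of a non-readonly static
     void poke (void) { counter = 1; }     // write of a static

   The read in peek only demotes the state from IPA_CONST to IPA_PURE,
   while the write in poke forces IPA_NEITHER.  */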
370
371
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of the memory operand T is legal in a function that is either pure
   or const.  */
374
375static inline void
376check_op (funct_state local, tree t, bool checking_write)
377{
378 t = get_base_address (t);
379 if (t && TREE_THIS_VOLATILE (t))
380 {
381 local->pure_const_state = IPA_NEITHER;
382 if (dump_file)
383 fprintf (stream: dump_file, format: " Volatile indirect ref is not const/pure\n");
384 return;
385 }
386 else if (refs_local_or_readonly_memory_p (t))
387 {
388 if (dump_file)
389 fprintf (stream: dump_file, format: " Indirect ref to local or readonly "
390 "memory is OK\n");
391 return;
392 }
393 else if (checking_write)
394 {
395 local->pure_const_state = IPA_NEITHER;
396 if (dump_file)
397 fprintf (stream: dump_file, format: " Indirect ref write is not const/pure\n");
398 return;
399 }
400 else
401 {
402 if (dump_file)
403 fprintf (stream: dump_file, format: " Indirect ref read is not const\n");
404 if (local->pure_const_state == IPA_CONST)
405 local->pure_const_state = IPA_PURE;
406 }
407}
408
/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */
410
411static void
412state_from_flags (enum pure_const_state_e *state, bool *looping,
413 int flags, bool cannot_lead_to_return)
414{
415 *looping = false;
416 if (flags & ECF_LOOPING_CONST_OR_PURE)
417 {
418 *looping = true;
419 if (dump_file && (dump_flags & TDF_DETAILS))
420 fprintf (stream: dump_file, format: " looping\n");
421 }
422 if (flags & ECF_CONST)
423 {
424 *state = IPA_CONST;
425 if (dump_file && (dump_flags & TDF_DETAILS))
426 fprintf (stream: dump_file, format: " const\n");
427 }
428 else if (flags & ECF_PURE)
429 {
430 *state = IPA_PURE;
431 if (dump_file && (dump_flags & TDF_DETAILS))
432 fprintf (stream: dump_file, format: " pure\n");
433 }
434 else if (cannot_lead_to_return)
435 {
436 *state = IPA_PURE;
437 *looping = true;
438 if (dump_file && (dump_flags & TDF_DETAILS))
439 fprintf (stream: dump_file, format: " ignoring side effects->pure looping\n");
440 }
441 else
442 {
443 if (dump_file && (dump_flags & TDF_DETAILS))
444 fprintf (stream: dump_file, format: " neither\n");
445 *state = IPA_NEITHER;
446 *looping = true;
447 }
448}
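
/* For example, a callee declared __attribute__ ((pure)) maps to
   {IPA_PURE, !looping}, while a call that cannot lead to a return
   (e.g. a noreturn and nothrow callee) is treated as {IPA_PURE, looping}:
   its side effects can never be observed by code after the call, but the
   call itself must not be removed.  */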
449
/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */
454
455static inline void
456better_state (enum pure_const_state_e *state, bool *looping,
457 enum pure_const_state_e state2, bool looping2)
458{
459 if (state2 < *state)
460 {
461 if (*state == IPA_NEITHER)
462 *looping = looping2;
463 else
464 *looping = MIN (*looping, looping2);
465 *state = state2;
466 }
467 else if (state2 != IPA_NEITHER)
468 *looping = MIN (*looping, looping2);
469}
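
/* A small worked example (illustrative only): if the body analysis found
   {IPA_PURE, looping} but the previously known state from the user's
   declaration is {IPA_CONST, !looping}, better_state keeps the better
   value of each component and yields {IPA_CONST, !looping}.  */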
470
/* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
   worse of the two variants into STATE and LOOPING.
   FROM and TO identify the caller and the callee of the call being
   merged; they may be NULL when interposition is not a concern.  */
474
475static inline void
476worse_state (enum pure_const_state_e *state, bool *looping,
477 enum pure_const_state_e state2, bool looping2,
478 struct symtab_node *from,
479 struct symtab_node *to)
480{
481 /* Consider function:
482
483 bool a(int *p)
484 {
485 return *p==*p;
486 }
487
488 During early optimization we will turn this into:
489
490 bool a(int *p)
491 {
492 return true;
493 }
494
     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
498 if (*state == IPA_CONST && state2 == IPA_CONST
499 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (ref: from))
500 {
501 if (dump_file && (dump_flags & TDF_DETAILS))
502 fprintf (stream: dump_file, format: "Dropping state to PURE because call to %s may not "
503 "bind to current def.\n", to->dump_name ());
504 state2 = IPA_PURE;
505 }
506 *state = MAX (*state, state2);
507 *looping = MAX (*looping, looping2);
508}
509
/* Recognize special cases of builtins that are by themselves not const
   but a function using them can still be.  */
512bool
513builtin_safe_for_const_function_p (bool *looping, tree callee)
514{
515 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
516 switch (DECL_FUNCTION_CODE (decl: callee))
517 {
518 case BUILT_IN_RETURN:
519 case BUILT_IN_UNREACHABLE:
520 CASE_BUILT_IN_ALLOCA:
521 case BUILT_IN_STACK_SAVE:
522 case BUILT_IN_STACK_RESTORE:
523 case BUILT_IN_EH_POINTER:
524 case BUILT_IN_EH_FILTER:
525 case BUILT_IN_UNWIND_RESUME:
526 case BUILT_IN_CXA_END_CLEANUP:
527 case BUILT_IN_EH_COPY_VALUES:
528 case BUILT_IN_FRAME_ADDRESS:
529 case BUILT_IN_APPLY_ARGS:
530 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
531 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
532 case BUILT_IN_DWARF_CFA:
533 case BUILT_IN_RETURN_ADDRESS:
534 *looping = false;
535 return true;
536 case BUILT_IN_PREFETCH:
537 *looping = true;
538 return true;
539 default:
540 break;
541 }
542 return false;
543}
544
/* Check the parameters of the call statement CALL to see if there are
   any references in the parameters that are not allowed for pure or
   const functions.  Also check to see if this is either an indirect
   call, a call outside the compilation unit, or a call with special
   attributes that may also affect the purity.  */
551
552static void
553check_call (funct_state local, gcall *call, bool ipa)
554{
555 int flags = gimple_call_flags (call);
556 tree callee_t = gimple_call_fndecl (gs: call);
557 bool possibly_throws = stmt_could_throw_p (cfun, call);
558 bool possibly_throws_externally = (possibly_throws
559 && stmt_can_throw_external (cfun, call));
560
561 if (possibly_throws)
562 {
563 unsigned int i;
564 for (i = 0; i < gimple_num_ops (gs: call); i++)
565 if (gimple_op (gs: call, i)
566 && tree_could_throw_p (gimple_op (gs: call, i)))
567 {
568 if (possibly_throws && cfun->can_throw_non_call_exceptions)
569 {
570 if (dump_file)
571 fprintf (stream: dump_file, format: " operand can throw; looping\n");
572 local->looping = true;
573 }
574 if (possibly_throws_externally)
575 {
576 if (dump_file)
577 fprintf (stream: dump_file, format: " operand can throw externally\n");
578 local->can_throw = true;
579 }
580 }
581 }
582
583 /* The const and pure flags are set by a variety of places in the
584 compiler (including here). If someone has already set the flags
585 for the callee, (such as for some of the builtins) we will use
586 them, otherwise we will compute our own information.
587
588 Const and pure functions have less clobber effects than other
589 functions so we process these first. Otherwise if it is a call
590 outside the compilation unit or an indirect call we punt. This
591 leaves local calls which will be processed by following the call
592 graph. */
593 if (callee_t)
594 {
595 bool call_looping;
596
597 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
598 && !nonfreeing_call_p (call))
599 local->can_free = true;
600
601 if (builtin_safe_for_const_function_p (looping: &call_looping, callee: callee_t))
602 {
603 worse_state (state: &local->pure_const_state, looping: &local->looping,
604 state2: IPA_CONST, looping2: call_looping,
605 NULL, NULL);
606 return;
607 }
608 /* When bad things happen to bad functions, they cannot be const
609 or pure. */
610 if (setjmp_call_p (callee_t))
611 {
612 if (dump_file)
613 fprintf (stream: dump_file, format: " setjmp is not const/pure\n");
614 local->looping = true;
615 local->pure_const_state = IPA_NEITHER;
616 }
617
618 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
619 switch (DECL_FUNCTION_CODE (decl: callee_t))
620 {
621 case BUILT_IN_LONGJMP:
622 case BUILT_IN_NONLOCAL_GOTO:
623 if (dump_file)
624 fprintf (stream: dump_file,
625 format: " longjmp and nonlocal goto is not const/pure\n");
626 local->pure_const_state = IPA_NEITHER;
627 local->looping = true;
628 break;
629 default:
630 break;
631 }
632 }
633 else if (gimple_call_internal_p (gs: call) && !nonfreeing_call_p (call))
634 local->can_free = true;
635
636 /* When not in IPA mode, we can still handle self recursion. */
637 if (!ipa && callee_t
638 && recursive_call_p (current_function_decl, callee_t))
639 {
640 if (dump_file)
641 fprintf (stream: dump_file, format: " Recursive call can loop.\n");
642 local->looping = true;
643 }
644 /* Either callee is unknown or we are doing local analysis.
645 Look to see if there are any bits available for the callee (such as by
646 declaration or because it is builtin) and process solely on the basis of
647 those bits. Handle internal calls always, those calls don't have
648 corresponding cgraph edges and thus aren't processed during
649 the propagation. */
650 else if (!ipa || gimple_call_internal_p (gs: call))
651 {
652 enum pure_const_state_e call_state;
653 bool call_looping;
654 if (possibly_throws && cfun->can_throw_non_call_exceptions)
655 {
656 if (dump_file)
657 fprintf (stream: dump_file, format: " can throw; looping\n");
658 local->looping = true;
659 }
660 if (possibly_throws_externally)
661 {
662 if (dump_file)
663 {
664 fprintf (stream: dump_file, format: " can throw externally to lp %i\n",
665 lookup_stmt_eh_lp (call));
666 if (callee_t)
667 fprintf (stream: dump_file, format: " callee:%s\n",
668 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
669 }
670 local->can_throw = true;
671 }
672 if (dump_file && (dump_flags & TDF_DETAILS))
673 fprintf (stream: dump_file, format: " checking flags for call:");
674 state_from_flags (state: &call_state, looping: &call_looping, flags,
675 cannot_lead_to_return: ((flags & (ECF_NORETURN | ECF_NOTHROW))
676 == (ECF_NORETURN | ECF_NOTHROW))
677 || (!flag_exceptions && (flags & ECF_NORETURN)));
678 worse_state (state: &local->pure_const_state, looping: &local->looping,
679 state2: call_state, looping2: call_looping, NULL, NULL);
680 }
681 /* Direct functions calls are handled by IPA propagation. */
682}
683
/* Wrapper around check_decl for loads in local mode.  */
685
686static bool
687check_load (gimple *, tree op, tree, void *data)
688{
689 if (DECL_P (op))
690 check_decl (local: (funct_state)data, t: op, checking_write: false, ipa: false);
691 else
692 check_op (local: (funct_state)data, t: op, checking_write: false);
693 return false;
694}
695
/* Wrapper around check_decl for stores in local mode.  */
697
698static bool
699check_store (gimple *, tree op, tree, void *data)
700{
701 if (DECL_P (op))
702 check_decl (local: (funct_state)data, t: op, checking_write: true, ipa: false);
703 else
704 check_op (local: (funct_state)data, t: op, checking_write: true);
705 return false;
706}
707
708/* Wrapper around check_decl for loads in ipa mode. */
709
710static bool
711check_ipa_load (gimple *, tree op, tree, void *data)
712{
713 if (DECL_P (op))
714 check_decl (local: (funct_state)data, t: op, checking_write: false, ipa: true);
715 else
716 check_op (local: (funct_state)data, t: op, checking_write: false);
717 return false;
718}
719
720/* Wrapper around check_decl for stores in ipa mode. */
721
722static bool
723check_ipa_store (gimple *, tree op, tree, void *data)
724{
725 if (DECL_P (op))
726 check_decl (local: (funct_state)data, t: op, checking_write: true, ipa: true);
727 else
728 check_op (local: (funct_state)data, t: op, checking_write: true);
729 return false;
730}
731
/* Look at the statement pointed to by the iterator GSIP and figure out
   what interesting side effects it has.  */
734static void
735check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
736{
737 gimple *stmt = gsi_stmt (i: *gsip);
738
739 if (is_gimple_debug (gs: stmt))
740 return;
741
  /* Before IPA, do consider clobbers as side effects, so that we prefer
     to inline C++ destructors and keep clobber semantics rather than
     eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case for C++ destructors.  */
750
751 if ((ipa || cfun->after_inlining) && gimple_clobber_p (s: stmt))
752 return;
753
754 if (dump_file)
755 {
756 fprintf (stream: dump_file, format: " scanning: ");
757 print_gimple_stmt (dump_file, stmt, 0);
758 }
759
760 if (gimple_has_volatile_ops (stmt)
761 && !gimple_clobber_p (s: stmt))
762 {
763 local->pure_const_state = IPA_NEITHER;
764 if (dump_file)
765 fprintf (stream: dump_file, format: " Volatile stmt is not const/pure\n");
766 }
767
768 /* Look for loads and stores. */
769 walk_stmt_load_store_ops (stmt, local,
770 ipa ? check_ipa_load : check_load,
771 ipa ? check_ipa_store : check_store);
772
773 if (gimple_code (g: stmt) != GIMPLE_CALL
774 && stmt_could_throw_p (cfun, stmt))
775 {
776 if (cfun->can_throw_non_call_exceptions)
777 {
778 if (dump_file)
779 fprintf (stream: dump_file, format: " can throw; looping\n");
780 local->looping = true;
781 }
782 if (stmt_can_throw_external (cfun, stmt))
783 {
784 if (dump_file)
785 fprintf (stream: dump_file, format: " can throw externally\n");
786 local->can_throw = true;
787 }
788 else
789 if (dump_file)
790 fprintf (stream: dump_file, format: " can throw\n");
791 }
792 switch (gimple_code (g: stmt))
793 {
794 case GIMPLE_CALL:
795 check_call (local, call: as_a <gcall *> (p: stmt), ipa);
796 break;
797 case GIMPLE_LABEL:
798 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
799 /* Target of long jump. */
800 {
801 if (dump_file)
802 fprintf (stream: dump_file, format: " nonlocal label is not const/pure\n");
803 local->pure_const_state = IPA_NEITHER;
804 }
805 break;
806 case GIMPLE_ASM:
807 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (p: stmt)))
808 {
809 if (dump_file)
810 fprintf (stream: dump_file, format: " memory asm clobber is not const/pure\n");
811 /* Abandon all hope, ye who enter here. */
812 local->pure_const_state = IPA_NEITHER;
813 local->can_free = true;
814 }
815 if (gimple_asm_volatile_p (asm_stmt: as_a <gasm *> (p: stmt)))
816 {
817 if (dump_file)
818 fprintf (stream: dump_file, format: " volatile is not const/pure\n");
819 /* Abandon all hope, ye who enter here. */
820 local->pure_const_state = IPA_NEITHER;
821 local->looping = true;
822 local->can_free = true;
823 }
824 return;
825 default:
826 break;
827 }
828}
829
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   statement.  */
832
833static bool
834check_retval_uses (tree retval, gimple *stmt)
835{
836 imm_use_iterator use_iter;
837 gimple *use_stmt;
838
839 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
840 if (gcond *cond = dyn_cast<gcond *> (p: use_stmt))
841 {
842 tree op2 = gimple_cond_rhs (gs: cond);
843 if (!integer_zerop (op2))
844 return false;
845 }
846 else if (gassign *ga = dyn_cast<gassign *> (p: use_stmt))
847 {
848 enum tree_code code = gimple_assign_rhs_code (gs: ga);
849 if (TREE_CODE_CLASS (code) != tcc_comparison)
850 return false;
851 if (!integer_zerop (gimple_assign_rhs2 (gs: ga)))
852 return false;
853 }
854 else if (is_gimple_debug (gs: use_stmt))
855 ;
856 else if (use_stmt != stmt)
857 return false;
858
859 return true;
860}
861
/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT (element) is a function call.
   3) The return value has immediate uses only within comparisons (gcond
      or gassign) and the return stmt (and likewise a phi arg has
      immediate uses only within a comparison or the phi stmt).
   A minimal example is sketched after this comment.  */
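
/* A minimal sketch of a function that would satisfy the criteria above
   (hypothetical code, shown for illustration only):

     void *wrap_alloc (unsigned n)
     {
       void *p = __builtin_malloc (n);
       if (p == 0)
         __builtin_abort ();
       return p;
     }

   The returned SSA name is defined by a call to a malloc-attributed
   function and is otherwise used only in a comparison against 0 and in
   the return statement.  */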
872
873#define DUMP_AND_RETURN(reason) \
874{ \
875 if (dump_file && (dump_flags & TDF_DETAILS)) \
876 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
877 (node->dump_name ()), (reason)); \
878 return false; \
879}
880
881static bool
882malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
883 bitmap visited)
884{
885 cgraph_node *node = cgraph_node::get_create (fun->decl);
886 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
887 return true;
888
889 if (!check_retval_uses (retval, stmt: ret_stmt))
890 DUMP_AND_RETURN("Return value has uses outside return stmt"
891 " and comparisons against 0.")
892
893 gimple *def = SSA_NAME_DEF_STMT (retval);
894
895 if (gcall *call_stmt = dyn_cast<gcall *> (p: def))
896 {
897 tree callee_decl = gimple_call_fndecl (gs: call_stmt);
898 if (!callee_decl)
899 return false;
900
901 if (!ipa && !DECL_IS_MALLOC (callee_decl))
902 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
903 " non-ipa mode.")
904
905 cgraph_edge *cs = node->get_edge (call_stmt);
906 if (cs)
907 {
908 ipa_call_summary *es = ipa_call_summaries->get_create (edge: cs);
909 es->is_return_callee_uncaptured = true;
910 }
911 }
912
913 else if (gphi *phi = dyn_cast<gphi *> (p: def))
914 {
915 bool all_args_zero = true;
916 for (unsigned i = 0; i < gimple_phi_num_args (gs: phi); ++i)
917 {
918 tree arg = gimple_phi_arg_def (gs: phi, index: i);
919 if (integer_zerop (arg))
920 continue;
921
922 all_args_zero = false;
923 if (TREE_CODE (arg) != SSA_NAME)
924 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
925 if (!check_retval_uses (retval: arg, stmt: phi))
926 DUMP_AND_RETURN ("phi arg has uses outside phi"
927 " and comparisons against 0.")
928
929 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
930 if (is_a<gphi *> (p: arg_def))
931 {
932 if (!malloc_candidate_p_1 (fun, retval: arg, ret_stmt: phi, ipa, visited))
933 DUMP_AND_RETURN ("nested phi fail")
934 continue;
935 }
936
937 gcall *call_stmt = dyn_cast<gcall *> (p: arg_def);
938 if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is not a call_stmt.")
940
941 tree callee_decl = gimple_call_fndecl (gs: call_stmt);
942 if (!callee_decl)
943 return false;
944 if (!ipa && !DECL_IS_MALLOC (callee_decl))
945 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
946 " for non-ipa mode.")
947
948 cgraph_edge *cs = node->get_edge (call_stmt);
949 if (cs)
950 {
951 ipa_call_summary *es = ipa_call_summaries->get_create (edge: cs);
952 es->is_return_callee_uncaptured = true;
953 }
954 }
955
956 if (all_args_zero)
957 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
958 }
959
960 else
961 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
962
963 return true;
964}
965
966static bool
967malloc_candidate_p (function *fun, bool ipa)
968{
969 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
970 edge e;
971 edge_iterator ei;
972 cgraph_node *node = cgraph_node::get_create (fun->decl);
973
974 if (EDGE_COUNT (exit_block->preds) == 0
975 || !flag_delete_null_pointer_checks)
976 return false;
977
978 auto_bitmap visited;
979 FOR_EACH_EDGE (e, ei, exit_block->preds)
980 {
981 gimple_stmt_iterator gsi = gsi_last_bb (bb: e->src);
982 greturn *ret_stmt = dyn_cast<greturn *> (p: gsi_stmt (i: gsi));
983
984 if (!ret_stmt)
985 return false;
986
987 tree retval = gimple_return_retval (gs: ret_stmt);
988 if (!retval)
989 DUMP_AND_RETURN("No return value.")
990
991 if (TREE_CODE (retval) != SSA_NAME
992 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
993 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
994
995 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
996 return false;
997 }
998
999 if (dump_file && (dump_flags & TDF_DETAILS))
1000 fprintf (stream: dump_file, format: "\nFound %s to be candidate for malloc attribute\n",
1001 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1002 return true;
1003}
1004
1005#undef DUMP_AND_RETURN
1006
1007/* Return true if function is known to be finite. */
1008
1009bool
1010finite_function_p ()
1011{
  /* Const functions cannot have back edges (an
     indication of a possible infinite loop side
     effect).  */
1015 bool finite = true;
1016 if (mark_dfs_back_edges ())
1017 {
      /* Preheaders are needed for SCEV to work.
	 Simple latches and recorded exits improve the chances that a loop
	 will be proved to be finite in testcases such as loop-15.c
	 and loop-24.c.  */
1022 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1023 | LOOPS_HAVE_SIMPLE_LATCHES
1024 | LOOPS_HAVE_RECORDED_EXITS);
1025 if (dump_file && (dump_flags & TDF_DETAILS))
1026 flow_loops_dump (dump_file, NULL, 0);
1027 if (mark_irreducible_loops ())
1028 {
1029 if (dump_file)
1030 fprintf (stream: dump_file, format: " has irreducible loops\n");
1031 finite = false;
1032 }
1033 else
1034 {
1035 scev_initialize ();
1036 for (auto loop : loops_list (cfun, 0))
1037 if (!finite_loop_p (loop))
1038 {
1039 if (dump_file)
1040 fprintf (stream: dump_file, format: " cannot prove finiteness of "
1041 "loop %i\n", loop->num);
	    finite = false;
1043 break;
1044 }
1045 scev_finalize ();
1046 }
1047 loop_optimizer_finalize ();
1048 }
1049 return finite;
1050}
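
/* For illustration (hypothetical code): a counted loop such as
   "for (i = 0; i < n; i++) s += i;" is typically proved finite by
   finite_loop_p via SCEV, whereas a pointer-chasing loop such as
   "while (p) p = p->next;" over arbitrary input generally is not, so a
   function containing only the latter keeps its looping flag set and can
   at most become looping const/pure.  */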
1051
1052/* This is the main routine for finding the reference patterns for
1053 global variables within a function FN. */
1054
1055static funct_state
1056analyze_function (struct cgraph_node *fn, bool ipa)
1057{
1058 tree decl = fn->decl;
1059 funct_state l;
1060 basic_block this_block;
1061
1062 l = XCNEW (class funct_state_d);
1063 l->pure_const_state = IPA_CONST;
1064 l->state_previously_known = IPA_NEITHER;
1065 l->looping_previously_known = true;
1066 l->looping = false;
1067 l->can_throw = false;
1068 l->can_free = false;
1069 state_from_flags (state: &l->state_previously_known, looping: &l->looping_previously_known,
1070 flags: flags_from_decl_or_type (fn->decl),
1071 cannot_lead_to_return: fn->cannot_return_p ());
1072
1073 if (fn->thunk || fn->alias)
1074 {
1075 /* Thunk gets propagated through, so nothing interesting happens. */
1076 gcc_assert (ipa);
1077 if (fn->thunk && thunk_info::get (node: fn)->virtual_offset_p)
1078 l->pure_const_state = IPA_NEITHER;
1079 return l;
1080 }
1081
1082 if (dump_file)
1083 {
1084 fprintf (stream: dump_file, format: "\n\n local analysis of %s\n ",
1085 fn->dump_name ());
1086 }
1087
1088 push_cfun (DECL_STRUCT_FUNCTION (decl));
1089
1090 FOR_EACH_BB_FN (this_block, cfun)
1091 {
1092 gimple_stmt_iterator gsi;
1093 struct walk_stmt_info wi;
1094
1095 memset (s: &wi, c: 0, n: sizeof (wi));
1096 for (gsi = gsi_start_bb (bb: this_block);
1097 !gsi_end_p (i: gsi);
1098 gsi_next (i: &gsi))
1099 {
	  /* NULL memory accesses terminate the BB.  These accesses are known
	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
	     into volatile accesses and adds a builtin_trap call, which would
	     otherwise confuse us.  */
1104 if (infer_nonnull_range_by_dereference (gsi_stmt (i: gsi),
1105 null_pointer_node))
1106 {
1107 if (dump_file)
1108 fprintf (stream: dump_file, format: " NULL memory access; terminating BB%s\n",
1109 flag_non_call_exceptions ? "; looping" : "");
1110 if (flag_non_call_exceptions)
1111 {
1112 l->looping = true;
1113 if (stmt_can_throw_external (cfun, gsi_stmt (i: gsi)))
1114 {
1115 if (dump_file)
1116 fprintf (stream: dump_file, format: " can throw externally\n");
1117 l->can_throw = true;
1118 }
1119 }
1120 break;
1121 }
1122 check_stmt (gsip: &gsi, local: l, ipa);
1123 if (l->pure_const_state == IPA_NEITHER
1124 && l->looping
1125 && l->can_throw
1126 && l->can_free)
1127 goto end;
1128 }
1129 }
1130
1131end:
1132 if (l->pure_const_state != IPA_NEITHER
1133 && !l->looping
1134 && !finite_function_p ())
1135 l->looping = true;
1136
1137 if (dump_file && (dump_flags & TDF_DETAILS))
1138 fprintf (stream: dump_file, format: " checking previously known:");
1139
1140 better_state (state: &l->pure_const_state, looping: &l->looping,
1141 state2: l->state_previously_known,
1142 looping2: l->looping_previously_known);
1143 if (TREE_NOTHROW (decl))
1144 l->can_throw = false;
1145
1146 l->malloc_state = STATE_MALLOC_BOTTOM;
1147 if (DECL_IS_MALLOC (decl))
1148 l->malloc_state = STATE_MALLOC;
1149 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), ipa: true))
1150 l->malloc_state = STATE_MALLOC_TOP;
1151 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), ipa: false))
1152 l->malloc_state = STATE_MALLOC;
1153
1154 pop_cfun ();
1155 if (dump_file)
1156 {
1157 if (l->looping)
1158 fprintf (stream: dump_file, format: "Function is locally looping.\n");
1159 if (l->can_throw)
1160 fprintf (stream: dump_file, format: "Function is locally throwing.\n");
1161 if (l->pure_const_state == IPA_CONST)
1162 fprintf (stream: dump_file, format: "Function is locally const.\n");
1163 if (l->pure_const_state == IPA_PURE)
1164 fprintf (stream: dump_file, format: "Function is locally pure.\n");
1165 if (l->can_free)
1166 fprintf (stream: dump_file, format: "Function can locally free.\n");
1167 if (l->malloc_state == STATE_MALLOC)
1168 fprintf (stream: dump_file, format: "Function is locally malloc.\n");
1169 }
1170 return l;
1171}
1172
1173void
1174funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1175{
1176 /* There are some shared nodes, in particular the initializers on
1177 static declarations. We do not need to scan them more than once
1178 since all we would be interested in are the addressof
1179 operations. */
1180 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1181 {
1182 funct_state_d *a = analyze_function (fn: node, ipa: true);
1183 new (state) funct_state_d (*a);
1184 free (ptr: a);
1185 }
1186 else
1187 /* Do not keep stale summaries. */
1188 funct_state_summaries->remove (node);
1189}
1190
/* Called when a new clone is inserted into the callgraph late.  */
1192
1193void
1194funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1195 funct_state_d *src_data,
1196 funct_state_d *dst_data)
1197{
1198 new (dst_data) funct_state_d (*src_data);
1199 if (dst_data->malloc_state == STATE_MALLOC
1200 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1201 dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1202}
1203
1204
1205void
1206pass_ipa_pure_const::
1207register_hooks (void)
1208{
1209 if (init_p)
1210 return;
1211
1212 init_p = true;
1213
1214 funct_state_summaries = new funct_state_summary_t (symtab);
1215}
1216
1217
1218/* Analyze each function in the cgraph to see if it is locally PURE or
1219 CONST. */
1220
1221static void
1222pure_const_generate_summary (void)
1223{
1224 struct cgraph_node *node;
1225
1226 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1227 pass->register_hooks ();
1228
  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function gets cloned and the clone is AVAILABLE.  */
1234
1235 FOR_EACH_DEFINED_FUNCTION (node)
1236 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1237 {
1238 funct_state_d *a = analyze_function (fn: node, ipa: true);
1239 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1240 free (ptr: a);
1241 }
1242}
1243
1244
1245/* Serialize the ipa info for lto. */
1246
1247static void
1248pure_const_write_summary (void)
1249{
1250 struct cgraph_node *node;
1251 struct lto_simple_output_block *ob
1252 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1253 unsigned int count = 0;
1254 lto_symtab_encoder_iterator lsei;
1255 lto_symtab_encoder_t encoder;
1256
1257 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1258
1259 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1260 lsei_next_function_in_partition (lsei: &lsei))
1261 {
1262 node = lsei_cgraph_node (lsei);
1263 if (node->definition && funct_state_summaries->exists (node))
1264 count++;
1265 }
1266
1267 streamer_write_uhwi_stream (ob->main_stream, count);
1268
1269 /* Process all of the functions. */
1270 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1271 lsei_next_function_in_partition (lsei: &lsei))
1272 {
1273 node = lsei_cgraph_node (lsei);
1274 funct_state_d *fs = funct_state_summaries->get (node);
1275 if (node->definition && fs != NULL)
1276 {
1277 struct bitpack_d bp;
1278 int node_ref;
1279 lto_symtab_encoder_t encoder;
1280
1281 encoder = ob->decl_state->symtab_node_encoder;
1282 node_ref = lto_symtab_encoder_encode (encoder, node);
1283 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1284
1285 /* Note that flags will need to be read in the opposite
1286 order as we are pushing the bitflags into FLAGS. */
1287 bp = bitpack_create (s: ob->main_stream);
1288 bp_pack_value (bp: &bp, val: fs->pure_const_state, nbits: 2);
1289 bp_pack_value (bp: &bp, val: fs->state_previously_known, nbits: 2);
1290 bp_pack_value (bp: &bp, val: fs->looping_previously_known, nbits: 1);
1291 bp_pack_value (bp: &bp, val: fs->looping, nbits: 1);
1292 bp_pack_value (bp: &bp, val: fs->can_throw, nbits: 1);
1293 bp_pack_value (bp: &bp, val: fs->can_free, nbits: 1);
1294 bp_pack_value (bp: &bp, val: fs->malloc_state, nbits: 2);
1295 streamer_write_bitpack (bp: &bp);
1296 }
1297 }
1298
1299 lto_destroy_simple_output_block (ob);
1300}
1301
1302
1303/* Deserialize the ipa info for lto. */
1304
1305static void
1306pure_const_read_summary (void)
1307{
1308 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1309 struct lto_file_decl_data *file_data;
1310 unsigned int j = 0;
1311
1312 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1313 pass->register_hooks ();
1314
1315 while ((file_data = file_data_vec[j++]))
1316 {
1317 const char *data;
1318 size_t len;
1319 class lto_input_block *ib
1320 = lto_create_simple_input_block (file_data,
1321 LTO_section_ipa_pure_const,
1322 &data, &len);
1323 if (ib)
1324 {
1325 unsigned int i;
1326 unsigned int count = streamer_read_uhwi (ib);
1327
1328 for (i = 0; i < count; i++)
1329 {
1330 unsigned int index;
1331 struct cgraph_node *node;
1332 struct bitpack_d bp;
1333 funct_state fs;
1334 lto_symtab_encoder_t encoder;
1335
1336 index = streamer_read_uhwi (ib);
1337 encoder = file_data->symtab_node_encoder;
1338 node = dyn_cast<cgraph_node *> (p: lto_symtab_encoder_deref (encoder,
1339 ref: index));
1340
1341 fs = funct_state_summaries->get_create (node);
1342 /* Note that the flags must be read in the opposite
1343 order in which they were written (the bitflags were
1344 pushed into FLAGS). */
1345 bp = streamer_read_bitpack (ib);
1346 fs->pure_const_state
1347 = (enum pure_const_state_e) bp_unpack_value (bp: &bp, nbits: 2);
1348 fs->state_previously_known
1349 = (enum pure_const_state_e) bp_unpack_value (bp: &bp, nbits: 2);
1350 fs->looping_previously_known = bp_unpack_value (bp: &bp, nbits: 1);
1351 fs->looping = bp_unpack_value (bp: &bp, nbits: 1);
1352 fs->can_throw = bp_unpack_value (bp: &bp, nbits: 1);
1353 fs->can_free = bp_unpack_value (bp: &bp, nbits: 1);
1354 fs->malloc_state
1355 = (enum malloc_state_e) bp_unpack_value (bp: &bp, nbits: 2);
1356
1357 if (dump_file)
1358 {
1359 int flags = flags_from_decl_or_type (node->decl);
1360 fprintf (stream: dump_file, format: "Read info for %s ", node->dump_name ());
1361 if (flags & ECF_CONST)
1362 fprintf (stream: dump_file, format: " const");
1363 if (flags & ECF_PURE)
1364 fprintf (stream: dump_file, format: " pure");
1365 if (flags & ECF_NOTHROW)
1366 fprintf (stream: dump_file, format: " nothrow");
1367 fprintf (stream: dump_file, format: "\n pure const state: %s\n",
1368 pure_const_names[fs->pure_const_state]);
1369 fprintf (stream: dump_file, format: " previously known state: %s\n",
1370 pure_const_names[fs->state_previously_known]);
1371 if (fs->looping)
1372 fprintf (stream: dump_file,format: " function is locally looping\n");
1373 if (fs->looping_previously_known)
1374 fprintf (stream: dump_file,format: " function is previously known looping\n");
1375 if (fs->can_throw)
1376 fprintf (stream: dump_file,format: " function is locally throwing\n");
1377 if (fs->can_free)
1378 fprintf (stream: dump_file,format: " function can locally free\n");
1379 fprintf (stream: dump_file, format: "\n malloc state: %s\n",
1380 malloc_state_names[fs->malloc_state]);
1381 }
1382 }
1383
1384 lto_destroy_simple_input_block (file_data,
1385 LTO_section_ipa_pure_const,
1386 ib, data, len);
1387 }
1388 }
1389}
1390
/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */
1393
1394static bool
1395ignore_edge_for_nothrow (struct cgraph_edge *e)
1396{
1397 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1398 return true;
1399
1400 enum availability avail;
1401 cgraph_node *ultimate_target
1402 = e->callee->function_or_virtual_thunk_symbol (avail: &avail, ref: e->caller);
1403 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1404 return true;
1405 return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1406 && !e->callee->binds_to_current_def_p (ref: e->caller))
1407 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1408 || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1409}
1410
/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */
1415
1416static bool
1417self_recursive_p (struct cgraph_node *node)
1418{
1419 struct cgraph_edge *e;
1420 for (e = node->callees; e; e = e->next_callee)
1421 if (e->callee->function_symbol () == node)
1422 return true;
1423 return false;
1424}
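
/* For instance (illustrative only), "int f (int n) { return n ? f (n - 1)
   : 0; }" is self-recursive, so even if it is otherwise const it is
   treated as possibly looping during propagation.  */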
1425
/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove the unreachable function if it is marked const/pure.  */
1428
1429static bool
1430cdtor_p (cgraph_node *n, void *)
1431{
1432 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1433 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1434 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1435 return false;
1436}
1437
/* Skip edges from and to nodes without ipa_pure_const enabled.
   Ignore unavailable symbols.  */
1440
1441static bool
1442ignore_edge_for_pure_const (struct cgraph_edge *e)
1443{
1444 enum availability avail;
1445 cgraph_node *ultimate_target
1446 = e->callee->function_or_virtual_thunk_symbol (avail: &avail, ref: e->caller);
1447
1448 return (avail <= AVAIL_INTERPOSABLE
1449 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1450 || !opt_for_fn (ultimate_target->decl,
1451 flag_ipa_pure_const));
1452}
1453
1454/* Return true if function should be skipped for local pure const analysis. */
1455
1456static bool
1457skip_function_for_local_pure_const (struct cgraph_node *node)
1458{
1459 /* Because we do not schedule pass_fixup_cfg over whole program after early
1460 optimizations we must not promote functions that are called by already
1461 processed functions. */
1462
1463 if (function_called_by_processed_nodes_p ())
1464 {
1465 if (dump_file)
1466 fprintf (stream: dump_file, format: "Function called in recursive cycle; ignoring\n");
1467 return true;
1468 }
1469 /* Save some work and do not analyze functions which are interposable and
1470 do not have any non-interposable aliases. */
1471 if (node->get_availability () <= AVAIL_INTERPOSABLE
1472 && !flag_lto
1473 && !node->has_aliases_p ())
1474 {
1475 if (dump_file)
1476 fprintf (stream: dump_file,
1477 format: "Function is interposable; not analyzing.\n");
1478 return true;
1479 }
1480 return false;
1481}
1482
/* Make the function const and output a warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable cdtors.  */
1486
1487bool
1488ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
1489{
1490 bool cdtor = false;
1491
1492 if (TREE_READONLY (node->decl)
1493 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
1494 return false;
1495 warn_function_const (decl: node->decl, known_finite: !looping);
1496 if (local && skip_function_for_local_pure_const (node))
1497 return false;
1498 if (dump_file)
1499 fprintf (stream: dump_file, format: "Function found to be %sconst: %s\n",
1500 looping ? "looping " : "",
1501 node->dump_name ());
1502 if (!local && !looping)
1503 cdtor = node->call_for_symbol_and_aliases (callback: cdtor_p, NULL, include_overwritable: true);
1504 if (!dbg_cnt (index: ipa_attr))
1505 return false;
1506 if (node->set_const_flag (set_const: true, looping))
1507 {
1508 if (dump_file)
1509 fprintf (stream: dump_file,
1510 format: "Declaration updated to be %sconst: %s\n",
1511 looping ? "looping " : "",
1512 node->dump_name ());
1513 if (local)
1514 return true;
1515 return cdtor;
1516 }
1517 return false;
1518}
1519
/* Make the function pure and output a warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable cdtors.  */
1523
1524bool
1525ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
1526{
1527 bool cdtor = false;
1528
1529 if (TREE_READONLY (node->decl)
1530 || (DECL_PURE_P (node->decl)
1531 && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
1532 return false;
1533 warn_function_pure (decl: node->decl, known_finite: !looping);
1534 if (local && skip_function_for_local_pure_const (node))
1535 return false;
1536 if (dump_file)
1537 fprintf (stream: dump_file, format: "Function found to be %spure: %s\n",
1538 looping ? "looping " : "",
1539 node->dump_name ());
1540 if (!local && !looping)
1541 cdtor = node->call_for_symbol_and_aliases (callback: cdtor_p, NULL, include_overwritable: true);
1542 if (!dbg_cnt (index: ipa_attr))
1543 return false;
1544 if (node->set_pure_flag (pure: true, looping))
1545 {
1546 if (dump_file)
1547 fprintf (stream: dump_file,
1548 format: "Declaration updated to be %spure: %s\n",
1549 looping ? "looping " : "",
1550 node->dump_name ());
1551 if (local)
1552 return true;
1553 return cdtor;
1554 }
1555 return false;
1556}
1557
1558/* Produce transitive closure over the callgraph and compute pure/const
1559 attributes. */
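
/* Propagation sketch (hypothetical code): given

     int sq (int x) { return x * x; }
     int quad (int x) { return sq (x) * sq (x); }

   once the local analysis finds sq to be const, walking the reduced
   postorder lets quad inherit IPA_CONST as well, provided none of its own
   statements, references or other callees force a worse lattice value.  */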
1560
1561static bool
1562propagate_pure_const (void)
1563{
1564 struct cgraph_node *node;
1565 struct cgraph_node *w;
1566 struct cgraph_node **order =
1567 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1568 int order_pos;
1569 int i;
1570 struct ipa_dfs_info * w_info;
1571 bool remove_p = false;
1572
1573 order_pos = ipa_reduced_postorder (order, true,
1574 ignore_edge: ignore_edge_for_pure_const);
1575 if (dump_file)
1576 {
1577 cgraph_node::dump_cgraph (f: dump_file);
1578 ipa_print_order (dump_file, "reduced", order, order_pos);
1579 }
1580
1581 /* Propagate the local information through the call graph to produce
1582 the global information. All the nodes within a cycle will have
1583 the same info so we collapse cycles first. Then we can do the
1584 propagation in one pass from the leaves to the roots. */
1585 for (i = 0; i < order_pos; i++ )
1586 {
1587 enum pure_const_state_e pure_const_state = IPA_CONST;
1588 bool looping = false;
1589 int count = 0;
1590 node = order[i];
1591
1592 if (node->alias)
1593 continue;
1594
1595 if (dump_file && (dump_flags & TDF_DETAILS))
1596 fprintf (stream: dump_file, format: "Starting cycle\n");
1597
1598 /* Find the worst state for any node in the cycle. */
1599 w = node;
1600 while (w && pure_const_state != IPA_NEITHER)
1601 {
1602 struct cgraph_edge *e;
1603 struct cgraph_edge *ie;
1604 int i;
1605 struct ipa_ref *ref = NULL;
1606
1607 funct_state w_l = funct_state_summaries->get_create (node: w);
1608 if (dump_file && (dump_flags & TDF_DETAILS))
1609 fprintf (stream: dump_file, format: " Visiting %s state:%s looping %i\n",
1610 w->dump_name (),
1611 pure_const_names[w_l->pure_const_state],
1612 w_l->looping);
1613
1614 /* First merge in function body properties.
1615 We are safe to pass NULL as FROM and TO because we will take care
1616 of possible interposition when walking callees. */
1617 worse_state (state: &pure_const_state, looping: &looping,
1618 state2: w_l->pure_const_state, looping2: w_l->looping,
1619 NULL, NULL);
1620 if (pure_const_state == IPA_NEITHER)
1621 break;
1622
1623 count++;
1624
1625 /* We consider recursive cycles as possibly infinite.
1626 This might be relaxed since infinite recursion leads to stack
1627 overflow. */
1628 if (count > 1)
1629 looping = true;
1630
1631 /* Now walk the edges and merge in callee properties. */
1632 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1633 e = e->next_callee)
1634 {
1635 enum availability avail;
1636 struct cgraph_node *y = e->callee->
1637 function_or_virtual_thunk_symbol (avail: &avail,
1638 ref: e->caller);
1639 enum pure_const_state_e edge_state = IPA_CONST;
1640 bool edge_looping = false;
1641
1642 if (e->recursive_p ())
1643 looping = true;
1644
1645 if (dump_file && (dump_flags & TDF_DETAILS))
1646 {
1647 fprintf (stream: dump_file, format: " Call to %s",
1648 e->callee->dump_name ());
1649 }
1650 if (avail > AVAIL_INTERPOSABLE)
1651 {
1652 funct_state y_l = funct_state_summaries->get_create (node: y);
1653
1654 if (dump_file && (dump_flags & TDF_DETAILS))
1655 {
1656 fprintf (stream: dump_file,
1657 format: " state:%s looping:%i\n",
1658 pure_const_names[y_l->pure_const_state],
1659 y_l->looping);
1660 }
1661 if (y_l->pure_const_state > IPA_PURE
1662 && e->cannot_lead_to_return_p ())
1663 {
1664 if (dump_file && (dump_flags & TDF_DETAILS))
1665 fprintf (stream: dump_file,
1666 format: " Ignoring side effects"
1667 " -> pure, looping\n");
1668 edge_state = IPA_PURE;
1669 edge_looping = true;
1670 }
1671 else
1672 {
1673 edge_state = y_l->pure_const_state;
1674 edge_looping = y_l->looping;
1675 }
1676 }
1677 else if (builtin_safe_for_const_function_p (looping: &edge_looping,
1678 callee: y->decl))
1679 edge_state = IPA_CONST;
1680 else
1681 state_from_flags (state: &edge_state, looping: &edge_looping,
1682 flags: flags_from_decl_or_type (y->decl),
1683 cannot_lead_to_return: e->cannot_lead_to_return_p ());
1684
1685 /* Merge the results with what we already know. */
1686 better_state (state: &edge_state, looping: &edge_looping,
1687 state2: w_l->state_previously_known,
1688 looping2: w_l->looping_previously_known);
1689 worse_state (state: &pure_const_state, looping: &looping,
1690 state2: edge_state, looping2: edge_looping, from: e->caller, to: e->callee);
1691 if (pure_const_state == IPA_NEITHER)
1692 break;
1693 }
1694
1695 /* Now process the indirect call. */
1696 for (ie = w->indirect_calls;
1697 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1698 {
1699 enum pure_const_state_e edge_state = IPA_CONST;
1700 bool edge_looping = false;
1701
1702 if (dump_file && (dump_flags & TDF_DETAILS))
1703 fprintf (stream: dump_file, format: " Indirect call");
1704 state_from_flags (state: &edge_state, looping: &edge_looping,
1705 flags: ie->indirect_info->ecf_flags,
1706 cannot_lead_to_return: ie->cannot_lead_to_return_p ());
1707 /* Merge the results with what we already know. */
1708 better_state (state: &edge_state, looping: &edge_looping,
1709 state2: w_l->state_previously_known,
1710 looping2: w_l->looping_previously_known);
1711 worse_state (state: &pure_const_state, looping: &looping,
1712 state2: edge_state, looping2: edge_looping, NULL, NULL);
1713 if (pure_const_state == IPA_NEITHER)
1714 break;
1715 }
1716
1717 /* And finally all loads and stores. */
1718 for (i = 0; w->iterate_reference (i, ref)
1719 && pure_const_state != IPA_NEITHER; i++)
1720 {
1721 enum pure_const_state_e ref_state = IPA_CONST;
1722 bool ref_looping = false;
1723 switch (ref->use)
1724 {
1725 case IPA_REF_LOAD:
1726 /* readonly reads are safe. */
1727 if (TREE_READONLY (ref->referred->decl))
1728 break;
1729 if (dump_file && (dump_flags & TDF_DETAILS))
1730 fprintf (stream: dump_file, format: " nonreadonly global var read\n");
1731 ref_state = IPA_PURE;
1732 break;
1733 case IPA_REF_STORE:
1734 if (ref->cannot_lead_to_return ())
1735 break;
1736 ref_state = IPA_NEITHER;
1737 if (dump_file && (dump_flags & TDF_DETAILS))
1738 fprintf (stream: dump_file, format: " global var write\n");
1739 break;
1740 case IPA_REF_ADDR:
1741 break;
1742 default:
1743 gcc_unreachable ();
1744 }
1745 better_state (state: &ref_state, looping: &ref_looping,
1746 state2: w_l->state_previously_known,
1747 looping2: w_l->looping_previously_known);
1748 worse_state (state: &pure_const_state, looping: &looping,
1749 state2: ref_state, looping2: ref_looping, NULL, NULL);
1750 if (pure_const_state == IPA_NEITHER)
1751 break;
1752 }
1753 w_info = (struct ipa_dfs_info *) w->aux;
1754 w = w_info->next_cycle;
1755 }
1756 if (dump_file && (dump_flags & TDF_DETAILS))
1757 fprintf (stream: dump_file, format: "Result %s looping %i\n",
1758 pure_const_names [pure_const_state],
1759 looping);
1760
1761 /* Find the worst state of can_free for any node in the cycle. */
1762 bool can_free = false;
1763 w = node;
1764 while (w && !can_free)
1765 {
1766 struct cgraph_edge *e;
1767          funct_state w_l = funct_state_summaries->get (w);
1768
1769 if (w_l->can_free
1770 || w->get_availability () == AVAIL_INTERPOSABLE
1771 || w->indirect_calls)
1772 can_free = true;
1773
1774 for (e = w->callees; e && !can_free; e = e->next_callee)
1775 {
1776 enum availability avail;
1777 struct cgraph_node *y = e->callee->
1778                function_or_virtual_thunk_symbol (&avail,
1779                                                  e->caller);
1780
1781 if (avail > AVAIL_INTERPOSABLE)
1782                can_free = funct_state_summaries->get (y)->can_free;
1783 else
1784 can_free = true;
1785 }
1786 w_info = (struct ipa_dfs_info *) w->aux;
1787 w = w_info->next_cycle;
1788 }
1789
1790 /* Copy back the region's pure_const_state which is shared by
1791 all nodes in the region. */
1792 w = node;
1793 while (w)
1794 {
1795          funct_state w_l = funct_state_summaries->get (w);
1796 enum pure_const_state_e this_state = pure_const_state;
1797 bool this_looping = looping;
1798
1799 w_l->can_free = can_free;
1800 w->nonfreeing_fn = !can_free;
1801 if (!can_free && dump_file)
1802            fprintf (dump_file, "Function found not to call free: %s\n",
1803 w->dump_name ());
1804
1805 if (w_l->state_previously_known != IPA_NEITHER
1806 && this_state > w_l->state_previously_known)
1807 {
1808 if (this_state == IPA_NEITHER)
1809 this_looping = w_l->looping_previously_known;
1810 this_state = w_l->state_previously_known;
1811 }
1812          if (!this_looping && self_recursive_p (w))
1813 this_looping = true;
1814 if (!w_l->looping_previously_known)
1815 this_looping = false;
1816
1817 /* All nodes within a cycle share the same info. */
1818 w_l->pure_const_state = this_state;
1819 w_l->looping = this_looping;
1820
1821 /* Inline clones share declaration with their offline copies;
1822 do not modify their declarations since the offline copy may
1823 be different. */
1824 if (!w->inlined_to)
1825 switch (this_state)
1826 {
1827 case IPA_CONST:
1828              remove_p |= ipa_make_function_const (w, this_looping, false);
1829 break;
1830
1831 case IPA_PURE:
1832              remove_p |= ipa_make_function_pure (w, this_looping, false);
1833 break;
1834
1835 default:
1836 break;
1837 }
1838 w_info = (struct ipa_dfs_info *) w->aux;
1839 w = w_info->next_cycle;
1840 }
1841 }
1842
1843 ipa_free_postorder_info ();
1844  free (order);
1845 return remove_p;
1846}
1847
1848/* Produce transitive closure over the callgraph and compute nothrow
1849 attributes. */
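/* Illustrative sketch (hypothetical user code, not from this file): if the
   local analysis recorded can_throw == false for

     void leaf (void) { }

   then a caller such as

     void wrap (void) { leaf (); }

   is also concluded not to throw by the propagation below, and both
   declarations end up with TREE_NOTHROW set.  */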
1850
1851static void
1852propagate_nothrow (void)
1853{
1854 struct cgraph_node *node;
1855 struct cgraph_node *w;
1856 struct cgraph_node **order =
1857 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1858 int order_pos;
1859 int i;
1860 struct ipa_dfs_info * w_info;
1861
1862 order_pos = ipa_reduced_postorder (order, true,
1863                                     ignore_edge_for_nothrow);
1864 if (dump_file)
1865 {
1866      cgraph_node::dump_cgraph (dump_file);
1867 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1868 }
1869
1870 /* Propagate the local information through the call graph to produce
1871 the global information. All the nodes within a cycle will have
1872 the same info so we collapse cycles first. Then we can do the
1873 propagation in one pass from the leaves to the roots. */
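  /* Illustrative sketch (hypothetical user code, not from this file): two
     mutually recursive functions such as

       void pong (int);
       void ping (int n) { if (n) pong (n - 1); }
       void pong (int n) { if (n) ping (n - 1); }

     form a single strongly connected component; the inner loop below walks
     all of its members via ipa_dfs_info::next_cycle so that both receive
     the same can_throw verdict.  */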
1874 for (i = 0; i < order_pos; i++ )
1875 {
1876 bool can_throw = false;
1877 node = order[i];
1878
1879 if (node->alias)
1880 continue;
1881
1882 /* Find the worst state for any node in the cycle. */
1883 w = node;
1884 while (w && !can_throw)
1885 {
1886 struct cgraph_edge *e, *ie;
1887
1888 if (!TREE_NOTHROW (w->decl))
1889 {
1890              funct_state w_l = funct_state_summaries->get_create (w);
1891
1892 if (w_l->can_throw
1893 || w->get_availability () == AVAIL_INTERPOSABLE)
1894 can_throw = true;
1895
1896 for (e = w->callees; e && !can_throw; e = e->next_callee)
1897 {
1898 enum availability avail;
1899
1900 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1901 continue;
1902
1903 struct cgraph_node *y = e->callee->
1904                    function_or_virtual_thunk_symbol (&avail,
1905                                                      e->caller);
1906
1907                  /* We can use info about the callee only if we know it
1908                     cannot be interposed.
1909                     When the callee is compiled with non-call exceptions we
1910                     also must check that the declaration is bound to the
1911                     current body, as another semantically equivalent body
1912                     may still throw.  */
1913 if (avail <= AVAIL_INTERPOSABLE
1914 || (!TREE_NOTHROW (y->decl)
1915                      && (funct_state_summaries->get_create (y)->can_throw
1916                          || (opt_for_fn (y->decl, flag_non_call_exceptions)
1917                              && !e->callee->binds_to_current_def_p (w)))))
1918 can_throw = true;
1919 }
1920 for (ie = w->indirect_calls; ie && !can_throw;
1921 ie = ie->next_callee)
1922 if (ie->can_throw_external
1923 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1924 can_throw = true;
1925 }
1926 w_info = (struct ipa_dfs_info *) w->aux;
1927 w = w_info->next_cycle;
1928 }
1929
1930      /* Copy back the region's nothrow state, which is shared by
1931         all nodes in the region.  */
1932 w = node;
1933 while (w)
1934 {
1935          funct_state w_l = funct_state_summaries->get_create (w);
1936 if (!can_throw && !TREE_NOTHROW (w->decl))
1937 {
1938 /* Inline clones share declaration with their offline copies;
1939 do not modify their declarations since the offline copy may
1940 be different. */
1941 if (!w->inlined_to)
1942 {
1943 w->set_nothrow_flag (true);
1944 if (dump_file)
1945                fprintf (dump_file, "Function found to be nothrow: %s\n",
1946 w->dump_name ());
1947 }
1948 }
1949 else if (can_throw && !TREE_NOTHROW (w->decl))
1950 w_l->can_throw = true;
1951 w_info = (struct ipa_dfs_info *) w->aux;
1952 w = w_info->next_cycle;
1953 }
1954 }
1955
1956 ipa_free_postorder_info ();
1957  free (order);
1958}
1959
1960/* Debugging function to dump state of malloc lattice. */
1961
1962DEBUG_FUNCTION
1963static void
1964dump_malloc_lattice (FILE *dump_file, const char *s)
1965{
1966 if (!dump_file)
1967 return;
1968
1969  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1970 cgraph_node *node;
1971 FOR_EACH_FUNCTION (node)
1972 {
1973 funct_state fs = funct_state_summaries->get (node);
1974 if (fs)
1975      fprintf (dump_file, "%s: %s\n", node->dump_name (),
1976 malloc_state_names[fs->malloc_state]);
1977 }
1978}
1979
1980/* Propagate malloc attribute across the callgraph. */
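/* Illustrative sketch (hypothetical user code, not from this file):

     extern void *xmalloc (__SIZE_TYPE__);	// already DECL_IS_MALLOC
     void *alloc_buf (__SIZE_TYPE__ n) { return xmalloc (n); }

   The DECL_IS_MALLOC callee is seeded with STATE_MALLOC below; alloc_buf is
   then joined with the worst (largest) state of the callees whose return
   value it hands back uncaptured, while indirect calls or an interposable
   definition force it straight to STATE_MALLOC_BOTTOM.  */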
1981
1982static void
1983propagate_malloc (void)
1984{
1985 cgraph_node *node;
1986 FOR_EACH_FUNCTION (node)
1987 {
1988 if (DECL_IS_MALLOC (node->decl))
1989 if (!funct_state_summaries->exists (node))
1990 {
1991 funct_state fs = funct_state_summaries->get_create (node);
1992 fs->malloc_state = STATE_MALLOC;
1993 }
1994 }
1995
1996  dump_malloc_lattice (dump_file, "Initial");
1997 struct cgraph_node **order
1998 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1999 int order_pos = ipa_reverse_postorder (order);
2000 bool changed = true;
2001
2002 while (changed)
2003 {
2004 changed = false;
2005 /* Walk in postorder. */
2006 for (int i = order_pos - 1; i >= 0; --i)
2007 {
2008 cgraph_node *node = order[i];
2009 if (node->alias
2010 || !node->definition
2011 || !funct_state_summaries->exists (node))
2012 continue;
2013
2014 funct_state l = funct_state_summaries->get (node);
2015
2016 /* FIXME: add support for indirect-calls. */
2017 if (node->indirect_calls)
2018 {
2019 l->malloc_state = STATE_MALLOC_BOTTOM;
2020 continue;
2021 }
2022
2023 if (node->get_availability () <= AVAIL_INTERPOSABLE)
2024 {
2025 l->malloc_state = STATE_MALLOC_BOTTOM;
2026 continue;
2027 }
2028
2029 if (l->malloc_state == STATE_MALLOC_BOTTOM)
2030 continue;
2031
2032 auto_vec<cgraph_node *, 16> callees;
2033 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2034 {
2035              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
2036              if (es && es->is_return_callee_uncaptured)
2037                callees.safe_push (cs->callee);
2038 }
2039
2040 malloc_state_e new_state = l->malloc_state;
2041 for (unsigned j = 0; j < callees.length (); j++)
2042 {
2043 cgraph_node *callee = callees[j];
2044              if (!funct_state_summaries->exists (callee))
2045 {
2046 new_state = STATE_MALLOC_BOTTOM;
2047 break;
2048 }
2049 malloc_state_e callee_state
2050                = funct_state_summaries->get_create (callee)->malloc_state;
2051 if (new_state < callee_state)
2052 new_state = callee_state;
2053 }
2054 if (new_state != l->malloc_state)
2055 {
2056 changed = true;
2057 l->malloc_state = new_state;
2058 }
2059 }
2060 }
2061
2062 FOR_EACH_DEFINED_FUNCTION (node)
2063 if (funct_state_summaries->exists (node))
2064 {
2065 funct_state l = funct_state_summaries->get (node);
2066 if (!node->alias
2067 && l->malloc_state == STATE_MALLOC
2068 && !node->inlined_to
2069 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
2070 {
2071 if (dump_file && (dump_flags & TDF_DETAILS))
2072            fprintf (dump_file, "Function %s found to be malloc\n",
2073 node->dump_name ());
2074
2075 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
2076 node->set_malloc_flag (true);
2077 if (!malloc_decl_p && warn_suggest_attribute_malloc)
2078            warn_function_malloc (node->decl);
2079 }
2080 }
2081
2082  dump_malloc_lattice (dump_file, "after propagation");
2083  ipa_free_postorder_info ();
2084  free (order);
2085}
2086
2087/* Produce the global information by performing a transitive closure
2088 on the local information that was produced by generate_summary. */
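/* Illustrative sketch (hypothetical user code, not from this file): once a
   function such as

     int sq (int x) { return x * x; }

   has been discovered const by the propagation, a caller computing
   sq (a) + sq (a) needs only one call, and calls whose results are unused
   can be deleted outright.  */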
2089
2090unsigned int
2091pass_ipa_pure_const::
2092execute (function *)
2093{
2094 bool remove_p;
2095
2096  /* Nothrow propagation makes more functions known not to lead to a
2097     return, which improves the later analysis.  */
2098 propagate_nothrow ();
2099 propagate_malloc ();
2100 remove_p = propagate_pure_const ();
2101
2102 delete funct_state_summaries;
2103 return remove_p ? TODO_remove_functions : 0;
2104}
2105
2106static bool
2107gate_pure_const (void)
2108{
2109 return flag_ipa_pure_const || in_lto_p;
2110}
2111
2112pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2113 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2114 pure_const_generate_summary, /* generate_summary */
2115 pure_const_write_summary, /* write_summary */
2116 pure_const_read_summary, /* read_summary */
2117 NULL, /* write_optimization_summary */
2118 NULL, /* read_optimization_summary */
2119 NULL, /* stmt_fixup */
2120 0, /* function_transform_todo_flags_start */
2121 NULL, /* function_transform */
2122 NULL), /* variable_transform */
2123 init_p (false) {}
2124
2125ipa_opt_pass_d *
2126make_pass_ipa_pure_const (gcc::context *ctxt)
2127{
2128 return new pass_ipa_pure_const (ctxt);
2129}
2130
2131/* Simple local pass for pure const discovery reusing the analysis from
2132   ipa_pure_const.  This pass is effective when executed together with
2133   other optimization passes in the early optimization pass queue.  */
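/* For example (hypothetical user code, not from this file): compiling

     int global;
     int read_global (void) { return global; }

   with -O2 -Wsuggest-attribute=pure lets this pass report read_global as a
   candidate for __attribute__ ((pure)), since it only reads global memory
   and has no other side effects.  */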
2134
2135namespace {
2136
2137const pass_data pass_data_local_pure_const =
2138{
2139  GIMPLE_PASS, /* type */
2140  "local-pure-const", /* name */
2141  OPTGROUP_NONE, /* optinfo_flags */
2142  TV_IPA_PURE_CONST, /* tv_id */
2143  0, /* properties_required */
2144  0, /* properties_provided */
2145  0, /* properties_destroyed */
2146  0, /* todo_flags_start */
2147  0, /* todo_flags_finish */
2148};
2149
2150class pass_local_pure_const : public gimple_opt_pass
2151{
2152public:
2153 pass_local_pure_const (gcc::context *ctxt)
2154 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2155 {}
2156
2157 /* opt_pass methods: */
2158 opt_pass * clone () final override
2159 {
2160 return new pass_local_pure_const (m_ctxt);
2161 }
2162 bool gate (function *) final override { return gate_pure_const (); }
2163 unsigned int execute (function *) final override;
2164
2165}; // class pass_local_pure_const
2166
2167unsigned int
2168pass_local_pure_const::execute (function *fun)
2169{
2170 bool changed = false;
2171 funct_state l;
2172 bool skip;
2173 struct cgraph_node *node;
2174
2175  node = cgraph_node::get (current_function_decl);
2176 skip = skip_function_for_local_pure_const (node);
2177
2178 if (!warn_suggest_attribute_const
2179 && !warn_suggest_attribute_pure
2180 && skip)
2181 return 0;
2182
2183  l = analyze_function (node, false);
2184
2185 /* Do NORETURN discovery. */
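  /* E.g. (hypothetical, not from this file): in

       void fatal (const char *msg) { puts (msg); exit (1); }

     the call to the noreturn function exit leaves the exit block without
     predecessors, so fatal's declaration is marked noreturn here and
     -Wsuggest-attribute=noreturn can suggest adding the attribute.  */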
2186 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2187 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2188 {
2189      warn_function_noreturn (fun->decl);
2190      if (dump_file)
2191        fprintf (dump_file, "Function found to be noreturn: %s\n",
2192 current_function_name ());
2193
2194 /* Update declaration and reduce profile to executed once. */
2195      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
2196 changed = true;
2197 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2198 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2199 }
2200
2201 switch (l->pure_const_state)
2202 {
2203 case IPA_CONST:
2204 changed |= ipa_make_function_const
2205         (cgraph_node::get (current_function_decl), l->looping, true);
2206 break;
2207
2208 case IPA_PURE:
2209 changed |= ipa_make_function_pure
2210         (cgraph_node::get (current_function_decl), l->looping, true);
2211 break;
2212
2213 default:
2214 break;
2215 }
2216 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2217 {
2218 node->set_nothrow_flag (true);
2219 changed = true;
2220 if (dump_file)
2221        fprintf (dump_file, "Function found to be nothrow: %s\n",
2222 current_function_name ());
2223 }
2224
2225 if (l->malloc_state == STATE_MALLOC
2226 && !DECL_IS_MALLOC (current_function_decl))
2227 {
2228 node->set_malloc_flag (true);
2229 if (warn_suggest_attribute_malloc)
2230        warn_function_malloc (node->decl);
2231 changed = true;
2232 if (dump_file)
2233        fprintf (dump_file, "Function found to be malloc: %s\n",
2234 node->dump_name ());
2235 }
2236
2237  free (l);
2238 if (changed)
2239 return execute_fixup_cfg ();
2240 else
2241 return 0;
2242}
2243
2244} // anon namespace
2245
2246gimple_opt_pass *
2247make_pass_local_pure_const (gcc::context *ctxt)
2248{
2249 return new pass_local_pure_const (ctxt);
2250}
2251
2252/* Emit noreturn warnings. */
2253
2254namespace {
2255
2256const pass_data pass_data_warn_function_noreturn =
2257{
2258  GIMPLE_PASS, /* type */
2259  "*warn_function_noreturn", /* name */
2260  OPTGROUP_NONE, /* optinfo_flags */
2261  TV_NONE, /* tv_id */
2262  PROP_cfg, /* properties_required */
2263  0, /* properties_provided */
2264  0, /* properties_destroyed */
2265  0, /* todo_flags_start */
2266  0, /* todo_flags_finish */
2267};
2268
2269class pass_warn_function_noreturn : public gimple_opt_pass
2270{
2271public:
2272 pass_warn_function_noreturn (gcc::context *ctxt)
2273 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2274 {}
2275
2276 /* opt_pass methods: */
2277 bool gate (function *) final override
2278 {
2279 return warn_suggest_attribute_noreturn;
2280 }
2281 unsigned int execute (function *fun) final override
2282 {
2283 if (!TREE_THIS_VOLATILE (current_function_decl)
2284 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2285      warn_function_noreturn (current_function_decl);
2286 return 0;
2287 }
2288
2289}; // class pass_warn_function_noreturn
2290
2291} // anon namespace
2292
2293gimple_opt_pass *
2294make_pass_warn_function_noreturn (gcc::context *ctxt)
2295{
2296 return new pass_warn_function_noreturn (ctxt);
2297}
2298
2299/* Simple local pass for nothrow discovery.  This pass is effective when
2300   executed together with other optimization passes in the early
2301   optimization pass queue.  */
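/* For example (hypothetical user code, not from this file): a function like

     int add (int a, int b) { return a + b; }

   contains no statement that can throw externally, so this pass sets
   TREE_NOTHROW on it early, which lets EH cleanups around its calls be
   removed in callers.  */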
2302
2303namespace {
2304
2305const pass_data pass_data_nothrow =
2306{
2307  GIMPLE_PASS, /* type */
2308  "nothrow", /* name */
2309  OPTGROUP_NONE, /* optinfo_flags */
2310  TV_IPA_PURE_CONST, /* tv_id */
2311  0, /* properties_required */
2312  0, /* properties_provided */
2313  0, /* properties_destroyed */
2314  0, /* todo_flags_start */
2315  0, /* todo_flags_finish */
2316};
2317
2318class pass_nothrow : public gimple_opt_pass
2319{
2320public:
2321 pass_nothrow (gcc::context *ctxt)
2322 : gimple_opt_pass (pass_data_nothrow, ctxt)
2323 {}
2324
2325 /* opt_pass methods: */
2326 opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
2327 bool gate (function *) final override { return optimize; }
2328 unsigned int execute (function *) final override;
2329
2330}; // class pass_nothrow
2331
2332unsigned int
2333pass_nothrow::execute (function *)
2334{
2335 struct cgraph_node *node;
2336 basic_block this_block;
2337
2338 if (TREE_NOTHROW (current_function_decl))
2339 return 0;
2340
2341  node = cgraph_node::get (current_function_decl);
2342
2343  /* We run during lowering; we cannot really use availability yet.  */
2344  if (cgraph_node::get (current_function_decl)->get_availability ()
2345      <= AVAIL_INTERPOSABLE)
2346    {
2347      if (dump_file)
2348        fprintf (dump_file, "Function is interposable;"
2349                 " not analyzing.\n");
2350      return 0;
2351 }
2352
2353 FOR_EACH_BB_FN (this_block, cfun)
2354 {
2355      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2356           !gsi_end_p (gsi);
2357           gsi_next (&gsi))
2358        if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2359          {
2360            if (is_gimple_call (gsi_stmt (gsi)))
2361              {
2362                tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2363 if (callee_t && recursive_call_p (current_function_decl,
2364 callee_t))
2365 continue;
2366 }
2367
2368 if (dump_file)
2369 {
2370                fprintf (dump_file, "Statement can throw: ");
2371                print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2372 }
2373 return 0;
2374 }
2375 }
2376
2377 node->set_nothrow_flag (true);
2378
2379 bool cfg_changed = false;
2380 if (self_recursive_p (node))
2381 FOR_EACH_BB_FN (this_block, cfun)
2382    if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
2383      {
2384        tree callee_t = gimple_call_fndecl (g);
2385 if (callee_t
2386 && recursive_call_p (current_function_decl, callee_t)
2387 && maybe_clean_eh_stmt (g)
2388 && gimple_purge_dead_eh_edges (this_block))
2389 cfg_changed = true;
2390 }
2391
2392 if (dump_file)
2393    fprintf (dump_file, "Function found to be nothrow: %s\n",
2394 current_function_name ());
2395 return cfg_changed ? TODO_cleanup_cfg : 0;
2396}
2397
2398} // anon namespace
2399
2400gimple_opt_pass *
2401make_pass_nothrow (gcc::context *ctxt)
2402{
2403 return new pass_nothrow (ctxt);
2404}
2405
